Compare commits: new_sender ... main

236 commits (SHA1):

d44396ccee
11b8b575e4
dfa245e842
ed94562372
5fee3a1390
96bc7ec052
94d2fd0fc8
d8ceaef3f9
334d68ac1f
e4831e1a17
e0b81d2122
95c3c0f968
154c825499
5e8b244ebd
48ad545da1
4b002aaee4
088a374abb
6210dfc16e
d2128fcac8
0383be3ada
4b20edc04d
d624e7f691
2e24175899
9c8def3341
0ee08ac7e5
82cba33e99
9892e10312
f6c4f50291
64fcd5e863
78663ed522
b5ce560c88
ff0bb5743f
7569516916
c45b9a13bd
7ab588cb38
b6def9d2ff
af96f6daae
ad0d29fb57
763dbed9c3
6f96c0b929
7f7b73ec51
c6472630ca
3ad9b8b579
54fc8dd1f9
2891a24b21
d29e219fa4
772ec1e9a5
887763286c
0eb9163fd8
a011492f50
ae915b34a6
d8086452c3
8e27d80c33
c18628e22c
8990249bb8
37550779aa
321f741823
8a0c88c20d
b1f27d52de
1fd16e23ba
0a3ca3e422
47c16889bd
ff5e479dcb
a50e9535e0
5f8fd477e0
783ccf091a
a4f6dd5756
e421ead4ab
593abab525
0badc1c7b6
1a0d9fb1e4
4e4ac74948
1cebe69872
8a12abb164
42e3fde2eb
fb821b3e36
1b71740b84
f45061097d
6b4b9988db
c297861d9e
e1fad71085
f84dc6b4e7
5695ed8552
1f199e3496
dba3562c14
3ad7a07616
a958602dd2
f71b88a459
0aefa81c1d
b4d280039d
5e0bcc37f5
4c45f403a3
15c3b5f71e
edcac1a386
3c98b6f4bb
47e2d1ad7c
1b9726b7e7
89cd0263c4
ef1ee8dfae
4c6dd426ba
6cc5195a19
e98fe27a94
9538581a97
5e3b08a7f1
56f6220587
4f10e7cfcc
9d20059d44
9eede389f5
9399ffe3b6
8916ad33c8
8d9b3d8448
d5b18067f9
f060c47a24
2b8d170383
600f102ac9
282a5d1835
578b756ae2
56a734954d
699ff09533
da786fe33c
e9024651cf
995de7c707
3930844495
18c039ef5b
d5070f773e
1461828458
05ea58f0f3
f49e221ac7
8914fdc8d9
d0616858dc
5795939383
b624e59f34
3d54f73032
4a4c719aa6
a57991dace
db90a5a538
03ec09bf55
434322fc9b
e10ebe4de8
1f7e34236b
d6b94fb2c3
9665140a0d
90e2dd3ad9
22b349c0ca
4a727f42e4
dccf93119a
80f8e81dcb
06039dbe12
578054df58
768105f1e9
5f562ad260
c8e777fd7b
e59ee003cb
9c04e8757b
cd986efd0c
cc21e8b965
f8683022a5
fe67849ea7
f6df685694
ecbc1dc083
4fe781da00
d1f6e9b29f
a445f548f1
d06dc57bbb
f484c27b21
c0e598a32e
ac5707b781
2e9fc87fb5
7ab5416f92
1b706fdfac
a7cbe02428
ad894e4f2d
3fadf92fd5
2d646457be
ceccd15519
b8e80bbf1f
0d8a0c20a3
e8ebc1a558
0cc5d1baf6
aa99c6dc4f
877c514cc9
40092cd8b7
75abeefeaa
1393cc8520
ec7e9dbe95
47cbf4d5f2
4794fd4cf0
be92c15b9f
37223596e8
66a016425b
7b84f44710
f98c90cbd8
ab31104056
1e90972c04
64713d2ed4
d0790c7f1b
6b678eed88
d0c6c07833
7fd75ddcac
73829752ed
a52121b0fb
e2b5f53f83
ad7e26d77e
d7d916bddd
935dac6404
d79ec42220
7f9c96bf1f
2de40e83f1
41628dc4c0
8285034dd4
af6a6fa9bb
1b412b84f6
4cca379971
108675e6e6
7b74dfa2be
f49c0be541
c4100a7db4
d1caff5cbd
ab2bf398a2
469ebd7d4e
f5955915b3
8e09273f45
d13174661a
faf60efd4d
fcd15511d9
271bff816c
70505394fe
1fb1b590f1
478aca33b5
66ecd70b16
766db280f9
8c2a836e65
c79943ab80
f56e290dd5
ace8bf5e85
32df2db0bf
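
The same range can be listed straight from a local clone; a minimal sketch with plain Git, assuming both branches are present locally:

```bash
# Commits reachable from new_sender but not from main,
# i.e. the set shown by this compare view.
git log --oneline main..new_sender

# Count them (this should match the total reported above).
git rev-list --count main..new_sender
```
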
.checkstyle (new file, 10 added lines)
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>

<fileset-config file-format-version="1.2.0" simple-config="true" sync-formatter="false">
    <local-check-config name="Scenarium" location="/home/heero/dev/workspace-scenarium/scenarium-core/CheckStyle.xml" type="external" description="">
        <additional-data name="protect-config-file" value="false"/>
    </local-check-config>
    <fileset name="all" enabled="true" check-config-name="Scenarium" local="true">
        <file-match-pattern match-pattern="." include-pattern="true"/>
    </fileset>
</fileset-config>

.gitignore (vendored, 10 changed lines)
@@ -1,4 +1,6 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.
out
.idea

# compiled output
/dist
@@ -6,6 +8,12 @@
/tmp
/out-tsc

/front/dist

config.env

*.class

dataPush
node_modules

@@ -52,3 +60,5 @@ backPY/env

__pycache__

.design/
.vscode/

.gitmodules (new file, vendored, 3 added lines)
@@ -0,0 +1,3 @@
[submodule "front/src/common"]
	path = front/src/common
	url = ../common_web.git

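The submodule URL above is relative, so ../common_web.git is resolved against the parent repository's own remote. After cloning, the submodule still has to be fetched explicitly; a minimal sketch of the usual workflow (the clone URL is a placeholder, not taken from this page):

```bash
# Clone the repository and fetch front/src/common in one step.
git clone --recurse-submodules <karideo-repo-url>

# Or, in an existing checkout, initialize and update the submodule afterwards.
git submodule update --init --recursive front/src/common
```
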
.island/release.bash (new executable file, 32 added lines)
@@ -0,0 +1,32 @@
#!/bin/bash

version_file="../version.txt"

# update the dependencies for the new release
cd back
# update the Maven version number
mvn versions:set -DnewVersion=$(sed 's/dev/SNAPSHOT/g' $version_file)
if grep -q "DEV" "$version_file"; then
    # update all dependencies to their latest release versions
    mvn versions:use-latest-releases
    # update our managed dependencies to their latest snapshot versions
    mvn versions:use-latest-versions -Dincludes=kangaroo-and-rabbit
else
    # update our managed dependencies to their latest releases (must be done before)
    mvn versions:use-latest-releases -Dincludes=kangaroo-and-rabbit
fi
cd -


cd front
if grep -q "dev" "$version_file"; then
    # update all dependencies
    pnpm install
    pnpm run update_packages
else
    # in case of a release ==> cannot be done automatically ...
    echo not implemented
fi

cd -

Dockerfile (new file, 71 added lines)
@@ -0,0 +1,71 @@
######################################################################################
##
## building environment: install applications
##
######################################################################################
FROM archlinux:base-devel AS builder
# update system
RUN pacman -Syu --noconfirm && pacman-db-upgrade \
    && pacman -S --noconfirm jdk-openjdk maven npm pnpm \
    && pacman -Scc --noconfirm

ENV PATH /tmp/node_modules/.bin:$PATH
WORKDIR /tmp

######################################################################################
##
## Build back:
##
######################################################################################
FROM builder AS buildBack
COPY back/pom.xml /tmp
COPY back/src /tmp/src/
RUN mvn clean compile assembly:single

######################################################################################
##
## Build front:
##
######################################################################################
FROM builder AS buildFront

RUN echo "@kangaroo-and-rabbit:registry=https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/npm/" > /root/.npmrc

ADD front/package.json \
    front/karma.conf.js \
    front/protractor.conf.js \
    /tmp/

# install and cache app dependencies
RUN pnpm install

ADD front/e2e \
    front/tsconfig.json \
    front/tslint.json \
    front/angular.json \
    /tmp/
ADD front/src /tmp/src

# generate build
RUN ng build --output-path=dist --configuration=production --base-href=/karideo/ --deploy-url=/karideo/

######################################################################################
##
## Production area:
##
######################################################################################

FROM bellsoft/liberica-openjdk-alpine:latest
# add wget to manage the health check...
RUN apk add --no-cache wget

ENV LANG=C.UTF-8

COPY --from=buildBack /tmp/out/maven/*.jar /application/application.jar
COPY --from=buildFront /tmp/dist /application/front/

WORKDIR /application/

EXPOSE 80

CMD ["java", "-Xms64M", "-Xmx1G", "-cp", "/application/application.jar", "org.kar.karideo.WebLauncher"]

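The three stages above (builder, buildBack, buildFront) feed a single runtime image that carries the assembled backend jar and the Angular build. A minimal sketch of building and running it locally; the image tag and host port mapping are arbitrary examples, not values taken from the repository:

```bash
# Build the multi-stage image from the repository root.
docker build -t karideo:local .

# Run it, publishing the exposed port 80 on localhost:8080.
docker run --rm -p 8080:80 karideo:local
```
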
back/.checkstyle (new file, 7 added lines)
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>

<fileset-config file-format-version="1.2.0" simple-config="true" sync-formatter="false">
    <fileset name="all" enabled="true" check-config-name="Google Checks" local="false">
        <file-match-pattern match-pattern="." include-pattern="true"/>
    </fileset>
</fileset-config>

back/.gitignore (vendored, 9 changed lines)
@@ -1,9 +0,0 @@
config.*
config.env
.env
config
data
cache

__pycache__
*.pyc

back/CheckStyle.xml (new executable file, 66 added lines)
@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC "-//Checkstyle//DTD Check Configuration 1.3//EN" "https://checkstyle.org/dtds/configuration_1_3.dtd">

<!--
    This configuration file was written by the eclipse-cs plugin configuration editor
-->
<!--
    Checkstyle-Configuration: Marc Checks
    Description:
    Checkstyle configuration that checks the sun coding conventions.
-->
<module name="Checker">
    <property name="severity" value="error"/>
    <property name="fileExtensions" value="java, properties, xml"/>
    <module name="TreeWalker">
        <module name="ConstantName"/>
        <module name="LocalFinalVariableName"/>
        <module name="LocalVariableName"/>
        <module name="MemberName"/>
        <module name="MethodName"/>
        <module name="PackageName"/>
        <module name="ParameterName"/>
        <module name="StaticVariableName"/>
        <module name="TypeName"/>
        <module name="AvoidStarImport"/>
        <module name="IllegalImport"/>
        <module name="RedundantImport"/>
        <module name="UnusedImports">
            <property name="processJavadoc" value="false"/>
        </module>
        <module name="ModifierOrder"/>
        <module name="EmptyStatement"/>
        <module name="EqualsHashCode"/>
        <module name="IllegalInstantiation"/>
        <module name="MissingSwitchDefault"/>
        <module name="SimplifyBooleanExpression"/>
        <module name="SimplifyBooleanReturn"/>
        <module name="HideUtilityClassConstructor"/>
        <module name="InterfaceIsType"/>
        <module name="ArrayTypeStyle"/>
        <module name="TodoComment"/>
        <module name="UpperEll"/>
        <module name="AnnotationUseStyle"/>
        <module name="MissingDeprecated"/>
        <module name="MissingOverride"/>
        <module name="PackageAnnotation"/>
        <module name="SuppressWarnings"/>
        <module name="AnnotationLocation"/>
        <module name="ClassTypeParameterName"/>
        <module name="MethodTypeParameterName"/>
        <module name="InterfaceTypeParameterName"/>
        <module name="CatchParameterName"/>
        <module name="LambdaParameterName"/>
        <module name="Regexp"/>
        <module name="RegexpSinglelineJava"/>
    </module>
    <module name="BeforeExecutionExclusionFileFilter">
        <property name="fileNamePattern" value="module\-info\.java$"/>
    </module>
    <module name="Translation"/>
    <module name="Header"/>
    <module name="RegexpHeader"/>
    <module name="RegexpMultiline"/>
    <module name="RegexpOnFilename"/>
    <module name="RegexpSingleline"/>
</module>

back/CleanUp.xml (new file, 66 added lines)
@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE xml>
<profiles version="2">
    <profile kind="CleanUpProfile" name="Scenarium" version="2">
        <setting id="cleanup.use_autoboxing" value="false"/>
        <setting id="cleanup.qualify_static_method_accesses_with_declaring_class" value="false"/>
        <setting id="cleanup.always_use_this_for_non_static_method_access" value="false"/>
        <setting id="cleanup.organize_imports" value="true"/>
        <setting id="cleanup.remove_trailing_whitespaces_ignore_empty" value="false"/>
        <setting id="cleanup.format_source_code_changes_only" value="false"/>
        <setting id="cleanup.qualify_static_field_accesses_with_declaring_class" value="false"/>
        <setting id="cleanup.add_generated_serial_version_id" value="false"/>
        <setting id="cleanup.remove_redundant_semicolons" value="false"/>
        <setting id="cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class" value="true"/>
        <setting id="cleanup.remove_redundant_type_arguments" value="true"/>
        <setting id="cleanup.remove_unused_imports" value="true"/>
        <setting id="cleanup.insert_inferred_type_arguments" value="false"/>
        <setting id="cleanup.make_private_fields_final" value="true"/>
        <setting id="cleanup.use_lambda" value="true"/>
        <setting id="cleanup.always_use_blocks" value="false"/>
        <setting id="cleanup.use_this_for_non_static_field_access_only_if_necessary" value="false"/>
        <setting id="cleanup.sort_members_all" value="false"/>
        <setting id="cleanup.remove_trailing_whitespaces_all" value="true"/>
        <setting id="cleanup.add_missing_annotations" value="true"/>
        <setting id="cleanup.always_use_this_for_non_static_field_access" value="true"/>
        <setting id="cleanup.make_parameters_final" value="false"/>
        <setting id="cleanup.sort_members" value="false"/>
        <setting id="cleanup.remove_private_constructors" value="true"/>
        <setting id="cleanup.always_use_parentheses_in_expressions" value="false"/>
        <setting id="cleanup.remove_unused_local_variables" value="false"/>
        <setting id="cleanup.convert_to_enhanced_for_loop" value="false"/>
        <setting id="cleanup.remove_unused_private_fields" value="true"/>
        <setting id="cleanup.remove_redundant_modifiers" value="false"/>
        <setting id="cleanup.never_use_blocks" value="true"/>
        <setting id="cleanup.add_missing_deprecated_annotations" value="true"/>
        <setting id="cleanup.use_this_for_non_static_field_access" value="true"/>
        <setting id="cleanup.remove_unnecessary_nls_tags" value="true"/>
        <setting id="cleanup.qualify_static_member_accesses_through_instances_with_declaring_class" value="true"/>
        <setting id="cleanup.add_missing_nls_tags" value="false"/>
        <setting id="cleanup.remove_unnecessary_casts" value="true"/>
        <setting id="cleanup.use_unboxing" value="false"/>
        <setting id="cleanup.use_blocks_only_for_return_and_throw" value="false"/>
        <setting id="cleanup.format_source_code" value="true"/>
        <setting id="cleanup.convert_functional_interfaces" value="true"/>
        <setting id="cleanup.add_default_serial_version_id" value="true"/>
        <setting id="cleanup.remove_unused_private_methods" value="true"/>
        <setting id="cleanup.remove_trailing_whitespaces" value="true"/>
        <setting id="cleanup.make_type_abstract_if_missing_method" value="false"/>
        <setting id="cleanup.add_serial_version_id" value="true"/>
        <setting id="cleanup.use_this_for_non_static_method_access" value="false"/>
        <setting id="cleanup.use_this_for_non_static_method_access_only_if_necessary" value="true"/>
        <setting id="cleanup.use_anonymous_class_creation" value="false"/>
        <setting id="cleanup.add_missing_override_annotations_interface_methods" value="true"/>
        <setting id="cleanup.remove_unused_private_members" value="false"/>
        <setting id="cleanup.make_local_variable_final" value="false"/>
        <setting id="cleanup.add_missing_methods" value="false"/>
        <setting id="cleanup.never_use_parentheses_in_expressions" value="true"/>
        <setting id="cleanup.qualify_static_member_accesses_with_declaring_class" value="true"/>
        <setting id="cleanup.use_parentheses_in_expressions" value="true"/>
        <setting id="cleanup.add_missing_override_annotations" value="true"/>
        <setting id="cleanup.use_blocks" value="true"/>
        <setting id="cleanup.make_variable_declarations_final" value="true"/>
        <setting id="cleanup.correct_indentation" value="true"/>
        <setting id="cleanup.remove_unused_private_types" value="true"/>
    </profile>
</profiles>

back/Dockerfile (mode changed: Executable file → Normal file, 39 changed lines)
@@ -1,34 +1,21 @@
FROM python:alpine3.6
FROM maven:3.6.3-openjdk-16 AS build

RUN apk update && \
    apk upgrade && \
    apk add --update-cache \
    --repository http://dl-cdn.alpinelinux.org/alpine/edge/community \
    --repository http://dl-cdn.alpinelinux.org/alpine/edge/main \
    --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing \
    build-base mediainfo
COPY pom.xml /tmp/
COPY src /tmp/src/
WORKDIR /tmp/
RUN mvn clean compile assembly:single

RUN pip3 install --upgrade pip
FROM bellsoft/liberica-openjdk-alpine:latest
ENV LANG=C.UTF-8

RUN pip3 install sanic==19.9.0
# add wget to manage the health check...
RUN apk add --no-cache wget

RUN pip3 install sanic-cors

RUN pip3 install sanic-simple-swagger

RUN pip3 install python-dateutil

RUN pip3 install realog

RUN pip3 install python-magic

RUN pip3 install pymediainfo

EXPOSE 80

ADD src /application/
RUN mkdir /application/
COPY --from=build /tmp/out/maven/*.jar /application/application.jar
WORKDIR /application/
CMD ["python3", "-u", "./app_video.py"]

EXPOSE 18080

CMD ["java", "-Xms64M", "-Xmx1G", "-cp", "/application/application.jar", "org.kar.karideo.WebLauncher"]

back/Formatter.xml (new file, 366 added lines)
@@ -0,0 +1,366 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE xml>
|
||||
<profiles version="18">
|
||||
<profile kind="CodeFormatterProfile" name="Scenarium" version="18">
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_logical_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_with_spaces" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_logical_operator" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="200"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_method_body_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_additive_operator" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_relational_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_shift_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_loops" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_relational_operator" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_additive_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.text_block_indentation" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_between_different_tags" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_additive_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_shift_operator" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_code_block_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_not_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_logical_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_relational_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_tag_description" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_string_concatenation" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_logical_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_shift_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_shift_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line" value="one_line_if_empty"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_additive_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_relational_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_string_concatenation" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="200"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
|
||||
</profile>
|
||||
</profiles>
|
6
back/LICENSE
Normal file
@ -0,0 +1,6 @@
|
||||
PROPRIETARY license
|
||||
==================
|
||||
|
||||
Copyright Edouard DUPIN
|
||||
|
||||
You have no rights on this software.
|
25
back/README.md
Normal file
@ -0,0 +1,25 @@
|
||||
Generic backend for karideo in Java
|
||||
===================================
|
||||
|
||||
|
||||
mvn install
|
||||
|
||||
mvn compile
|
||||
|
||||
mvn package
|
||||
|
||||
// download all dependencies into out/maven/dependency
|
||||
mvn dependency:copy-dependencies
|
||||
|
||||
java -cp out/maven/kar-karideo-0.1.0.jar org.kar.karideo.WebLauncher
|
||||
|
||||
|
||||
// create a single jar that bundles all dependencies
|
||||
mvn clean compile assembly:single
|
||||
|
||||
|
||||
|
||||
java -cp out/maven/karideo-0.1.0-jar-with-dependencies.jar org.kar.karideo.WebLauncher
|
||||
|
||||
|
||||
|
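Taken together, the commands above amount to the sequence below. This is only a hedged sketch: it assumes a standard Maven/JDK setup and that the artifact names under out/maven/ match the version declared in pom.xml.

```
# compile, package, and copy the third-party jars next to the build output
mvn install dependency:copy-dependencies

# run the launcher with the project jar and its dependencies on the classpath
# (check the actual file names under out/maven/ before reusing this line)
java -cp "out/maven/*:out/maven/dependency/*" org.kar.karideo.WebLauncher

# or build and run the single self-contained jar
mvn clean compile assembly:single
java -cp out/maven/karideo-0.1.0-jar-with-dependencies.jar org.kar.karideo.WebLauncher
```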
Before Width: | Height: | Size: 7.6 KiB After Width: | Height: | Size: 7.6 KiB |
Before Width: | Height: | Size: 5.4 KiB After Width: | Height: | Size: 5.4 KiB |
Before Width: | Height: | Size: 2.7 KiB After Width: | Height: | Size: 2.7 KiB |
Before Width: | Height: | Size: 3.6 KiB After Width: | Height: | Size: 3.6 KiB |
Before Width: | Height: | Size: 3.6 KiB After Width: | Height: | Size: 3.6 KiB |
Before Width: | Height: | Size: 3.4 KiB After Width: | Height: | Size: 3.4 KiB |
Before Width: | Height: | Size: 2.5 KiB After Width: | Height: | Size: 2.5 KiB |
Before Width: | Height: | Size: 3.6 KiB After Width: | Height: | Size: 3.6 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 2.6 KiB After Width: | Height: | Size: 2.6 KiB |
@ -1,12 +0,0 @@
|
||||
version: '3'
|
||||
services:
|
||||
REST_video_service:
|
||||
build: .
|
||||
restart: always
|
||||
image: yui.heero/video_rest_api
|
||||
container_name: video_rest_api
|
||||
ports:
|
||||
- 15080:80
|
||||
volumes:
|
||||
- ./data/data_karideo:/application/data
|
||||
|
257
back/pom.xml
Normal file
@ -0,0 +1,257 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.kar</groupId>
|
||||
<artifactId>karideo</artifactId>
|
||||
<version>0.3.0</version>
|
||||
<properties>
|
||||
<maven.compiler.version>3.1</maven.compiler.version>
|
||||
<maven.compiler.source>21</maven.compiler.source>
|
||||
<maven.compiler.target>21</maven.compiler.target>
|
||||
<maven.dependency.version>3.1.1</maven.dependency.version>
|
||||
</properties>
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>gitea</id>
|
||||
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>kangaroo-and-rabbit</groupId>
|
||||
<artifactId>archidata</artifactId>
|
||||
<version>0.12.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-simple</artifactId>
|
||||
<version>2.1.0-alpha1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||
<version>2.17.1</version>
|
||||
</dependency>
|
||||
<!--
|
||||
************************************************************
|
||||
** TEST dependency **
|
||||
************************************************************
|
||||
-->
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<version>5.11.0-M2</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<version>5.11.0-M2</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.revelc.code.formatter</groupId>
|
||||
<artifactId>formatter-maven-plugin</artifactId>
|
||||
<version>2.24.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<version>3.3.1</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<sourceDirectory>src</sourceDirectory>
|
||||
<testSourceDirectory>test/src</testSourceDirectory>
|
||||
<directory>${project.basedir}/out/maven/</directory>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>src/resources</directory>
|
||||
</resource>
|
||||
</resources>
|
||||
<testResources>
|
||||
<testResource>
|
||||
<directory>${basedir}/test/resources</directory>
|
||||
</testResource>
|
||||
</testResources>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>${maven.compiler.version}</version>
|
||||
<configuration>
|
||||
<source>${maven.compiler.source}</source>
|
||||
<target>${maven.compiler.target}</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<version>1.4.0</version>
|
||||
<configuration>
|
||||
<mainClass>org.kar.karideo.WebLauncher</mainClass>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Create the source bundle -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-source-plugin</artifactId>
|
||||
<version>3.2.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>attach-sources</id>
|
||||
<goals>
|
||||
<goal>jar</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!-- junit results -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<version>3.0.0-M5</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<configuration>
|
||||
<archive>
|
||||
<manifest>
|
||||
<mainClass>org.kar.karideo.WebLauncher</mainClass>
|
||||
</manifest>
|
||||
</archive>
|
||||
<descriptorRefs>
|
||||
<descriptorRef>jar-with-dependencies</descriptorRef>
|
||||
</descriptorRefs>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Create coverage -->
|
||||
<!--
|
||||
<plugin>
|
||||
<groupId>org.jacoco</groupId>
|
||||
<artifactId>jacoco-maven-plugin</artifactId>
|
||||
<version>0.8.10</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>prepare-agent</id>
|
||||
<goals>
|
||||
<goal>prepare-agent</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>report</id>
|
||||
<phase>test</phase>
|
||||
<goals>
|
||||
<goal>report</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>jacoco-check</id>
|
||||
<goals>
|
||||
<goal>check</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<rules>
|
||||
<rule>
|
||||
<element>PACKAGE</element>
|
||||
<limits>
|
||||
<limit>
|
||||
<counter>LINE</counter>
|
||||
<value>COVEREDRATIO</value>
|
||||
<minimum>0.50</minimum>
|
||||
</limit>
|
||||
</limits>
|
||||
</rule>
|
||||
</rules>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
-->
|
||||
<!-- Java-doc generation for stand-alone site -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<configuration>
|
||||
<show>private</show>
|
||||
<nohelp>true</nohelp>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>exec-application</id>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>java</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<mainClass>org.kar.karideo.WebLauncher</mainClass>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>net.revelc.code.formatter</groupId>
|
||||
<artifactId>formatter-maven-plugin</artifactId>
|
||||
<version>2.23.0</version>
|
||||
<configuration>
|
||||
<encoding>UTF-8</encoding>
|
||||
<lineEnding>LF</lineEnding>
|
||||
<configFile>Formatter.xml</configFile>
|
||||
<directories>
|
||||
<directory>src/</directory>
|
||||
<directory>test/src</directory>
|
||||
</directories>
|
||||
<includes>
|
||||
<include>**/*.java</include>
|
||||
</includes>
|
||||
<excludes>
|
||||
<exclude>module-info.java</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>validate</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>com.github.spotbugs</groupId>
|
||||
<artifactId>spotbugs-maven-plugin</artifactId>
|
||||
<version>4.8.5.0</version>
|
||||
<configuration>
|
||||
<includeFilterFile>spotbugs-security-include.xml</includeFilterFile>
|
||||
<excludeFilterFile>spotbugs-security-exclude.xml</excludeFilterFile>
|
||||
<!--<plugins>
|
||||
<plugin>
|
||||
<groupId>com.h3xstream.findsecbugs</groupId>
|
||||
<artifactId>findsecbugs-plugin</artifactId>
|
||||
<version>1.12.0</version>
|
||||
</plugin>
|
||||
</plugins>
|
||||
-->
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
<!-- Generate Java-docs As Part Of Project Reports -->
|
||||
<reporting>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<configuration>
|
||||
<show>public</show>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</reporting>
|
||||
</project>
|
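Given the plugins declared in this pom, the goals below can be invoked directly. This is a sketch of typical invocations rather than a documented project workflow; the goal names come from the respective plugins.

```
# apply the Eclipse formatter profile referenced by <configFile>Formatter.xml</configFile>
mvn formatter:format

# run the unit tests through surefire, then build the jar-with-dependencies
mvn test
mvn clean compile assembly:single

# static analysis and stand-alone javadoc
mvn spotbugs:check
mvn javadoc:javadoc

# start the backend through the exec plugin (mainClass is configured above)
mvn exec:java
```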
@ -1,56 +0,0 @@
|
||||
REST video API
|
||||
==============
|
||||
|
||||
REST API for video streaming for personal web / application interface
|
||||
|
||||
|
||||
Download the project
|
||||
====================
|
||||
|
||||
Simply clone the sources:
|
||||
```
|
||||
mkdir WORKSPACE && cd $_
|
||||
git clone http://xxx/HeeroYui/rest_video.git restvideo
|
||||
cd rest_video
|
||||
```
|
||||
|
||||
**Note:** It is important to remove ```-``` and ```_``` because some Docker setups strip these characters from the network name _(like Ubuntu ...)_
|
||||
**Note:** The network name of a docker-compose project is ```thefoldername_default```
|
||||
|
||||
|
||||
Run the application
|
||||
===================
|
||||
|
||||
Start the application:
|
||||
```
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
Stop the application:
|
||||
```
|
||||
docker-compose down
|
||||
```
|
||||
|
||||
Restart the application (on the fly):
|
||||
```
|
||||
docker-compose up -d --force-recreate --build
|
||||
```
|
||||
|
||||
|
||||
|
||||
Run the application (debug)
|
||||
===========================
|
||||
Before the first run:
|
||||
```
|
||||
cp -r data_base data
|
||||
```
|
||||
|
||||
```
|
||||
./src/app_video.py
|
||||
```
|
||||
|
||||
or
|
||||
```
|
||||
SANIC_REST_PORT=15080 ./src/app_video.py
|
||||
```
|
||||
|
17
back/release_karideo_back/Dockerfile
Normal file
@ -0,0 +1,17 @@
|
||||
FROM bellsoft/liberica-openjdk-alpine:latest
|
||||
|
||||
ENV LANG=C.UTF-8
|
||||
#ENV JAVA_HOME=/usr/lib/jvm/java-14-openjdk
|
||||
#ENV JAVAFX_HOME=$JAVA_HOME
|
||||
#ENV PATH=/usr/lib/jvm/java-14-openjdk/bin/:$PATH
|
||||
#ENV JAVA_VERSION=14.0.2
|
||||
|
||||
|
||||
RUN mkdir /application/
|
||||
ADD karideo.jar /application/
|
||||
WORKDIR /application/
|
||||
|
||||
EXPOSE 18080
|
||||
|
||||
CMD ["java", "-cp", "/application/karideo.jar", "org.kar.karideo.WebLauncher"]
|
||||
|
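A minimal way to exercise this Dockerfile by hand could look like the following; the image tag, container name, host port, and volume paths are illustrative and simply mirror the compose file that follows.

```
# build the image from the directory containing the Dockerfile and karideo.jar
docker build -t org.kar/karideo .

# run it, mapping the exposed port 18080 and mounting the runtime configuration
docker run -d --name karideo_back \
    -p 22080:18080 \
    -v "$(pwd)/properties.txt:/application/properties.txt" \
    -v /workspace/data/karideo/media:/application/data \
    org.kar/karideo
```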
12
back/release_karideo_back/docker-compose.yaml
Normal file
@ -0,0 +1,12 @@
|
||||
version: '3'
|
||||
services:
|
||||
karideo_back_service_2:
|
||||
build: .
|
||||
restart: always
|
||||
image: org.kar/karideo
|
||||
container_name: org.kar.karideo
|
||||
ports:
|
||||
- 22080:18080
|
||||
volumes:
|
||||
- ./properties.txt:/application/properties.txt
|
||||
- /workspace/data/karideo/media:/application/data
|
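The compose file can be driven with the usual commands; a short, hedged reminder, assuming it is run from back/release_karideo_back/:

```
# build the image and start the service in the background
docker-compose up -d --build

# follow the backend logs, then stop and remove the container
docker-compose logs -f karideo_back_service_2
docker-compose down
```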
BIN
back/release_karideo_back/karideo.jar
Normal file
9
back/release_karideo_back/propertyies.txt
Normal file
@ -0,0 +1,9 @@
|
||||
org.kar.karideo.dataTmpFolder=/application/data/tmp
|
||||
org.kar.karideo.dataTmpFolder=/application/data/media
|
||||
org.kar.karideo.rest.oauth=http://192.168.1.156:21080/oauth/api/
|
||||
org.kar.karideo.db.host=192.168.1.156
|
||||
org.kar.karideo.db.port=20306
|
||||
org.kar.karideo.db.login=root
|
||||
org.kar.karideo.db.password=klkhj456gkgtkhjgvkujfhjgkjhgsdfhb3467465fgdhdesfgh
|
||||
org.kar.karideo.db.name=karideo
|
||||
org.kar.karideo.address=http://0.0.0.0:18080/karideo/api/
|
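For a local run outside Docker, the same configuration could be used as sketched below; the assumption that the launcher reads properties.txt from its working directory is inferred from the volume mount and WORKDIR above, not from documented behaviour.

```
# assumed layout: karideo.jar and properties.txt side by side in the current directory
java -cp karideo.jar org.kar.karideo.WebLauncher
```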
@ -1,218 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
from aiofiles import os as async_os
|
||||
|
||||
from pymediainfo import MediaInfo
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
from sanic.response import file_stream
|
||||
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
import hashlib
|
||||
import shutil
|
||||
|
||||
tmp_value = 0
|
||||
|
||||
#curl -F 'file=@Totally_Spies.mp4;type=application/octet-stream' -H 'transfer-encoding:chunked' 127.0.0.1:15080/data -X POST -O; echo ;
|
||||
|
||||
def add(_app, _name_api):
|
||||
elem_blueprint = Blueprint(_name_api)
|
||||
"""
|
||||
@elem_blueprint.get('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Show saisons")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def list(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).gets())
|
||||
"""
|
||||
|
||||
class DataModelBdd:
|
||||
id = int
|
||||
size = int
|
||||
sha512 = str
|
||||
mime_type = str
|
||||
original_name = [str, type(None)]
|
||||
# creating time
|
||||
create_date = str
|
||||
|
||||
data_global_elements.get_interface(_name_api).set_data_model(DataModelBdd)
|
||||
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/exist/<sha512:string>', strict_slashes=True)
|
||||
@doc.summary("check resource existance")
|
||||
@doc.description("simply check if the resource is already uploaded.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def check_existance(request, sha512):
|
||||
value = data_global_elements.get_interface(_name_api).gets_where(select=[["==", "sha512", sha512]], filter=["id"])
|
||||
if value != None:
|
||||
return response.json({"found":True})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
|
||||
@elem_blueprint.post('/' + _name_api, strict_slashes=True, stream=True)
|
||||
@doc.summary("send new file data")
|
||||
@doc.description("Create a new data file (associated with his sha512.")
|
||||
#@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def create(_request):
|
||||
debug.info("request streaming " + str(_request));
|
||||
args_with_blank_values = _request.headers
|
||||
debug.info("List arguments: " + str(args_with_blank_values));
|
||||
async def streaming(_response):
|
||||
#debug.info("streaming " + str(_response));
|
||||
total_size = 0
|
||||
temporary_file = os.path.join(_app.config['REST_TMP_DATA'], str(tmp_value) + ".tmp")
|
||||
if not os.path.exists(_app.config['REST_TMP_DATA']):
|
||||
os.makedirs(_app.config['REST_TMP_DATA'])
|
||||
if not os.path.exists(_app.config['REST_MEDIA_DATA']):
|
||||
os.makedirs(_app.config['REST_MEDIA_DATA'])
|
||||
file_stream = open(temporary_file,"wb")
|
||||
sha1 = hashlib.sha512()
|
||||
while True:
|
||||
#debug.warning("ploufffff " + str(dir(_request.stream)))
|
||||
body = await _request.stream.read()
|
||||
if body is None:
|
||||
debug.warning("empty body");
|
||||
break
|
||||
total_size += len(body)
|
||||
debug.verbose("body " + str(len(body)) + "/" + str(total_size))
|
||||
file_stream.write(body)
|
||||
sha1.update(body)
|
||||
file_stream.close()
|
||||
print("SHA512: " + str(sha1.hexdigest()))
|
||||
|
||||
new_data = {
|
||||
"size": total_size,
|
||||
"sha512": str(sha1.hexdigest()),
|
||||
'original_name': _request.headers["filename"],
|
||||
'mime_type': _request.headers["mime-type"],
|
||||
'create_date': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
|
||||
}
|
||||
# TODO: Check if the element already exists ...
|
||||
|
||||
return_bdd = data_global_elements.get_interface(_name_api).post(new_data)
|
||||
|
||||
basic_data_path = os.path.join(_app.config['REST_MEDIA_DATA'], str(return_bdd["id"]))
|
||||
|
||||
if not os.path.exists(basic_data_path):
|
||||
os.makedirs(basic_data_path)
|
||||
destination_filename = os.path.join(basic_data_path, "video")
|
||||
"""
|
||||
if os.path.isfile(destination_filename) == True:
|
||||
answer_data = {
|
||||
"size": total_size,
|
||||
"sha512": str(sha1.hexdigest()),
|
||||
'filename': _request.headers["filename"],
|
||||
'mime_type': _request.headers["mime-type"],
|
||||
"already_exist": True,
|
||||
}
|
||||
await _response.write(json.dumps(answer_data, sort_keys=True, indent=4))
|
||||
return
|
||||
"""
|
||||
|
||||
# move the file
|
||||
shutil.move(temporary_file, destination_filename)
|
||||
# collect media info ...
|
||||
media_info = MediaInfo.parse(destination_filename)
|
||||
data_metafile = {
|
||||
"sha512": str(sha1.hexdigest()),
|
||||
"size": total_size,
|
||||
'filename': _request.headers["filename"],
|
||||
'mime_type': _request.headers["mime-type"],
|
||||
'media_info': json.loads(media_info.to_json())
|
||||
}
|
||||
tools.file_write_data(os.path.join(basic_data_path, "meta.json"), json.dumps(data_metafile, sort_keys=True, indent=4))
|
||||
await _response.write(json.dumps(return_bdd, sort_keys=True, indent=4))
|
||||
return response.stream(streaming, content_type='application/json')
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("get a specific resource")
|
||||
@doc.description("Get a resource with all the needed datas ... It permeit seek for video stream.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive(request, id):
|
||||
debug.warning("Request data media 2 : " + str(id));
|
||||
"""
|
||||
if id[-4:] == ".mp4":
|
||||
id = id[:-4]
|
||||
if id[-4:] == ".mkv":
|
||||
id = id[:-4]
|
||||
"""
|
||||
filename = os.path.join(_app.config['REST_MEDIA_DATA'], str(id), "video")
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
debug.info("plouuuuuuf " + str(value))
|
||||
headers = {
|
||||
'Content-Type': value["mime_type"],
|
||||
'Accept-Ranges': 'bytes'
|
||||
}
|
||||
try:
|
||||
with open(filename, 'rb') as fff:
|
||||
range_start = None
|
||||
range_end = None
|
||||
fff.seek(0, 2)
|
||||
file_length = fff.tell()
|
||||
fff.seek(0)
|
||||
try:
|
||||
range_ = '0-' + str(file_length)
|
||||
if 'range' in request.headers:
|
||||
range_ = request.headers['range'].split('=')[1]
|
||||
range_split = range_.split('-')
|
||||
range_start = int(range_split[0])
|
||||
fff.seek(range_start)
|
||||
range_end = int(range_split[1])
|
||||
except ValueError:
|
||||
pass
|
||||
if range_start and range_start != 0:
|
||||
if not range_end:
|
||||
range_end = file_length
|
||||
read_length = range_end - range_start
|
||||
else:
|
||||
range_start = 0
|
||||
read_length = file_length
|
||||
range_end = file_length
|
||||
fff.seek(range_start)
|
||||
headers['Content-Length'] = read_length
|
||||
headers['Content-Range'] = f'bytes {range_start}-{range_end-1}/{file_length}'
|
||||
async def streaming_fn(response):
|
||||
with open(filename, 'rb') as fff:
|
||||
chunk_size = 8192
|
||||
current_offset = range_start
|
||||
while current_offset < range_end:
|
||||
chunk_start = current_offset
|
||||
fff.seek(current_offset)
|
||||
chunk_data = fff.read(min(chunk_size, range_end - current_offset))
|
||||
current_offset += chunk_size
|
||||
await response.write(chunk_data)
|
||||
return response.stream(streaming_fn, headers=headers, status=206)
|
||||
except FileNotFoundError:
|
||||
return response.HTTPResponse(status=404)
|
||||
|
||||
|
||||
_app.blueprint(elem_blueprint)
|
||||
|
||||
|
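For reference, the upload and ranged-download handlers of this legacy Sanic module can be exercised with curl along the lines of the commented example near the top of the file; the host/port and the "data" mount point are taken from that comment, and the file name is a placeholder.

```
# upload a media file; the handler reads the 'filename' and 'mime-type' headers
curl -X POST http://127.0.0.1:15080/data \
     -H 'filename: my_video.mp4' \
     -H 'mime-type: video/mp4' \
     -H 'transfer-encoding: chunked' \
     --data-binary @my_video.mp4

# fetch part of a stored video; the handler answers 206 with a Content-Range header
curl -H 'Range: bytes=0-1048575' http://127.0.0.1:15080/data/1 -o first_chunk.bin
```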
@ -1,153 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
def add(_app, _name_api):
|
||||
elem_blueprint = Blueprint(_name_api)
|
||||
|
||||
class DataModelBdd:
|
||||
id = int
|
||||
name = str
|
||||
covers = [[], type(None)]
|
||||
|
||||
data_global_elements.get_interface(_name_api).set_data_model(DataModelBdd)
|
||||
|
||||
class DataModel:
|
||||
name = str
|
||||
|
||||
@elem_blueprint.get('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def list(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).gets())
|
||||
|
||||
@elem_blueprint.post('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Create new resource")
|
||||
@doc.description("Store a newly created resource in storage.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def create(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).post(request.json))
|
||||
|
||||
@elem_blueprint.post('/' + _name_api + "/find", strict_slashes=True)
|
||||
@doc.summary("Create new resource if the name does not already exist")
|
||||
@doc.description("Store a newly created resource in storage.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def find_with_name(request):
|
||||
api = data_global_elements.get_interface(_name_api)
|
||||
for elem in api.bdd:
|
||||
if elem["name"] == request.json["name"]:
|
||||
return response.json({"id": elem["id"]})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive(request, id):
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/video_all', strict_slashes=True)
|
||||
@doc.summary("get all videos list")
|
||||
@doc.description("List all the videos availlable for this group.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_video(request, id):
|
||||
value = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "group_id", id]], filter=["id"])
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/video', strict_slashes=True)
|
||||
@doc.summary("get videos list who have no saison")
|
||||
@doc.description("List all the videos availlable for this group tht does not depend on saison.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_video_no_saison(request, id):
|
||||
value = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "group_id", id], ["==", "saison_id", None]], filter=["id"])
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/saison', strict_slashes=True)
|
||||
@doc.summary("get videos list who have no saison")
|
||||
@doc.description("List all the videos availlable for this group tht does not depend on saison.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_saison(request, id):
|
||||
value = data_global_elements.get_interface(data_global_elements.API_SAISON).gets_where(select=[["==", "group_id", id]], filter=["id"])
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.put('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Update resource")
|
||||
@doc.description("Update the specified resource in storage.")
|
||||
@doc.response_success(status=201, description='If successful updated')
|
||||
async def update(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).put(id)
|
||||
return response.json({})
|
||||
|
||||
@elem_blueprint.delete('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Remove resource")
|
||||
@doc.description("Remove the specified resource from storage.")
|
||||
@doc.response_success(status=201, description='If successful deleted')
|
||||
async def delete(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).delete(id)
|
||||
if ret == True:
|
||||
return response.json({})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.post('/' + _name_api + "/<id:int>/add_cover", strict_slashes=True)
|
||||
@doc.summary("Add cover on video")
|
||||
@doc.description("Add a cover data ID to the video.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful added')
|
||||
async def create(request, id):
|
||||
for type_key in ["data_id"]:
|
||||
if type_key not in request.json.keys():
|
||||
raise ServerError("Bad Request: Missing Key '" + type_key + "'", status_code=400)
|
||||
# TODO: check if it is a number...
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value == None:
|
||||
raise ServerError("No data found", status_code=404)
|
||||
if "covers" not in value.keys():
|
||||
value["covers"] = [];
|
||||
|
||||
for elem in value["covers"]:
|
||||
if request.json["data_id"] == elem:
|
||||
return response.json(elem)
|
||||
value["covers"].append(request.json["data_id"]);
|
||||
data_global_elements.get_interface(_name_api).set(id, value)
|
||||
return response.json(request.json["data_id"])
|
||||
|
||||
_app.blueprint(elem_blueprint)
|
@ -1,43 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
def add(_app):
|
||||
@_app.route("/")
|
||||
@doc.description("get api system information")
|
||||
async def test(request):
|
||||
return response.json({
|
||||
"api-type": "video-broker",
|
||||
"api-version": _app.config['API_VERSION'],
|
||||
"title": _app.config['API_TITLE'],
|
||||
"description": _app.config['API_DESCRIPTION'],
|
||||
"contact": _app.config['API_CONTACT_EMAIL'],
|
||||
"licence": _app.config['API_LICENSE_NAME']
|
||||
})
|
@ -1,136 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
def add(_app, _name_api):
|
||||
elem_blueprint = Blueprint(_name_api)
|
||||
|
||||
class DataModelBdd:
|
||||
id = int
|
||||
number = int
|
||||
group_id = int
|
||||
covers = [[], type(None)]
|
||||
|
||||
data_global_elements.get_interface(_name_api).set_data_model(DataModelBdd)
|
||||
|
||||
class DataModel:
|
||||
number = int
|
||||
group_id = int
|
||||
|
||||
@elem_blueprint.get('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Show saisons")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def list(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).gets())
|
||||
|
||||
@elem_blueprint.post('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Create new saison")
|
||||
@doc.description("Create a new saison for a aspecific group id.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def create(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).post(request.json))
|
||||
|
||||
@elem_blueprint.post('/' + _name_api + "/find", strict_slashes=True)
|
||||
@doc.summary("find a season existance")
|
||||
@doc.description("return the ID of the season table.")
|
||||
@doc.consumes(DataModel, location='body')
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def find_with_name(request):
|
||||
api = data_global_elements.get_interface(_name_api)
|
||||
for elem in api.bdd:
|
||||
if elem["group_id"] == request.json["group_id"] \
|
||||
and elem["number"] == request.json["number"]:
|
||||
return response.json({"id": elem["id"]})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/video', strict_slashes=True)
|
||||
@doc.summary("Show videos")
|
||||
@doc.description("List all the videos availlable for this group.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_video(request, id):
|
||||
value = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "saison_id", id]], filter=["id"])
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive(request, id):
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.put('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Update resource")
|
||||
@doc.description("Update the specified resource in storage.")
|
||||
@doc.response_success(status=201, description='If successful updated')
|
||||
async def update(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).put(id)
|
||||
return response.json({})
|
||||
|
||||
@elem_blueprint.delete('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Remove resource")
|
||||
@doc.description("Remove the specified resource from storage.")
|
||||
@doc.response_success(status=201, description='If successful deleted')
|
||||
async def delete(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).delete(id)
|
||||
if ret == True:
|
||||
return response.json({})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.post('/' + _name_api + "/<id:int>/add_cover", strict_slashes=True)
|
||||
@doc.summary("Add cover on video")
|
||||
@doc.description("Add a cover data ID to the video.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful added')
|
||||
async def create(request, id):
|
||||
for type_key in ["data_id"]:
|
||||
if type_key not in request.json.keys():
|
||||
raise ServerError("Bad Request: Missing Key '" + type_key + "'", status_code=400)
|
||||
# TODO: check if it is a number...
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value == None:
|
||||
raise ServerError("No data found", status_code=404)
|
||||
if "covers" not in value.keys():
|
||||
value["covers"] = [];
|
||||
|
||||
for elem in value["covers"]:
|
||||
if request.json["data_id"] == elem:
|
||||
return response.json(elem)
|
||||
value["covers"].append(request.json["data_id"]);
|
||||
data_global_elements.get_interface(_name_api).set(id, value)
|
||||
return response.json(request.json["data_id"])
|
||||
|
||||
_app.blueprint(elem_blueprint)
|
@ -1,130 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
def add(_app, _name_api):
|
||||
elem_blueprint = Blueprint(_name_api)
|
||||
|
||||
class DataModelBdd:
|
||||
id = int
|
||||
name = str
|
||||
description = str
|
||||
|
||||
data_global_elements.get_interface(_name_api).set_data_model(DataModelBdd)
|
||||
|
||||
class DataModel:
|
||||
name = str
|
||||
description = str
|
||||
|
||||
@elem_blueprint.get('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def list(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).gets())
|
||||
|
||||
@elem_blueprint.post('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Create new resource")
|
||||
@doc.description("Store a newly created resource in storage.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def create(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).post(request.json))
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive(request, id):
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.put('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Update resource")
|
||||
@doc.description("Update the specified resource in storage.")
|
||||
@doc.response_success(status=201, description='If successful updated')
|
||||
async def update(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).put(id)
|
||||
return response.json({})
|
||||
|
||||
@elem_blueprint.delete('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Remove resource")
|
||||
@doc.description("Remove the specified resource from storage.")
|
||||
@doc.response_success(status=201, description='If successful deleted')
|
||||
async def delete(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).delete(id)
|
||||
if ret == True:
|
||||
return response.json({})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/count', strict_slashes=True)
|
||||
@doc.summary("Count resources in this cathegory")
|
||||
@doc.description("count resources in this cathegory, in the whole tree.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def count_values(request, id):
|
||||
count_value = data_global_elements.get_interface(data_global_elements.API_VIDEO).count(select=[["==", "type_id", id]])
|
||||
return response.json({"count":count_value})
|
||||
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/video_all', strict_slashes=True)
|
||||
@doc.summary("List the whole video ids even if they are in a group or a univers...")
|
||||
@doc.description("List all video availlable with this type (list of ids).")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_video(request, id):
|
||||
list_values = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "type_id", id]], filter=["id"])
|
||||
return response.json(list_values)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/video', strict_slashes=True)
|
||||
@doc.summary("List the whole video free")
|
||||
@doc.description("List all video availlable with this type ... not link with an univers or a group.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_video_no_group(request, id):
|
||||
list_values = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "type_id", id], ["==", "group_id", None], ["==", "univers_id", None]], filter=["id"])
|
||||
return response.json(list_values)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/group', strict_slashes=True)
|
||||
@doc.summary("List all group availlable.")
|
||||
@doc.description("List all groups availlable in this type (not depending of an univers).")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_group(request, id):
|
||||
list_values = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "type_id", id], ["!=", "group_id", None], ["==", "univers_id", None]], filter=["group_id"])
|
||||
return response.json(list_values)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/univers', strict_slashes=True)
|
||||
@doc.summary("List all univers availlable.")
|
||||
@doc.description("List all univers availlable.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_group(request, id):
|
||||
list_values = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "type_id", id], ["!=", "univers_id", None]], filter=["univers_id"])
|
||||
return response.json(list_values)
|
||||
|
||||
_app.blueprint(elem_blueprint)
|
@ -1,146 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
def add(_app, _name_api):
|
||||
elem_blueprint = Blueprint(_name_api)
|
||||
|
||||
class DataModelBdd:
|
||||
id = int
|
||||
name = str
|
||||
description = str
|
||||
covers = [[], type(None)]
|
||||
|
||||
data_global_elements.get_interface(_name_api).set_data_model(DataModelBdd)
|
||||
|
||||
class DataModel:
|
||||
name = str
|
||||
description = str
|
||||
|
||||
@elem_blueprint.get('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def list(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).gets())
|
||||
|
||||
@elem_blueprint.post('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Create new resource")
|
||||
@doc.description("Store a newly created resource in storage.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def create(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).post(request.json))
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive(request, id):
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value != None:
|
||||
return response.json(value)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.put('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Update resource")
|
||||
@doc.description("Update the specified resource in storage.")
|
||||
@doc.response_success(status=201, description='If successful updated')
|
||||
async def update(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).put(id)
|
||||
return response.json({})
|
||||
|
||||
@elem_blueprint.delete('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Remove resource")
|
||||
@doc.description("Remove the specified resource from storage.")
|
||||
@doc.response_success(status=201, description='If successful deleted')
|
||||
async def delete(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).delete(id)
|
||||
if ret == True:
|
||||
return response.json({})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/count', strict_slashes=True)
|
||||
@doc.summary("Count resources in this cathegory")
|
||||
@doc.description("count resources in this cathegory, in the whole tree.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def count_values(request, id):
|
||||
count_value = data_global_elements.get_interface(data_global_elements.API_VIDEO).count(select=[["==", "univers_id", id]])
|
||||
return response.json({"count":count_value})
|
||||
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/video_all', strict_slashes=True)
|
||||
@doc.summary("List the whole video ids even if they are in a group or a univers...")
|
||||
@doc.description("List all video availlable with this univers (list of ids).")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_video(request, id):
|
||||
list_values = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "univers_id", id]], filter=["id"])
|
||||
return response.json(list_values)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/video', strict_slashes=True)
|
||||
@doc.summary("List the whole video free")
|
||||
@doc.description("List all video availlable with this univers ... not link with an univers or a group.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_video_no_group(request, id):
|
||||
list_values = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "univers_id", id], ["==", "group_id", None]], filter=["id"])
|
||||
return response.json(list_values)
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>/group', strict_slashes=True)
|
||||
@doc.summary("List all group availlable.")
|
||||
@doc.description("List all groups availlable in this univers (not depending of an univers).")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive_group(request, id):
|
||||
list_values = data_global_elements.get_interface(data_global_elements.API_VIDEO).gets_where(select=[["==", "univers_id", id], ["!=", "group_id", None]], filter=["group_id"])
|
||||
return response.json(list_values)
|
||||
|
||||
@elem_blueprint.post('/' + _name_api + "/<id:int>/add_cover", strict_slashes=True)
|
||||
@doc.summary("Add cover on video")
|
||||
@doc.description("Add a cover data ID to the video.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful added')
|
||||
async def create(request, id):
|
||||
for type_key in ["data_id"]:
|
||||
if type_key not in request.json.keys():
|
||||
raise ServerError("Bad Request: Missing Key '" + type_key + "'", status_code=400)
|
||||
# TODO: check if it is a number...
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value == None:
|
||||
raise ServerError("No data found", status_code=404)
|
||||
if "covers" not in value.keys():
|
||||
value["covers"] = [];
|
||||
|
||||
for elem in value["covers"]:
|
||||
if request.json["data_id"] == elem:
|
||||
return response.json(elem)
|
||||
value["covers"].append(request.json["data_id"]);
|
||||
data_global_elements.get_interface(_name_api).set(id, value)
|
||||
return response.json(request.json["data_id"])
|
||||
|
||||
_app.blueprint(elem_blueprint)
|
@ -1,191 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import copy
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
def generate_name(_value):
|
||||
group_name = ""
|
||||
if "univers_id" in _value.keys():
|
||||
univers_property = data_global_elements.get_interface(data_global_elements.API_UNIVERS).get(_value["univers_id"])
|
||||
if univers_property != None:
|
||||
group_name = univers_property["name"] + ":"
|
||||
if "group_id" in _value.keys():
|
||||
group_property = data_global_elements.get_interface(data_global_elements.API_GROUP).get(_value["group_id"])
|
||||
if group_property != None:
|
||||
group_name = group_property["name"]
|
||||
saison_number = ""
|
||||
if "saison_id" in _value.keys():
|
||||
saison_property = data_global_elements.get_interface(data_global_elements.API_SAISON).get(_value["saison_id"])
|
||||
if saison_property != None:
|
||||
saison_number = str(saison_property["number"])
|
||||
if len(saison_number) == 1:
|
||||
saison_number = "0" + saison_number
|
||||
out = ""
|
||||
if group_name != "":
|
||||
out += group_name + "-"
|
||||
if saison_number != "":
|
||||
out += "s" + saison_number + "-"
|
||||
if "episode" in _value.keys() and _value["episode"] != None:
|
||||
if _value["episode"] < 10:
|
||||
out += "e00" + str(_value["episode"]) + "-"
|
||||
elif _value["episode"] < 100:
|
||||
out += "e0" + str(_value["episode"]) + "-"
|
||||
else:
|
||||
out += "e" + str(_value["episode"]) + "-"
|
||||
out += _value["name"]
|
||||
if "time" in _value.keys() and _value["time"] != None:
|
||||
out += "(" + _value["name"] + ")"
|
||||
return out
|
||||
|
||||
|
||||
def add(_app, _name_api):
|
||||
elem_blueprint = Blueprint(_name_api)
|
||||
|
||||
class DataModelBdd:
|
||||
id = int
|
||||
data_id = int
|
||||
type_id = int
|
||||
saison_id = [int, type(None)]
|
||||
episode = [int, type(None)]
|
||||
univers_id = [int, type(None)]
|
||||
group_id = [int, type(None)]
|
||||
name = str
|
||||
description = [str, type(None)]
|
||||
# creating time
|
||||
create_date = str
|
||||
# date of the video
|
||||
date = [int, type(None)]
|
||||
# number of second
|
||||
time = [int, type(None)]
|
||||
# number of second
|
||||
covers = [[], type(None)]
|
||||
|
||||
data_global_elements.get_interface(_name_api).set_data_model(DataModelBdd)
|
||||
|
||||
class DataModel:
|
||||
type_id = int
|
||||
saison_id = int
|
||||
episode = int
|
||||
univers_id = int
|
||||
group_id = int
|
||||
name = str
|
||||
description = str
|
||||
# creating time
|
||||
create_date = str
|
||||
# date of the video
|
||||
date = str
|
||||
# number of second
|
||||
time = int
|
||||
|
||||
@elem_blueprint.get('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Show saisons")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def list(request):
|
||||
return response.json(data_global_elements.get_interface(_name_api).gets())
|
||||
|
||||
@elem_blueprint.post('/' + _name_api, strict_slashes=True)
|
||||
@doc.summary("Create new saison")
|
||||
@doc.description("Create a new saison for a aspecific group id.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful created')
|
||||
async def create(request):
|
||||
for type_key in ["data_id","type_id","name"]:
|
||||
if type_key not in request.json.keys():
|
||||
raise ServerError("Bad Request: Missing Key '" + type_key + "'", status_code=400)
|
||||
for type_key in ["create_date"]:
|
||||
if type_key in request.json.keys():
|
||||
raise ServerError("Forbidden: Must not be set Key '" + type_key + "'", status_code=403)
|
||||
for type_key in ["saison_id","episode","date","time","univers_id","group_id","description"]:
|
||||
if type_key not in request.json.keys():
|
||||
request.json[type_key] = None
|
||||
request.json["create_date"] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
|
||||
# Find if it already exists
|
||||
list_elem = data_global_elements.get_interface(_name_api).find(["group_id", "data_id"], request.json);
|
||||
for elem in list_elem:
|
||||
return response.json(elem)
|
||||
|
||||
return response.json(data_global_elements.get_interface(_name_api).post(request.json))
|
||||
|
||||
@elem_blueprint.get('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Show resources")
|
||||
@doc.description("Display a listing of the resource.")
|
||||
@doc.produces(content_type='application/json')
|
||||
async def retrive(request, id):
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value != None:
|
||||
generated_name = generate_name(value)
|
||||
tmp = copy.deepcopy(value)
|
||||
tmp["generated_name"] = generated_name
|
||||
return response.json(tmp)
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.put('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Update resource")
|
||||
@doc.description("Update the specified resource in storage.")
|
||||
@doc.response_success(status=201, description='If successful updated')
|
||||
async def update(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).put(id)
|
||||
return response.json({})
|
||||
|
||||
@elem_blueprint.delete('/' + _name_api + '/<id:int>', strict_slashes=True)
|
||||
@doc.summary("Remove resource")
|
||||
@doc.description("Remove the specified resource from storage.")
|
||||
@doc.response_success(status=201, description='If successful deleted')
|
||||
async def delete(request, id):
|
||||
ret = data_global_elements.get_interface(_name_api).delete(id)
|
||||
if ret == True:
|
||||
return response.json({})
|
||||
raise ServerError("No data found", status_code=404)
|
||||
|
||||
@elem_blueprint.post('/' + _name_api + "/<id:int>/add_cover", strict_slashes=True)
|
||||
@doc.summary("Add cover on video")
|
||||
@doc.description("Add a cover data ID to the video.")
|
||||
@doc.consumes(DataModel, location='body')#, required=True)
|
||||
@doc.response_success(status=201, description='If successful added')
|
||||
async def add_cover(request, id):
|
||||
for type_key in ["data_id"]:
|
||||
if type_key not in request.json.keys():
|
||||
raise ServerError("Bad Request: Missing Key '" + type_key + "'", status_code=400)
|
||||
# TODO: check if it is a number...
|
||||
value = data_global_elements.get_interface(_name_api).get(id)
|
||||
if value == None:
|
||||
raise ServerError("No data found", status_code=404)
|
||||
if "covers" not in value.keys():
|
||||
value["covers"] = [];
|
||||
|
||||
for elem in value["covers"]:
|
||||
if request.json["data_id"] == elem:
|
||||
return response.json(elem)
|
||||
value["covers"].append(request.json["data_id"]);
|
||||
data_global_elements.get_interface(_name_api).set(id, value)
|
||||
return response.json(elem)
|
||||
|
||||
_app.blueprint(elem_blueprint)
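For reference, a minimal client-side sketch of how this create endpoint behaves (not part of the original sources; it assumes the service runs locally on the default REST_PORT, that no extra URL prefix is configured, and that _name_api resolves to "saison"). The handler requires data_id, type_id and name, rejects a client-supplied create_date, and returns the already existing record instead of creating a duplicate when one matches on group_id + data_id.

import requests  # illustrative HTTP client; any other client works

BASE_URL = "http://localhost:80/saison"  # assumption: local instance, default port, no prefix

payload = {
    "data_id": 42,       # mandatory
    "type_id": 1,        # mandatory
    "name": "Saison 1",  # mandatory
    "group_id": 3,       # optional, used together with data_id for duplicate detection
}

resp = requests.post(BASE_URL, json=payload)
resp.raise_for_status()
created = resp.json()
print(created["id"], created["create_date"])

# posting the same group_id/data_id again returns the existing element, no duplicate is created
resp2 = requests.post(BASE_URL, json=payload)
assert resp2.json()["id"] == created["id"]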
|
@ -1,164 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
#pip install flask --user
|
||||
#pip install flask_restful --user
|
||||
#pip install python-dateutil --user
|
||||
#pip install sanic --user
|
||||
#pip install sanic_simple_swagger --user
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from sanic import views
|
||||
from sanic import Blueprint
|
||||
from sanic.exceptions import ServerError
|
||||
from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
|
||||
from sanic_simple_swagger import doc
|
||||
from spf import SanicPluginsFramework
|
||||
|
||||
import dateutil.parser
|
||||
|
||||
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time, threading
|
||||
import realog.debug as debug
|
||||
|
||||
debug.enable_color()
|
||||
|
||||
import tools
|
||||
import data_interface
|
||||
import data_global_elements
|
||||
|
||||
|
||||
from sanic_cors.extension import cors
|
||||
app = Sanic(__name__)
|
||||
spf = SanicPluginsFramework(app)
|
||||
spf.register_plugin(cors, automatic_options=True)
|
||||
|
||||
app.config['API_VERSION'] = '1.0.0'
|
||||
app.config['API_TITLE'] = 'Rest personal video API'
|
||||
app.config['API_DESCRIPTION'] = 'Simple API for the Video broker.'
|
||||
app.config['API_CONTACT_EMAIL'] = "yui.heero@gmail.com"
|
||||
app.config['API_LICENSE_NAME'] = 'MPL 2.0'
|
||||
app.config['API_LICENSE_URL'] = 'https://www.mozilla.org/en-US/MPL/2.0/'
|
||||
app.config['schemes'] = ['http', 'https']
|
||||
if "REST_TMP_DATA" not in app.config.keys():
|
||||
app.config['REST_TMP_DATA'] = os.path.join("data", "tmp")
|
||||
if "REST_MEDIA_DATA" not in app.config.keys():
|
||||
app.config['REST_MEDIA_DATA'] = os.path.join("data", "media")
|
||||
if "REST_DATA" not in app.config.keys():
|
||||
app.config['REST_DATA'] = "data"
|
||||
if "REST_HOST" not in app.config.keys():
|
||||
app.config['REST_HOST'] = "0.0.0.0"
|
||||
if "REST_PORT" not in app.config.keys():
|
||||
app.config['REST_PORT'] = "80"
|
||||
|
||||
app.blueprint(openapi_blueprint)
|
||||
app.blueprint(swagger_blueprint)
|
||||
|
||||
|
||||
default_values_type = [
{
"id": 0,
"name": "Documentary",
"description": "Documentary (animals, space, earth...)"
},{
"id": 1,
"name": "Movie",
"description": "Movie with real humans (film)"
},{
"id": 2,
"name": "Animation",
"description": "Animation movies (film)"
},{
"id": 3,
"name": "Short Films",
"description": "Small movies (less than 2 minutes)"
},{
"id": 4,
"name": "Tv show",
"description": "TV show for adults"
},{
"id": 5,
"name": "Animation tv show",
"description": "TV show for young people"
},{
"id": 6,
"name": "Theater",
"description": "Recorded theater pieces"
},{
"id": 7,
"name": "One man show",
"description": "Recorded stand up"
},{
"id": 8,
"name": "Concert",
"description": "Recorded concert"
},{
"id": 9,
"name": "Opera",
"description": "Recorded Opera"
}
]
|
||||
|
||||
|
||||
def add_interface(_name, _default_value = None):
|
||||
interface = data_interface.DataInterface(_name, os.path.join(tools.get_run_path(), app.config['REST_DATA'], "bdd_" + _name + ".json"))
|
||||
if _default_value != None:
|
||||
if interface.count() == 0:
|
||||
interface.reset_with_value(_default_value);
|
||||
data_global_elements.add_interface(_name, interface)
|
||||
|
||||
add_interface(data_global_elements.API_DATA)
|
||||
add_interface(data_global_elements.API_TYPE, default_values_type)
|
||||
add_interface(data_global_elements.API_UNIVERS)
|
||||
add_interface(data_global_elements.API_GROUP)
|
||||
add_interface(data_global_elements.API_SAISON)
|
||||
add_interface(data_global_elements.API_VIDEO)
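A small illustration (not in the original sources) of where this flat-file "BDD" ends up on disk: each interface registered above persists its whole table as one JSON file named bdd_<name>.json under the configured REST_DATA directory, prefixed by the run path returned by tools.get_run_path() (omitted here for brevity). The directory value shown is the default from app.config['REST_DATA'].

import os

REST_DATA = "data"  # default value of app.config['REST_DATA']
interface_names = ["data", "type", "univers", "group", "saison", "video"]

# one JSON file per registered DataInterface:
for name in interface_names:
    print(os.path.join(REST_DATA, "bdd_" + name + ".json"))
# data/bdd_data.json, data/bdd_type.json, ..., data/bdd_video.json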
|
||||
|
||||
import api.root as api_root
|
||||
api_root.add(app)
|
||||
|
||||
import api.type as api_type
|
||||
api_type.add(app, data_global_elements.API_TYPE)
|
||||
|
||||
import api.univers as api_univers
|
||||
api_univers.add(app, data_global_elements.API_UNIVERS)
|
||||
|
||||
import api.group as api_group
|
||||
api_group.add(app, data_global_elements.API_GROUP)
|
||||
|
||||
import api.saison as api_saison
|
||||
api_saison.add(app, data_global_elements.API_SAISON)
|
||||
|
||||
import api.video as api_video
|
||||
api_video.add(app, data_global_elements.API_VIDEO)
|
||||
|
||||
import api.data as api_data
|
||||
api_data.add(app, data_global_elements.API_DATA)
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
debug.info("Start REST application: " + str(app.config['REST_HOST']) + ":" + str(app.config['REST_PORT']))
|
||||
app.config.REQUEST_MAX_SIZE=10*1024*1024*1024
|
||||
app.config.REQUEST_TIMEOUT=60*60
|
||||
app.run(host=app.config['REST_HOST'], port=int(app.config['REST_PORT']))
|
||||
#app.stop()
|
||||
debug.info("Sync all BDD ... (do not force stop ...)");
|
||||
data_global_elements.save_all_before_stop();
|
||||
debug.info("END program");
|
||||
sys.exit(0)
|
||||
|
||||
|
@ -1,63 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
interfaces = {}
|
||||
|
||||
def get_list_interface():
|
||||
global interfaces
|
||||
return interfaces
|
||||
|
||||
def get_interface(_name):
|
||||
global interfaces
|
||||
return interfaces[_name]
|
||||
|
||||
def add_interface(_name, _interface):
|
||||
global interfaces
|
||||
interfaces[_name] = _interface
|
||||
|
||||
|
||||
import time, threading
|
||||
|
||||
system_stop = False
|
||||
system_counter = 0
|
||||
|
||||
def save_all():
|
||||
global system_counter
|
||||
system_counter += 1
|
||||
if system_counter <= 10:
|
||||
return
|
||||
system_counter = 0
|
||||
print(time.ctime())
|
||||
for elem in interfaces.keys():
|
||||
if system_stop == True:
|
||||
return
|
||||
interfaces[elem].check_save()
|
||||
|
||||
def save_all_before_stop():
|
||||
global system_stop
|
||||
system_stop = True
|
||||
for elem in interfaces.keys():
|
||||
interfaces[elem].check_save()
|
||||
|
||||
def check_save():
|
||||
save_all()
|
||||
if system_stop == True:
|
||||
return
|
||||
threading.Timer(1, check_save).start()
|
||||
|
||||
check_save()
|
||||
|
||||
API_TYPE = "type"
|
||||
API_UNIVERS = "univers"
|
||||
API_GROUP = "group"
|
||||
API_SAISON = "saison"
|
||||
API_VIDEO = "video"
|
||||
API_DATA = "data"
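As a side note on the persistence model (sketch only, not in the sources): every interface registered through add_interface() is flushed by the background check_save() timer above, which ticks once per second but only writes dirty tables roughly every ten ticks, so a freshly posted element can sit in memory for several seconds before reaching disk.

import time
import data_interface
import data_global_elements

# register a throw-away table: the module-level check_save() timer now covers it
iface = data_interface.DataInterface("scratch", "/tmp/bdd_scratch.json")
data_global_elements.add_interface("scratch", iface)

iface.post({"name": "example"})  # marks the table as dirty (need_save = True)
time.sleep(15)                   # the 1 s timer only flushes roughly every 10 ticks
# /tmp/bdd_scratch.json has now been rewritten through tools.file_write_data_safe()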
|
||||
|
@ -1,269 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import tools
|
||||
import json
|
||||
from realog import debug
|
||||
import random
|
||||
from sanic.exceptions import ServerError
|
||||
##
|
||||
## @brief Generic interface to access the BDD (no real BDD, direct file IO)
|
||||
##
|
||||
class DataInterface():
|
||||
def __init__(self, _name, _file):
|
||||
self.model = None
|
||||
self.name = _name
|
||||
self.file = _file
|
||||
self.bdd = []
|
||||
self.need_save = False
|
||||
self.last_id = 0
|
||||
if tools.exist(self.file) == False:
|
||||
self.mark_to_store()
|
||||
else:
|
||||
data = tools.file_read_data(self.file)
|
||||
self.bdd = json.loads(data)
|
||||
self.upgrade_global_bdd_id();
|
||||
|
||||
def set_data_model(self, _data_model):
|
||||
self.model = _data_model
|
||||
|
||||
def reset_with_value(self, _data):
|
||||
self.bdd = _data
|
||||
self.last_id = 0
|
||||
self.mark_to_store()
|
||||
self.upgrade_global_bdd_id();
|
||||
|
||||
def check_with_model(self, _data):
|
||||
if self.model == None:
|
||||
return True
|
||||
values = []
|
||||
for elem in dir(self.model):
|
||||
if elem[:2] == "__":
|
||||
continue
|
||||
values.append(elem)
|
||||
have_error = False
|
||||
for key in _data.keys():
|
||||
if key not in values:
|
||||
have_error = True
|
||||
# TODO: ...
|
||||
debug.warning("Add element that is not allowed " + key + " not in " + str(values))
|
||||
for elem in values:
|
||||
if elem not in _data.keys():
|
||||
have_error = True
|
||||
# TODO: ...
|
||||
debug.warning("Missing key " + elem + " not in " + str(_data.keys()))
|
||||
if have_error == True:
|
||||
return False
|
||||
for key in _data.keys():
|
||||
elem = getattr(self.model, key)
|
||||
if type(elem) == list:
|
||||
find_error = True
|
||||
for my_type in elem:
|
||||
if type(_data[key]) == my_type:
|
||||
find_error = False
|
||||
break
|
||||
if find_error == True:
|
||||
debug.warning("data : " + str(_data))
|
||||
tmp_list = []
|
||||
for my_type in elem:
|
||||
tmp_list.append(my_type.__name__)
|
||||
debug.warning("[key='" + key + "'] try to add wrong type in BDD " + type(_data[key]).__name__ + " is not in: " + str(tmp_list))
|
||||
else:
|
||||
if type(_data[key]) != getattr(self.model, key):
|
||||
debug.warning("data : " + str(_data))
|
||||
debug.warning("[key='" + key + "'] try to add wrong type in BDD " + type(_data[key]).__name__ + " is not: " + getattr(self.model, key).__name__)
|
||||
return False
|
||||
return True
|
||||
|
||||
def upgrade_global_bdd_id(self):
|
||||
self.last_id = 0
|
||||
for elem in self.bdd:
|
||||
if 'id' not in elem.keys():
|
||||
continue
|
||||
if elem["id"] >= self.last_id:
|
||||
self.last_id = elem["id"] + 1
|
||||
# starting at a random value permits validating the basic instance test
|
||||
if self.last_id == 0:
|
||||
self.last_id = random.randint(20, 100)
|
||||
|
||||
def get_table_index(self, _id):
|
||||
id_in_bdd = 0
|
||||
for elem in self.bdd:
|
||||
if 'id' in elem.keys() \
|
||||
and elem["id"] == _id:
|
||||
return id_in_bdd
|
||||
id_in_bdd += 1
|
||||
return None
|
||||
|
||||
##
|
||||
## @brief Mark the current BDD to store all in File system (sync)
|
||||
##
|
||||
def mark_to_store(self):
|
||||
self.need_save = True
|
||||
|
||||
##
|
||||
## @brief Check if the BDD needs to be stored. It is stored only if it has been requested.
## The BDD is stored in a separate file and then moved over the old one: a safe way to store.
|
||||
##
|
||||
def check_save(self):
|
||||
if self.need_save == False:
|
||||
return
|
||||
debug.warning("Save bdd: " + self.file)
|
||||
data = json.dumps(self.bdd, sort_keys=True, indent=4)
|
||||
self.need_save = False
|
||||
tools.file_write_data_safe(self.file, data)
|
||||
|
||||
def gets(self, filter=None):
|
||||
debug.info("gets " + self.name)
|
||||
if filter == None:
|
||||
return self.bdd
|
||||
return self.filter_object_values(self.bdd, filter)
|
||||
|
||||
def gets_where(self, select, filter=None):
|
||||
debug.info("gets " + self.name)
|
||||
tmp_list = self.get_sub_list(self.bdd, select)
|
||||
return self.filter_object_values(tmp_list, filter);
|
||||
|
||||
def get(self, _id):
|
||||
if type(_id) != int:
|
||||
debug.warning("get wrong input type...")
|
||||
debug.info("get " + self.name + ": " + str(_id))
|
||||
for elem in self.bdd:
|
||||
if 'id' in elem.keys() \
|
||||
and elem["id"] == _id:
|
||||
return elem
|
||||
debug.warning("not found element: " + str(len(self.bdd)))
|
||||
return None
|
||||
|
||||
def set(self, _id, _value):
if type(_id) != int:
debug.warning("set wrong input type...")
# rebinding the loop variable would not update the stored element:
# write back by table index and request a disk synchronization
id_in_bdd = self.get_table_index(_id)
if id_in_bdd == None:
debug.warning("not found element: " + str(len(self.bdd)))
return None
self.bdd[id_in_bdd] = _value
self.mark_to_store()
return _value
|
||||
|
||||
def delete(self, _id):
|
||||
debug.info("delete " + self.name + ": " + str(_id))
|
||||
id_in_bdd = self.get_table_index(_id)
|
||||
if id_in_bdd == None:
|
||||
return False
|
||||
del self.bdd[id_in_bdd]
|
||||
self.mark_to_store()
|
||||
return True
|
||||
|
||||
def put(self, _id, _value):
|
||||
debug.info("put " + self.name + ": " + str(_id))
|
||||
id_in_bdd = self.get_table_index(_id)
|
||||
if id_in_bdd == None:
|
||||
return False
|
||||
_value["id"] = _id
|
||||
self.bdd[id_in_bdd] = _value
|
||||
self.mark_to_store()
|
||||
return True
|
||||
|
||||
def post(self, _value):
|
||||
debug.info("post " + self.name)
|
||||
_value["id"] = self.last_id
|
||||
self.last_id += 1
|
||||
if self.check_with_model(_value) == False:
|
||||
raise ServerError("Correlation with BDD error", status_code=400)
|
||||
self.bdd.append(_value)
|
||||
self.mark_to_store()
|
||||
return _value
|
||||
|
||||
# TODO : rework this
|
||||
def find(self, _list_token, _values):
|
||||
out = []
|
||||
for elem in self.bdd:
|
||||
find = True
|
||||
for token in _list_token:
|
||||
if elem[token] != _values[token]:
|
||||
find = False
|
||||
break
|
||||
if find == True:
|
||||
out.append(elem)
|
||||
return out
|
||||
|
||||
def count(self, select = None):
|
||||
if select == None:
|
||||
return len(self.bdd)
|
||||
tmp = self.get_sub_list(self.bdd, select)
|
||||
return len(tmp)
|
||||
|
||||
def get_sub_list(self, _values, _select):
|
||||
out = []
|
||||
for elem in _values:
|
||||
find = True
|
||||
if len(_select) == 0:
|
||||
find = False
|
||||
for elem_select in _select:
|
||||
if len(elem_select) != 3:
|
||||
raise ServerError("Internal Server Error: wrong select definition", 500)
|
||||
type_check = elem_select[0]
|
||||
token = elem_select[1]
|
||||
value = elem_select[2]
|
||||
if token in elem.keys():
|
||||
if type_check == "==":
|
||||
if not (elem[token] == value):
|
||||
find = False
|
||||
break
|
||||
elif type_check == "!=":
|
||||
if not (elem[token] != value):
|
||||
find = False
|
||||
break
|
||||
elif type_check == "<":
|
||||
if not (elem[token] < value):
|
||||
find = False
|
||||
break
|
||||
elif type_check == "<=":
|
||||
if not (elem[token] <= value):
|
||||
find = False
|
||||
break
|
||||
elif type_check == ">":
|
||||
if not (elem[token] > value):
|
||||
find = False
|
||||
break
|
||||
elif type_check == ">=":
|
||||
if not (elem[token] >= value):
|
||||
find = False
|
||||
break
|
||||
else:
|
||||
raise ServerError("Internal Server Error: unknown compare type ...", 500)
|
||||
else:
|
||||
find = False
|
||||
break
|
||||
if find == True:
|
||||
out.append(elem)
|
||||
return out
|
||||
|
||||
def filter_object_values(self, _values, _filter):
|
||||
if _filter == None:
return _values
out = []
if len(_filter) == 1:
|
||||
token = _filter[0]
|
||||
for elem in _values:
|
||||
if token not in elem.keys():
|
||||
continue
|
||||
if elem[token] not in out:
|
||||
out.append(elem[token])
|
||||
return out
|
||||
for elem in _values:
|
||||
element_out = {}
|
||||
for token in _filter:
|
||||
if token not in elem.keys():
|
||||
continue
|
||||
element_out[token] = elem[token]
|
||||
out.append(element_out)
|
||||
return out
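A short usage sketch of this class (illustrative only, not part of the original sources): it shows the select-triple syntax accepted by gets_where() / get_sub_list(), where each entry is [comparator, field, value] and all entries must match, and the two behaviours of filter_object_values(), where a single-token filter returns a flat list of distinct values and several tokens return reduced objects; passing no filter returns the rows unchanged. The storage path is a throw-away temporary file.

import data_interface

bdd = data_interface.DataInterface("demo", "/tmp/bdd_demo.json")  # throw-away storage
bdd.reset_with_value([
    {"id": 1, "name": "Intro", "group_id": 2, "time": 1200},
    {"id": 2, "name": "Intro", "group_id": 3, "time": 1500},
    {"id": 3, "name": "Final", "group_id": 2, "time": 900},
])

# every select entry is [comparator, field, value]; all of them must match
long_of_group_2 = bdd.gets_where(select=[["==", "group_id", 2], [">", "time", 1000]])

# a single-token filter returns the distinct values of that field
names = bdd.gets(filter=["name"])          # ["Intro", "Final"]

# several tokens return objects reduced to those fields
summary = bdd.gets(filter=["id", "name"])  # [{"id": 1, "name": "Intro"}, ...]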
|
||||
|
||||
|
20
back/src/org/kar/karideo/CacheFilter.java
Normal file
@ -0,0 +1,20 @@
|
||||
package org.kar.karideo;
|
||||
|
||||
public class CacheFilter {
|
||||
@Override
|
||||
public List<ResourceFilter> create(AbstractMethod am) {
|
||||
if (am.isAnnotationPresent(CacheMaxAge.class)) {
|
||||
CacheMaxAge maxAge = am.getAnnotation(CacheMaxAge.class);
|
||||
return newCacheFilter("max-age: " + maxAge.unit().toSeconds(maxAge.time()));
|
||||
} else if (am.isAnnotationPresent(NoCache.class)) {
|
||||
return newCacheFilter("no-cache");
|
||||
} else {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
private List<ResourceFilter> newCacheFilter(String content) {
|
||||
return Collections
|
||||
.<ResourceFilter> singletonList(new CacheResponseFilter(content));
|
||||
}
|
||||
}
|
157
back/src/org/kar/karideo/WebLauncher.java
Executable file
@ -0,0 +1,157 @@
|
||||
package org.kar.karideo;
|
||||
|
||||
import java.net.URI;
|
||||
|
||||
import org.glassfish.grizzly.http.server.HttpServer;
|
||||
import org.glassfish.jersey.grizzly2.httpserver.GrizzlyHttpServerFactory;
|
||||
import org.glassfish.jersey.jackson.JacksonFeature;
|
||||
import org.glassfish.jersey.media.multipart.MultiPartFeature;
|
||||
import org.glassfish.jersey.server.ResourceConfig;
|
||||
import org.kar.archidata.GlobalConfiguration;
|
||||
import org.kar.archidata.UpdateJwtPublicKey;
|
||||
import org.kar.archidata.api.DataResource;
|
||||
import org.kar.archidata.catcher.GenericCatcher;
|
||||
import org.kar.archidata.db.DBConfig;
|
||||
import org.kar.archidata.filter.CORSFilter;
|
||||
import org.kar.archidata.filter.OptionFilter;
|
||||
import org.kar.archidata.migration.MigrationEngine;
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.karideo.api.Front;
|
||||
import org.kar.karideo.api.HealthCheck;
|
||||
import org.kar.karideo.api.MediaResource;
|
||||
import org.kar.karideo.api.SeasonResource;
|
||||
import org.kar.karideo.api.SeriesResource;
|
||||
import org.kar.karideo.api.TypeResource;
|
||||
import org.kar.karideo.api.UserMediaAdvancementResource;
|
||||
import org.kar.karideo.api.UserResource;
|
||||
import org.kar.karideo.filter.KarideoAuthenticationFilter;
|
||||
import org.kar.karideo.migration.Initialization;
|
||||
import org.kar.karideo.migration.Migration20230810;
|
||||
import org.kar.karideo.migration.Migration20231015;
|
||||
import org.kar.karideo.migration.Migration20231126;
|
||||
import org.kar.karideo.migration.Migration20240226;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import jakarta.ws.rs.core.UriBuilder;
|
||||
|
||||
public class WebLauncher {
|
||||
final static Logger LOGGER = LoggerFactory.getLogger(WebLauncher.class);
|
||||
public static DBConfig dbConfig;
|
||||
protected UpdateJwtPublicKey keyUpdater = null;
|
||||
protected HttpServer server = null;
|
||||
|
||||
public WebLauncher() {
|
||||
ConfigBaseVariable.bdDatabase = "karideo";
|
||||
}
|
||||
|
||||
private static URI getBaseURI() {
|
||||
return UriBuilder.fromUri(ConfigBaseVariable.getlocalAddress()).build();
|
||||
}
|
||||
|
||||
public void migrateDB() throws Exception {
|
||||
WebLauncher.LOGGER.info("Create migration engine");
|
||||
final MigrationEngine migrationEngine = new MigrationEngine();
|
||||
WebLauncher.LOGGER.info("Add initialization");
|
||||
migrationEngine.setInit(new Initialization());
|
||||
WebLauncher.LOGGER.info("Add migration since last version");
|
||||
migrationEngine.add(new Migration20230810());
|
||||
migrationEngine.add(new Migration20231015());
|
||||
migrationEngine.add(new Migration20231126());
|
||||
migrationEngine.add(new Migration20240226());
|
||||
WebLauncher.LOGGER.info("Migrate the DB [START]");
|
||||
migrationEngine.migrateWaitAdmin(GlobalConfiguration.dbConfig);
|
||||
WebLauncher.LOGGER.info("Migrate the DB [STOP]");
|
||||
}
|
||||
|
||||
public static void main(final String[] args) throws Exception {
|
||||
WebLauncher.LOGGER.info("[START] application wake UP");
|
||||
final WebLauncher launcher = new WebLauncher();
|
||||
launcher.migrateDB();
|
||||
launcher.process();
|
||||
WebLauncher.LOGGER.info("End of the server configuration & waiting for the process to finish:");
|
||||
Thread.currentThread().join();
|
||||
WebLauncher.LOGGER.info("STOP Key updater");
|
||||
launcher.stopOther();
|
||||
WebLauncher.LOGGER.info("STOP the REST server");
|
||||
}
|
||||
|
||||
public void process() throws InterruptedException {
|
||||
|
||||
// ===================================================================
|
||||
// Configure resources
|
||||
// ===================================================================
|
||||
final ResourceConfig rc = new ResourceConfig();
|
||||
|
||||
// add multi-part models ..
|
||||
rc.register(MultiPartFeature.class);
|
||||
// automatically answer the OPTION pre-flight requests
|
||||
rc.register(OptionFilter.class);
|
||||
// relax CORS ==> the API is always called by another system...
|
||||
rc.register(CORSFilter.class);
|
||||
// global authentication system
|
||||
rc.register(KarideoAuthenticationFilter.class);
|
||||
// register exception catcher
|
||||
GenericCatcher.addAll(rc);
|
||||
// add default resource:
|
||||
rc.register(UserResource.class);
|
||||
rc.register(SeriesResource.class);
|
||||
rc.register(DataResource.class);
|
||||
rc.register(SeasonResource.class);
|
||||
rc.register(TypeResource.class);
|
||||
rc.register(MediaResource.class);
|
||||
rc.register(UserMediaAdvancementResource.class);
|
||||
|
||||
rc.register(HealthCheck.class);
|
||||
rc.register(Front.class);
|
||||
|
||||
// add Jackson to be discovered when we are in a stand-alone server
|
||||
rc.register(JacksonFeature.class);
|
||||
// enable this to show low level request
|
||||
// rc.property(LoggingFeature.LOGGING_FEATURE_LOGGER_LEVEL_SERVER, Level.WARNING.getName());
|
||||
|
||||
this.server = GrizzlyHttpServerFactory.createHttpServer(getBaseURI(), rc);
|
||||
final HttpServer serverLink = this.server;
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
System.out.println("Stopping server..");
|
||||
serverLink.shutdownNow();
|
||||
}
|
||||
}, "shutdownHook"));
|
||||
|
||||
// ===================================================================
|
||||
// start periodic update of the token ...
|
||||
// ===================================================================
|
||||
this.keyUpdater = new UpdateJwtPublicKey();
|
||||
this.keyUpdater.start();
|
||||
|
||||
// ===================================================================
|
||||
// run JERSEY
|
||||
// ===================================================================
|
||||
try {
|
||||
this.server.start();
|
||||
LOGGER.info("Jersey app started at {}", getBaseURI());
|
||||
} catch (final Exception e) {
|
||||
LOGGER.error("There was an error while starting Grizzly HTTP server.");
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
public void stop() {
|
||||
if (this.server != null) {
|
||||
this.server.shutdownNow();
|
||||
this.server = null;
|
||||
}
|
||||
}
|
||||
|
||||
public void stopOther() {
|
||||
this.keyUpdater.kill();
|
||||
try {
|
||||
this.keyUpdater.join(4000, 0);
|
||||
} catch (final InterruptedException e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
65
back/src/org/kar/karideo/WebLauncherLocal.java
Executable file
@ -0,0 +1,65 @@
|
||||
|
||||
package org.kar.karideo;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.api.DataResource;
|
||||
import org.kar.archidata.externalRestApi.AnalyzeApi;
|
||||
import org.kar.archidata.externalRestApi.TsGenerateApi;
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.karideo.api.Front;
|
||||
import org.kar.karideo.api.HealthCheck;
|
||||
import org.kar.karideo.api.MediaResource;
|
||||
import org.kar.karideo.api.SeasonResource;
|
||||
import org.kar.karideo.api.SeriesResource;
|
||||
import org.kar.karideo.api.TypeResource;
|
||||
import org.kar.karideo.api.UserMediaAdvancementResource;
|
||||
import org.kar.karideo.api.UserResource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class WebLauncherLocal extends WebLauncher {
|
||||
private final static Logger LOGGER = LoggerFactory.getLogger(WebLauncherLocal.class);
|
||||
|
||||
private WebLauncherLocal() {}
|
||||
|
||||
public static void generateObjects() throws Exception {
|
||||
LOGGER.info("Generate APIs");
|
||||
final List<Class<?>> listOfResources = List.of(Front.class, HealthCheck.class, SeasonResource.class, SeriesResource.class, TypeResource.class, UserMediaAdvancementResource.class,
|
||||
UserResource.class, MediaResource.class, DataResource.class);
|
||||
final AnalyzeApi api = new AnalyzeApi();
|
||||
api.addAllApi(listOfResources);
|
||||
TsGenerateApi.generateApi(api, "../front/src/app/back-api/");
|
||||
LOGGER.info("Generate APIs (DONE)");
|
||||
}
|
||||
|
||||
public static void main(final String[] args) throws Exception {
|
||||
generateObjects();
|
||||
final WebLauncherLocal launcher = new WebLauncherLocal();
|
||||
launcher.process();
|
||||
LOGGER.info("End of the server configuration & waiting for the process to finish:");
|
||||
Thread.currentThread().join();
|
||||
LOGGER.info("STOP the REST server:");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process() throws InterruptedException {
|
||||
if (true) {
|
||||
// for local test:
|
||||
ConfigBaseVariable.apiAdress = "http://0.0.0.0:18080/karideo/api/";
|
||||
ConfigBaseVariable.dbPort = "3906";
|
||||
}
|
||||
try {
|
||||
super.migrateDB();
|
||||
} catch (final Exception e) {
|
||||
e.printStackTrace();
|
||||
while (true) {
|
||||
LOGGER.error("============================================================================");
|
||||
LOGGER.error("== Migration failed ==> waiting for administrator intervention...");
|
||||
LOGGER.error("============================================================================");
|
||||
Thread.sleep(60 * 60 * 1000);
|
||||
}
|
||||
}
|
||||
super.process();
|
||||
}
|
||||
}
|
15
back/src/org/kar/karideo/api/Front.java
Normal file
@ -0,0 +1,15 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import org.kar.archidata.api.FrontGeneric;
|
||||
|
||||
import jakarta.ws.rs.Path;
|
||||
|
||||
import org.kar.karideo.util.ConfigVariable;
|
||||
|
||||
@Path("/front")
|
||||
public class Front extends FrontGeneric {
|
||||
public Front() {
|
||||
this.baseFrontFolder = ConfigVariable.getFrontFolder();
|
||||
|
||||
}
|
||||
}
|
35
back/src/org/kar/karideo/api/HealthCheck.java
Normal file
@ -0,0 +1,35 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import org.kar.archidata.exception.FailException;
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.archidata.tools.JWTWrapper;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.PermitAll;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
|
||||
@Path("/health_check")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class HealthCheck {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(HealthCheck.class);
|
||||
|
||||
public record HealthResult(String value) {
|
||||
|
||||
};
|
||||
|
||||
@GET
|
||||
@PermitAll
|
||||
@Operation(description = "Get the server state (health)", tags = "SYSTEM")
|
||||
public HealthResult getHealth() throws FailException {
|
||||
if (JWTWrapper.getPublicKeyJson() == null && !ConfigBaseVariable.getTestMode()) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Missing Jwt public token");
|
||||
}
|
||||
return new HealthResult("alive and kicking");
|
||||
}
|
||||
}
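A quick probe of this endpoint from the outside, as a sketch (not part of the repository): the path and payload come from the resource above, the base address reuses the local development value configured in WebLauncherLocal (http://0.0.0.0:18080/karideo/api/), and in another deployment the prefix will differ. Since the method is @PermitAll, no JWT is needed here, unlike the other resources.

import requests  # illustrative HTTP client

BASE_URL = "http://localhost:18080/karideo/api"  # assumption: WebLauncherLocal defaults

resp = requests.get(BASE_URL + "/health_check")
resp.raise_for_status()
print(resp.json())  # expected: {"value": "alive and kicking"}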
|
249
back/src/org/kar/karideo/api/MediaResource.java
Normal file
@ -0,0 +1,249 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.api.DataResource;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.exception.FailException;
|
||||
import org.kar.archidata.exception.InputException;
|
||||
import org.kar.archidata.model.Data;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Media;
|
||||
import org.kar.karideo.model.Season;
|
||||
import org.kar.karideo.model.Series;
|
||||
import org.kar.karideo.model.Type;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
|
||||
@Path("/media")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class MediaResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(MediaResource.class);
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get all Media", tags = "GLOBAL")
|
||||
public List<Media> gets() throws Exception {
|
||||
return DataAccess.gets(Media.class);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get a specific Media with his ID", tags = "GLOBAL")
|
||||
public Media get(@PathParam("id") final Long id) throws Exception {
|
||||
return DataAccess.get(Media.class, id);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific Media", tags = "GLOBAL")
|
||||
public Media patch(@PathParam("id") final Long id, @AsyncType(Media.class) final String jsonRequest) throws Exception {
|
||||
LOGGER.info("update video {} ==> '{}'", id, jsonRequest);
|
||||
DataAccess.updateWithJson(Media.class, id, jsonRequest);
|
||||
return DataAccess.get(Media.class, id);
|
||||
}
|
||||
|
||||
private String multipartCorrection(final String data) {
|
||||
if (data == null) {
|
||||
return null;
|
||||
}
|
||||
if (data.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
if (data.contentEquals("null")) {
|
||||
return null;
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
@POST
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@Operation(description = "Create a new Media", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Media uploadFile( //
|
||||
@FormDataParam("fileName") String fileName, //
|
||||
@FormDataParam("universe") String universe, //
|
||||
@FormDataParam("series") String series, //
|
||||
// @FormDataParam("seriesId") String seriesId, // Not used ...
|
||||
@FormDataParam("season") String season, //
|
||||
@FormDataParam("episode") String episode, //
|
||||
@FormDataParam("title") String title, //
|
||||
@FormDataParam("typeId") String typeId, //
|
||||
@FormDataParam("file") final InputStream fileInputStream, //
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData //
|
||||
) throws FailException {
|
||||
try {
|
||||
// correct input string stream :
|
||||
fileName = multipartCorrection(fileName);
|
||||
universe = multipartCorrection(universe);
|
||||
series = multipartCorrection(series);
|
||||
season = multipartCorrection(season);
|
||||
episode = multipartCorrection(episode);
|
||||
title = multipartCorrection(title);
|
||||
typeId = multipartCorrection(typeId);
|
||||
|
||||
// public NodeSmall uploadFile(final FormDataMultiPart form) {
|
||||
LOGGER.info("Upload media file: {}", fileMetaData);
|
||||
LOGGER.info(" - fileName: {}", fileName);
|
||||
LOGGER.info(" - universe: {}", universe);
|
||||
LOGGER.info(" - series: {}", series);
|
||||
LOGGER.info(" - season: {}", season);
|
||||
LOGGER.info(" - episode: {}", episode);
|
||||
LOGGER.info(" - title: {}", title);
|
||||
LOGGER.info(" - type: {}", typeId);
|
||||
LOGGER.info(" - fileInputStream: {}", fileInputStream);
|
||||
LOGGER.info(" - fileMetaData: {}", fileMetaData);
|
||||
System.out.flush();
|
||||
if (typeId == null) {
|
||||
throw new InputException("typeId", "TypeId is not specified");
|
||||
}
|
||||
|
||||
final long tmpUID = DataResource.getTmpDataId();
|
||||
final String sha512 = DataResource.saveTemporaryFile(fileInputStream, tmpUID);
|
||||
Data data = DataResource.getWithSha512(sha512);
|
||||
if (data == null) {
|
||||
LOGGER.info("Need to add the data in the BDD ... ");
|
||||
System.out.flush();
|
||||
try {
|
||||
data = DataResource.createNewData(tmpUID, fileName, sha512);
|
||||
} catch (final IOException ex) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
ex.printStackTrace();
|
||||
throw new FailException("can not create input media (the data model has an internal error)");
|
||||
}
|
||||
} else if (data != null && data.deleted != null && data.deleted) {
|
||||
LOGGER.info("Data already exist but deleted");
|
||||
System.out.flush();
|
||||
DataTools.undelete(data.uuid);
|
||||
data.deleted = false;
|
||||
} else {
|
||||
LOGGER.info("Data already exist ... all good");
|
||||
System.out.flush();
|
||||
}
|
||||
// First step: retrieve the Id of each parent...
|
||||
LOGGER.info("Find typeNode");
|
||||
// check if id of type exist:
|
||||
final Type typeNode = TypeResource.getId(Long.parseLong(typeId));
|
||||
if (typeNode == null) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
throw new InputException("typeId", "TypeId does not exist ...");
|
||||
}
|
||||
LOGGER.info(" ==> {}", typeNode);
|
||||
LOGGER.info("Find seriesNode");
|
||||
// get uid of group:
|
||||
Series seriesNode = null;
|
||||
if (series != null) {
|
||||
seriesNode = SeriesResource.getOrCreate(series, typeNode.id);
|
||||
}
|
||||
|
||||
LOGGER.info(" ==> {}", seriesNode);
|
||||
LOGGER.info("Find seasonNode");
|
||||
// get uid of season:
|
||||
Season seasonNode = null;
|
||||
if (seriesNode == null && season != null) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
throw new InputException("season", "Season is set but no series is set!");
|
||||
}
|
||||
if (season != null) {
|
||||
seasonNode = SeasonResource.getOrCreate(season, seriesNode.id);
|
||||
}
|
||||
|
||||
LOGGER.info(" ==> {}", seasonNode);
|
||||
LOGGER.info("add media");
|
||||
|
||||
try {
|
||||
final Media media = new Media();
|
||||
media.name = title;
|
||||
media.dataId = data.uuid;
|
||||
media.typeId = typeNode.id;
|
||||
media.seriesId = null;
|
||||
if (seriesNode != null) {
|
||||
media.seriesId = seriesNode.id;
|
||||
}
|
||||
media.seasonId = null;
|
||||
if (seasonNode != null) {
|
||||
media.seasonId = seasonNode.id;
|
||||
}
|
||||
media.episode = null;
|
||||
if (episode != null && !episode.contentEquals("")) {
|
||||
media.episode = Integer.parseInt(episode);
|
||||
}
|
||||
final Media out = DataAccess.insert(media);
|
||||
LOGGER.info("Generate new media {}", out);
|
||||
return out;
|
||||
} catch (final SQLException ex) {
|
||||
ex.printStackTrace();
|
||||
LOGGER.error("Catch error: {}", ex.getMessage());
|
||||
throw new FailException("Catch SQLerror ==> check server logs");
|
||||
} finally {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
}
|
||||
} catch (final Exception ex) {
|
||||
LOGGER.error("Catch an unexpected error ... {} ", ex.getMessage());
|
||||
ex.printStackTrace();
|
||||
throw new FailException("Catch Exception ==> check server logs");
|
||||
}
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("{id}/cover")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@AsyncType(Media.class)
|
||||
@Operation(description = "Upload a new cover for a media", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Media uploadCover( //
|
||||
@PathParam("id") final Long id, //
|
||||
@FormDataParam("fileName") final String fileName, //
|
||||
@FormDataParam("file") final InputStream fileInputStream, //
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData//
|
||||
) throws Exception {
|
||||
DataTools.uploadCover(Media.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Media.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}/cover/{coverId}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific cover of a media", tags = "GLOBAL")
|
||||
public Media removeCover( //
|
||||
@PathParam("id") final Long id, //
|
||||
@PathParam("coverId") final UUID coverId //
|
||||
) throws Exception {
|
||||
AddOnDataJson.removeLink(Media.class, id, "covers", coverId);
|
||||
return DataAccess.get(Media.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific Media", tags = "GLOBAL")
|
||||
public void remove(@PathParam("id") final Long id) throws Exception {
|
||||
DataAccess.delete(Media.class, id);
|
||||
}
|
||||
}
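For completeness, a client-side sketch of the multipart upload above (illustrative, not part of the repository): the form-field names mirror the @FormDataParam annotations, typeId is the only metadata that must reference an existing Type, series and season are created on the fly when missing, and a season without a series is rejected. The base URL, the bearer-token header and the token value are assumptions; the endpoint itself only requires an authenticated ADMIN role.

import requests  # illustrative HTTP client

BASE_URL = "http://localhost:18080/karideo/api"  # assumption: local dev address
TOKEN = "<admin-jwt>"                            # placeholder, ADMIN role required

with open("episode_01.mkv", "rb") as media_file:
    resp = requests.post(
        BASE_URL + "/media",
        headers={"Authorization": "Bearer " + TOKEN},  # assumption: JWT sent as a bearer token
        data={
            "fileName": "episode_01.mkv",
            "series": "My series",  # optional, created on the fly if missing
            "season": "1",          # optional, only valid together with a series
            "episode": "1",         # optional
            "title": "Pilot",
            "typeId": "1",          # mandatory: must reference an existing Type
        },
        files={"file": media_file},
    )
resp.raise_for_status()
print(resp.json())  # the created Media entry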
|
120
back/src/org/kar/karideo/api/SeasonResource.java
Normal file
@ -0,0 +1,120 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryAnd;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Season;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
|
||||
@Path("/season")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class SeasonResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(SeasonResource.class);
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get all Seasons", tags = "GLOBAL")
|
||||
public List<Season> gets() throws Exception {
|
||||
return DataAccess.gets(Season.class);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Get a specific Season with his ID", tags = "GLOBAL")
|
||||
public Season get(@PathParam("id") final Long id) throws Exception {
|
||||
return DataAccess.get(Season.class, id);
|
||||
}
|
||||
|
||||
/* ============================================================================= ADMIN SECTION: ============================================================================= */
|
||||
|
||||
@POST
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Create a new season", tags = "GLOBAL")
|
||||
public Season post(final Season jsonRequest) throws Exception {
|
||||
return DataAccess.insert(jsonRequest);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific season", tags = "GLOBAL")
|
||||
public Season patch(@PathParam("id") final Long id, @AsyncType(Season.class) final String jsonRequest) throws Exception {
|
||||
DataAccess.updateWithJson(Season.class, id, jsonRequest);
|
||||
return DataAccess.get(Season.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific season", tags = "GLOBAL")
|
||||
public void remove(@PathParam("id") final Long id) throws Exception {
|
||||
DataAccess.delete(Season.class, id);
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("{id}/cover")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.MULTIPART_FORM_DATA)
|
||||
@Operation(description = "Upload a new cover for a season", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Season uploadCover(@PathParam("id") final Long id, @FormDataParam("fileName") final String fileName, @FormDataParam("file") final InputStream fileInputStream,
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData) throws Exception {
|
||||
DataTools.uploadCover(Season.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Season.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}/cover/{coverId}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific cover of a season", tags = "GLOBAL")
|
||||
public Season removeCover(@PathParam("id") final Long id, @PathParam("coverId") final UUID coverId) throws Exception {
|
||||
AddOnDataJson.removeLink(Season.class, id, "covers", coverId);
|
||||
return DataAccess.get(Season.class, id);
|
||||
}
|
||||
|
||||
public static Season getOrCreate(final String name, final Long seriesId) {
|
||||
try {
|
||||
Season out = DataAccess.getWhere(Season.class, new Condition(new QueryAnd(new QueryCondition("name", "=", name), new QueryCondition("parentId", "=", seriesId))));
|
||||
if (out == null) {
|
||||
out = new Season();
|
||||
out.name = name;
|
||||
out.parentId = seriesId;
|
||||
out = DataAccess.insert(out);
|
||||
}
|
||||
return out;
|
||||
} catch (final Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
120
back/src/org/kar/karideo/api/SeriesResource.java
Normal file
@ -0,0 +1,120 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryAnd;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Series;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
|
||||
@Path("/series")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class SeriesResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(SeriesResource.class);
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get all Series", tags = "GLOBAL")
|
||||
public List<Series> gets() throws Exception {
|
||||
return DataAccess.gets(Series.class);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Get a specific Series with his ID", tags = "GLOBAL")
|
||||
public Series get(@PathParam("id") final Long id) throws Exception {
|
||||
return DataAccess.get(Series.class, id);
|
||||
}
|
||||
|
||||
/* ============================================================================= ADMIN SECTION: ============================================================================= */
|
||||
|
||||
@POST
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Create a new Series", tags = "GLOBAL")
|
||||
public Series post(final Series jsonRequest) throws Exception {
|
||||
return DataAccess.insert(jsonRequest);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific Series", tags = "GLOBAL")
|
||||
public Series patch(@PathParam("id") final Long id, @AsyncType(Series.class) final String jsonRequest) throws Exception {
|
||||
DataAccess.updateWithJson(Series.class, id, jsonRequest);
|
||||
return DataAccess.get(Series.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific Series", tags = "GLOBAL")
|
||||
public void remove(@PathParam("id") final Long id) throws Exception {
|
||||
DataAccess.delete(Series.class, id);
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("{id}/cover")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@Operation(description = "Upload a new cover for a Series", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Series uploadCover(@PathParam("id") final Long id, @FormDataParam("fileName") final String fileName, @FormDataParam("file") final InputStream fileInputStream,
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData) throws Exception {
|
||||
DataTools.uploadCover(Series.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Series.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}/cover/{coverId}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific cover of a Series", tags = "GLOBAL")
|
||||
public Series removeCover(@PathParam("id") final Long id, @PathParam("coverId") final UUID coverId) throws Exception {
|
||||
AddOnDataJson.removeLink(Series.class, id, "covers", coverId);
|
||||
return DataAccess.get(Series.class, id);
|
||||
}
|
||||
|
||||
public static Series getOrCreate(final String name, final Long typeId) {
|
||||
try {
|
||||
Series out = DataAccess.getWhere(Series.class, new Condition(new QueryAnd(new QueryCondition("name", "=", name), new QueryCondition("parentId", "=", typeId))));
|
||||
if (out == null) {
|
||||
out = new Series();
|
||||
out.name = name;
|
||||
out.parentId = typeId;
|
||||
out = DataAccess.insert(out);
|
||||
}
|
||||
return out;
|
||||
} catch (final Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
122
back/src/org/kar/karideo/api/TypeResource.java
Normal file
@ -0,0 +1,122 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Type;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
|
||||
@Path("/type")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class TypeResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(TypeResource.class);
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get all Type", tags = "GLOBAL")
|
||||
public List<Type> gets() throws Exception {
|
||||
return DataAccess.gets(Type.class);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Get a specific Type with his ID", tags = "GLOBAL")
|
||||
public Type get(@PathParam("id") final Long id) throws Exception {
|
||||
return DataAccess.get(Type.class, id);
|
||||
}
|
||||
|
||||
public static Type getId(final Long id) throws Exception {
|
||||
return DataAccess.get(Type.class, id);
|
||||
}
|
||||
|
||||
/* ============================================================================= ADMIN SECTION: ============================================================================= */
|
||||
|
||||
@POST
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Create a new Type", tags = "GLOBAL")
|
||||
public Type post(final Type jsonRequest) throws Exception {
|
||||
return DataAccess.insert(jsonRequest);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific Type", tags = "GLOBAL")
|
||||
public Type patch(@PathParam("id") final Long id, @AsyncType(Type.class) final String jsonRequest) throws Exception {
|
||||
DataAccess.updateWithJson(Type.class, id, jsonRequest);
|
||||
return DataAccess.get(Type.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific Type", tags = "GLOBAL")
|
||||
public void remove(@PathParam("id") final Long id) throws Exception {
|
||||
DataAccess.delete(Type.class, id);
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("{id}/cover")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@Operation(description = "Upload a new cover for a Type", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Type uploadCover(@PathParam("id") final Long id, @FormDataParam("fileName") final String fileName, @FormDataParam("file") final InputStream fileInputStream,
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData) throws Exception {
|
||||
DataTools.uploadCover(Type.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Type.class, id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}/cover/{coverId}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific cover of a type", tags = "GLOBAL")
|
||||
public Type removeCover(@PathParam("id") final Long id, @PathParam("coverId") final UUID coverId) throws Exception {
|
||||
AddOnDataJson.removeLink(Type.class, id, "covers", coverId);
|
||||
return DataAccess.get(Type.class, id);
|
||||
}
|
||||
|
||||
public static Type getOrCreate(final String name) {
|
||||
try {
|
||||
Type out = DataAccess.getWhere(Type.class, new Condition(new QueryCondition("name", "=", name)));
|
||||
if (out == null) {
|
||||
out = new Type();
|
||||
out.name = name;
|
||||
out = DataAccess.insert(out);
|
||||
}
|
||||
return out;
|
||||
} catch (final Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
106
back/src/org/kar/karideo/api/UserMediaAdvancementResource.java
Normal file
@ -0,0 +1,106 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryAnd;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.filter.GenericContext;
|
||||
import org.kar.karideo.model.UserMediaAdvancement;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.Context;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.SecurityContext;
|
||||
|
||||
@Path("/advancement")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class UserMediaAdvancementResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(UserMediaAdvancementResource.class);
|
||||
|
||||
@GET
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get a specific user advancement with his ID", tags = "GLOBAL")
|
||||
public UserMediaAdvancement get(@Context final SecurityContext sc, @PathParam("id") final Long id) throws Exception {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
return DataAccess.getWhere(UserMediaAdvancement.class, new Condition(new QueryAnd(new QueryCondition("mediaId", "=", id), new QueryCondition("userId", "=", gc.userByToken.id))));
|
||||
}
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get all user advancement", tags = "GLOBAL")
|
||||
public List<UserMediaAdvancement> gets(@Context final SecurityContext sc) throws Exception {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
return DataAccess.getsWhere(UserMediaAdvancement.class, new Condition(new QueryCondition("userId", "=", gc.userByToken.id)));
|
||||
}
|
||||
|
||||
/* ============================================================================= Modification SECTION: ============================================================================= */
|
||||
|
||||
public record MediaInformations(int time, float percent, int count) {
|
||||
}
|
||||
|
||||
// @POST
|
||||
// @Path("{id}")
|
||||
// @RolesAllowed("USER")
|
||||
// @Consumes(MediaType.APPLICATION_JSON)
|
||||
public UserMediaAdvancement post(@Context final SecurityContext sc, @PathParam("id") final Long id, final MediaInformations data) throws Exception {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
final UserMediaAdvancement elem = new UserMediaAdvancement();
|
||||
elem.userId = gc.userByToken.id;
|
||||
elem.mediaId = id;
|
||||
elem.time = data.time;
|
||||
elem.percent = data.percent;
|
||||
elem.count = data.count;
|
||||
return DataAccess.insert(elem);
|
||||
}
|
||||
|
||||
public record MediaInformationsDelta(int time, float percent, boolean addCount) {
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a user advancement", tags = "GLOBAL")
|
||||
public UserMediaAdvancement patch(@Context final SecurityContext sc, @PathParam("id") final Long id, final MediaInformationsDelta data) throws Exception {
|
||||
final UserMediaAdvancement elem = get(sc, id);
|
||||
if (elem == null) {
|
||||
// insert element
|
||||
if (data.addCount) {
|
||||
return post(sc, id, new MediaInformations(data.time(), data.percent(), 1));
|
||||
} else {
|
||||
return post(sc, id, new MediaInformations(data.time(), data.percent(), 0));
|
||||
}
|
||||
}
|
||||
elem.time = data.time;
|
||||
elem.percent = data.percent;
|
||||
if (data.addCount) {
|
||||
elem.count++;
|
||||
}
|
||||
LOGGER.info("{},{},{}", elem.time, elem.percent, elem.count);
|
||||
final int nbAffected = DataAccess.update(elem, elem.id, List.of("time", "percent", "count"));
|
||||
return DataAccess.get(UserMediaAdvancement.class, elem.id);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Remove a specific user advancement", tags = "GLOBAL")
|
||||
public void remove(@Context final SecurityContext sc, @PathParam("id") final Long id) throws Exception {
|
||||
final UserMediaAdvancement elem = get(sc, id);
|
||||
DataAccess.delete(UserMediaAdvancement.class, elem.id);
|
||||
}
|
||||
|
||||
}
|
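The resource above exposes GET /advancement/{id} for reading the connected user's progress on a media and PATCH /advancement/{id} (with a MediaInformationsDelta body) for upserting it. A minimal JAX-RS client sketch for the GET case follows; the base URL, the Bearer authorization scheme and the media id are assumptions for illustration only, not something defined by this change.

import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.ClientBuilder;
import jakarta.ws.rs.core.MediaType;

public class AdvancementClientSketch {
	public static void main(final String[] args) {
		// Assumed base URL and token; both depend on the actual deployment.
		final String baseUrl = "http://localhost:9993/api";
		final String token = "<JWT with the USER role>";
		final Client client = ClientBuilder.newClient();
		// Fetch the connected user's advancement for media 42 (the id is only an example).
		final String json = client.target(baseUrl).path("advancement").path("42")
				.request(MediaType.APPLICATION_JSON)
				.header("Authorization", "Bearer " + token)
				.get(String.class);
		System.out.println("advancement for media 42: " + json);
		client.close();
	}
}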
88
back/src/org/kar/karideo/api/UserResource.java
Executable file
@ -0,0 +1,88 @@
|
||||
package org.kar.karideo.api;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.filter.GenericContext;
|
||||
import org.kar.karideo.model.UserKarideo;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.Context;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.SecurityContext;
|
||||
|
||||
@Path("/users")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class UserResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(UserResource.class);
|
||||
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class UserOut {
|
||||
public long id;
|
||||
public String login;
|
||||
|
||||
public UserOut(final long id, final String login) {
|
||||
this.id = id;
|
||||
this.login = login;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public UserResource() {}
|
||||
|
||||
// curl http://localhost:9993/api/users
|
||||
@GET
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Get all the users", tags = "SYSTEM")
|
||||
public List<UserKarideo> gets() {
|
||||
System.out.println("getUsers");
|
||||
try {
|
||||
return DataAccess.gets(UserKarideo.class);
|
||||
} catch (final Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// curl http://localhost:9993/api/users/3
|
||||
@GET
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Get a specific user data", tags = "SYSTEM")
|
||||
public UserKarideo get(@Context final SecurityContext sc, @PathParam("id") final long userId) {
|
||||
System.out.println("getUser " + userId);
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
System.out.println("===================================================");
|
||||
System.out.println("== USER ? " + gc.userByToken.name);
|
||||
System.out.println("===================================================");
|
||||
try {
|
||||
return DataAccess.get(UserKarideo.class, userId);
|
||||
} catch (final Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("me")
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get the user personal data", tags = "SYSTEM")
|
||||
public UserOut getMe(@Context final SecurityContext sc) {
|
||||
LOGGER.debug("getMe()");
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
LOGGER.debug("== USER ? {}", gc.userByToken);
|
||||
return new UserOut(gc.userByToken.id, gc.userByToken.name);
|
||||
}
|
||||
|
||||
}
|
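UserOut is annotated with @JsonInclude(JsonInclude.Include.NON_NULL), so fields left at null are dropped from the JSON returned by getMe(). A tiny sketch of that behaviour with Jackson's ObjectMapper is shown below; the explicit mapper is only for illustration, the real serialization is handled by the REST framework.

import com.fasterxml.jackson.databind.ObjectMapper;

import org.kar.karideo.api.UserResource;

public class UserOutSerializationSketch {
	public static void main(final String[] args) throws Exception {
		final ObjectMapper mapper = new ObjectMapper();
		// login is null, so NON_NULL drops it and the output is simply {"id":7}
		final UserResource.UserOut user = new UserResource().new UserOut(7, null);
		System.out.println(mapper.writeValueAsString(user));
	}
}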
@ -0,0 +1,23 @@
|
||||
package org.kar.karideo.filter;
|
||||
|
||||
import org.kar.archidata.filter.AuthenticationFilter;
|
||||
|
||||
import jakarta.ws.rs.Priorities;
|
||||
import jakarta.ws.rs.ext.Provider;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import jakarta.annotation.Priority;
|
||||
|
||||
//@PreMatching
|
||||
@Provider
|
||||
@Priority(Priorities.AUTHENTICATION)
|
||||
public class KarideoAuthenticationFilter extends AuthenticationFilter {
|
||||
final Logger logger = LoggerFactory.getLogger(KarideoAuthenticationFilter.class);
|
||||
|
||||
public KarideoAuthenticationFilter() {
|
||||
super("karideo");
|
||||
}
|
||||
|
||||
}
|
60
back/src/org/kar/karideo/internal/Log.java
Normal file
@ -0,0 +1,60 @@
|
||||
package org.kar.karideo.internal;
|
||||
|
||||
//import io.scenarium.logger.LogLevel;
|
||||
//import io.scenarium.logger.Logger;
|
||||
|
||||
public class Log {
|
||||
// private static final String LIB_NAME = "logger";
|
||||
// private static final String LIB_NAME_DRAW = Logger.getDrawableName(LIB_NAME);
|
||||
// private static final boolean PRINT_CRITICAL = Logger.getNeedPrint(LIB_NAME, LogLevel.CRITICAL);
|
||||
// private static final boolean PRINT_ERROR = Logger.getNeedPrint(LIB_NAME, LogLevel.ERROR);
|
||||
// private static final boolean PRINT_WARNING = Logger.getNeedPrint(LIB_NAME, LogLevel.WARNING);
|
||||
// private static final boolean PRINT_INFO = Logger.getNeedPrint(LIB_NAME, LogLevel.INFO);
|
||||
// private static final boolean PRINT_DEBUG = Logger.getNeedPrint(LIB_NAME, LogLevel.DEBUG);
|
||||
// private static final boolean PRINT_VERBOSE = Logger.getNeedPrint(LIB_NAME, LogLevel.VERBOSE);
|
||||
// private static final boolean PRINT_TODO = Logger.getNeedPrint(LIB_NAME, LogLevel.TODO);
|
||||
// private static final boolean PRINT_PRINT = Logger.getNeedPrint(LIB_NAME, LogLevel.PRINT);
|
||||
//
|
||||
// private Log() {}
|
||||
//
|
||||
// public static void print(String data) {
|
||||
// if (PRINT_PRINT)
|
||||
// Logger.print(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void todo(String data) {
|
||||
// if (PRINT_TODO)
|
||||
// Logger.todo(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void critical(String data) {
|
||||
// if (PRINT_CRITICAL)
|
||||
// Logger.critical(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void error(String data) {
|
||||
// if (PRINT_ERROR)
|
||||
// Logger.error(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void warning(String data) {
|
||||
// if (PRINT_WARNING)
|
||||
// Logger.warning(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void info(String data) {
|
||||
// if (PRINT_INFO)
|
||||
// Logger.info(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void debug(String data) {
|
||||
// if (PRINT_DEBUG)
|
||||
// Logger.debug(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void verbose(String data) {
|
||||
// if (PRINT_VERBOSE)
|
||||
// Logger.verbose(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
|
||||
}
|
91
back/src/org/kar/karideo/migration/Initialization.java
Normal file
@ -0,0 +1,91 @@
|
||||
package org.kar.karideo.migration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.kar.archidata.model.Data;
|
||||
import org.kar.archidata.model.User;
|
||||
import org.kar.karideo.model.Media;
|
||||
import org.kar.karideo.model.Season;
|
||||
import org.kar.karideo.model.Series;
|
||||
import org.kar.karideo.model.Type;
|
||||
import org.kar.karideo.model.UserMediaAdvancement;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Initialization extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Initialization.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
public static final List<Class<?>> CLASSES_BASE = List.of(Data.class, Media.class, Type.class, Series.class, Season.class, User.class, UserMediaAdvancement.class);
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "Initialization";
|
||||
}
|
||||
|
||||
public Initialization() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
for (final Class<?> clazz : CLASSES_BASE) {
|
||||
addClass(clazz);
|
||||
}
|
||||
|
||||
addAction("""
|
||||
INSERT INTO `type` (`id`, `name`, `description`) VALUES
|
||||
(UUID_TO_BIN('15237fd7-d4ee-11ee-a8dd-02420a030203'), 'Documentary', 'Documentary (animals, space, earth...)'),
|
||||
(UUID_TO_BIN('553146c1-d4ee-11ee-a8dd-02420a030203'), 'Movie', 'Movie with real humans (film)'),
|
||||
(UUID_TO_BIN('59c430a3-d4ee-11ee-a8dd-02420a030203'), 'Animation', 'Animation movies (film)'),
|
||||
(UUID_TO_BIN('5cd619e3-d4ee-11ee-a8dd-02420a030203'), 'Short movie', 'Small movies (less 2 minutes)'),
|
||||
(UUID_TO_BIN('5fbbf085-d4ee-11ee-a8dd-02420a030203'), 'TV show', 'TV show for old people'),
|
||||
(UUID_TO_BIN('66dcb6ba-d4ee-11ee-a8dd-02420a030203'), 'Animation TV show', 'TV show for young people'),
|
||||
(UUID_TO_BIN('69ee5c15-d4ee-11ee-a8dd-02420a030203'), 'Theater', 'Theater play'),
|
||||
(UUID_TO_BIN('6ce72530-d4ee-11ee-a8dd-02420a030203'), 'One man show', 'Recorded stand up'),
|
||||
(UUID_TO_BIN('6ff1691a-d4ee-11ee-a8dd-02420a030203'), 'Concert', 'Recorded concert'),
|
||||
(UUID_TO_BIN('730815ef-d4ee-11ee-a8dd-02420a030203'), 'Opera', 'Recorded opera');
|
||||
""");
|
||||
// set the AUTO_INCREMENT start value so that new entries are added after the default elements
|
||||
addAction("""
|
||||
ALTER TABLE `media` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `type` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `series` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `season` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `userMediaAdvancement` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
}
|
||||
|
||||
public static void dropAll() {
|
||||
for (final Class<?> element : CLASSES_BASE) {
|
||||
try {
|
||||
DataAccess.drop(element);
|
||||
} catch (final Exception ex) {
|
||||
LOGGER.error("Fail to drop table !!!!!!");
|
||||
ex.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void cleanAll() {
|
||||
for (final Class<?> element : CLASSES_BASE) {
|
||||
try {
|
||||
DataAccess.cleanAll(element);
|
||||
} catch (final Exception ex) {
|
||||
LOGGER.error("Fail to clean table !!!!!!");
|
||||
ex.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
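This initialization step creates the tables for all CLASSES_BASE, seeds the type table with ten default entries and, on MySQL only, bumps the AUTO_INCREMENT counters to 1000 so that user-created rows do not collide with the seeded ones. A small sanity-check sketch using the DataAccess API already used elsewhere in this back end (it assumes the database connection is configured and the migration has run):

import java.util.List;

import org.kar.archidata.dataAccess.DataAccess;
import org.kar.karideo.model.Type;

public class SeededTypeCheckSketch {
	public static void main(final String[] args) throws Exception {
		// List the seeded media types; expects the ten entries inserted above.
		final List<Type> types = DataAccess.gets(Type.class);
		for (final Type type : types) {
			System.out.println(type.name + " : " + type.description);
		}
	}
}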
28
back/src/org/kar/karideo/migration/Migration20230810.java
Normal file
@ -0,0 +1,28 @@
|
||||
package org.kar.karideo.migration;
|
||||
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.kar.karideo.model.UserMediaAdvancement;
|
||||
|
||||
public class Migration20230810 extends MigrationSqlStep {
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2023-08-10";
|
||||
}
|
||||
|
||||
public Migration20230810() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
addClass(UserMediaAdvancement.class);
|
||||
|
||||
addAction("""
|
||||
ALTER TABLE `userMediaAdvancement` AUTO_INCREMENT = 1000;
|
||||
""");
|
||||
}
|
||||
|
||||
}
|
32
back/src/org/kar/karideo/migration/Migration20231015.java
Normal file
@ -0,0 +1,32 @@
|
||||
package org.kar.karideo.migration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
|
||||
public class Migration20231015 extends MigrationSqlStep {
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2023-10-15: refactor creation and update time";
|
||||
}
|
||||
|
||||
public Migration20231015() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
for (String elem : List.of("data", "media", "media_link_cover", "season", "season_link_cover", "series", "series_link_cover", "type", "type_link_cover", "user", "userMediaAdvancement")) {
|
||||
addAction("""
|
||||
ALTER TABLE `""" + elem + """
|
||||
`
|
||||
RENAME COLUMN `create_date` TO `createdAt`,
|
||||
RENAME COLUMN `modify_date` TO `updatedAt`;
|
||||
""");
|
||||
}
|
||||
display();
|
||||
}
|
||||
}
|
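The loop above splices the table name between two Java text blocks, which can be hard to read; the sketch below shows roughly what the concatenation produces for one table (illustration only, the values come straight from the migration above):

public class RenameStatementSketch {
	public static void main(final String[] args) {
		final String elem = "data";
		final String sql = """
				ALTER TABLE `""" + elem + """
				`
				RENAME COLUMN `create_date` TO `createdAt`,
				RENAME COLUMN `modify_date` TO `updatedAt`;
				""";
		// Prints: ALTER TABLE `data`
		//         RENAME COLUMN `create_date` TO `createdAt`,
		//         RENAME COLUMN `modify_date` TO `updatedAt`;
		System.out.println(sql);
	}
}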
160
back/src/org/kar/karideo/migration/Migration20231126.java
Normal file
@ -0,0 +1,160 @@
|
||||
package org.kar.karideo.migration;
|
||||
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
|
||||
public class Migration20231126 extends MigrationSqlStep {
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2023-11-26: re-order the migration for the new API of archidata";
|
||||
}
|
||||
|
||||
public Migration20231126() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
|
||||
// update migration update (last one)
|
||||
addAction("""
|
||||
ALTER TABLE `KAR_migration`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `create_date` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `modify_date` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' AFTER `updatedAt`,
|
||||
ADD `version` int NOT NULL DEFAULT '2' AFTER `deleted`,
|
||||
CHANGE `name` `name` varchar(256) COLLATE 'utf8mb4_0900_ai_ci' NULL COMMENT 'Name of the migration' AFTER `version`,
|
||||
CHANGE `terminated` `terminated` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'if the migration is well terminated or not' AFTER `name`,
|
||||
CHANGE `stepId` `stepId` int NULL COMMENT 'index in the migration progression' AFTER `terminated`,
|
||||
CHANGE `count` `count` int NULL COMMENT 'number of element in the migration' AFTER `stepId`,
|
||||
CHANGE `log` `log` text COLLATE 'utf8mb3_general_ci' NULL COMMENT 'Log generate by the migration' AFTER `count`;
|
||||
""");
|
||||
|
||||
addAction("""
|
||||
ALTER TABLE `data`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' AFTER `updatedAt`,
|
||||
CHANGE `sha512` `sha512` varchar(128) COLLATE 'utf8mb4_0900_ai_ci' NOT NULL COMMENT 'Sha512 of the data' AFTER `deleted`,
|
||||
CHANGE `mimeType` `mimeType` varchar(128) COLLATE 'utf8mb4_0900_ai_ci' NOT NULL COMMENT 'Mime-type of the media' AFTER `sha512`,
|
||||
CHANGE `size` `size` bigint NOT NULL COMMENT 'Size in Byte of the data' AFTER `mimeType`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `media`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' AFTER `updatedAt`,
|
||||
CHANGE `name` `name` text COLLATE 'utf8mb3_general_ci' NOT NULL COMMENT 'Name of the media (this represents the title)' AFTER `deleted`,
|
||||
CHANGE `description` `description` text COLLATE 'utf8mb3_general_ci' NULL COMMENT 'Description of the media' AFTER `name`,
|
||||
CHANGE `dataId` `dataId` bigint NOT NULL COMMENT 'Foreign Key Id of the data' AFTER `description`,
|
||||
CHANGE `typeId` `typeId` bigint NULL COMMENT 'Type of the media' AFTER `dataId`,
|
||||
CHANGE `seriesId` `seriesId` bigint NULL COMMENT 'Series reference of the media' AFTER `typeId`,
|
||||
CHANGE `seasonId` `seasonId` bigint NULL COMMENT 'Season reference of the media' AFTER `seriesId`,
|
||||
CHANGE `episode` `episode` int NULL COMMENT 'Episode Id' AFTER `seasonId`,
|
||||
CHANGE `date` `date` int NULL AFTER `episode`,
|
||||
CHANGE `time` `time` int NULL COMMENT 'Creation year of the media' AFTER `date`,
|
||||
CHANGE `ageLimit` `ageLimit` int NULL COMMENT 'Age limit of the media' AFTER `time`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `media_link_cover`
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' AFTER `updatedAt`,
|
||||
CHANGE `media_id` `object1id` bigint NOT NULL AFTER `deleted`,
|
||||
CHANGE `cover_id` `object2id` bigint NOT NULL AFTER `object1id`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `season`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' AFTER `updatedAt`,
|
||||
CHANGE `name` `name` text COLLATE 'utf8mb3_general_ci' NOT NULL COMMENT 'Name of the media (this represents the title)' AFTER `deleted`,
|
||||
CHANGE `description` `description` text COLLATE 'utf8mb3_general_ci' NULL COMMENT 'Description of the media' AFTER `name`,
|
||||
CHANGE `parentId` `parentId` bigint NOT NULL COMMENT 'series parent ID' AFTER `description`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `season_link_cover`
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' AFTER `updatedAt`,
|
||||
CHANGE `season_id` `object1id` bigint NOT NULL AFTER `deleted`,
|
||||
CHANGE `cover_id` `object2id` bigint NOT NULL AFTER `object1id`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `series`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' AFTER `updatedAt`,
|
||||
CHANGE `name` `name` text COLLATE 'utf8mb3_general_ci' NOT NULL COMMENT 'Name of the media (this represents the title)' AFTER `deleted`,
|
||||
CHANGE `description` `description` text COLLATE 'utf8mb3_general_ci' NULL COMMENT 'Description of the media' AFTER `name`,
|
||||
CHANGE `parentId` `parentId` bigint NOT NULL COMMENT 'series parent ID' AFTER `description`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `series_link_cover`
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' AFTER `updatedAt`,
|
||||
CHANGE `series_id` `object1id` bigint NOT NULL AFTER `deleted`,
|
||||
CHANGE `cover_id` `object2id` bigint NOT NULL AFTER `object1id`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `type`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `name` `name` text COLLATE 'utf8mb3_general_ci' NOT NULL COMMENT 'Name of the media (this represents the title)' AFTER `deleted`,
|
||||
CHANGE `description` `description` text COLLATE 'utf8mb3_general_ci' NULL COMMENT 'Description of the media' AFTER `name`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `type_link_cover`
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' AFTER `updatedAt`,
|
||||
CHANGE `type_id` `object1id` bigint NOT NULL AFTER `deleted`,
|
||||
CHANGE `cover_id` `object2id` bigint NOT NULL AFTER `object1id`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `user`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' AFTER `updatedAt`,
|
||||
CHANGE `login` `login` varchar(128) COLLATE 'utf8mb4_0900_ai_ci' NULL AFTER `deleted`,
|
||||
CHANGE `lastConnection` `lastConnection` timestamp(3) NULL AFTER `login`,
|
||||
CHANGE `admin` `admin` tinyint(1) NOT NULL DEFAULT '0' AFTER `lastConnection`,
|
||||
CHANGE `blocked` `blocked` tinyint(1) NOT NULL DEFAULT '0' AFTER `admin`,
|
||||
CHANGE `removed` `removed` tinyint(1) NOT NULL DEFAULT '0' AFTER `blocked`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `userMediaAdvancement`
|
||||
CHANGE `id` `id` bigint NOT NULL COMMENT 'Primary key of the base' AUTO_INCREMENT FIRST,
|
||||
CHANGE `createdAt` `createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' AFTER `id`,
|
||||
CHANGE `updatedAt` `updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'When update the object' AFTER `createdAt`,
|
||||
CHANGE `deleted` `deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' AFTER `updatedAt`,
|
||||
CHANGE `userId` `userId` bigint NOT NULL COMMENT 'Foreign Key Id of the user' AFTER `deleted`,
|
||||
CHANGE `mediaId` `mediaId` bigint NOT NULL COMMENT 'Id of the media' AFTER `userId`,
|
||||
CHANGE `percent` `percent` float NOT NULL COMMENT 'Percent of advancement in the media' AFTER `mediaId`,
|
||||
CHANGE `time` `time` int NOT NULL COMMENT 'Number of seconds of advancement in the media' AFTER `percent`,
|
||||
CHANGE `count` `count` int NOT NULL COMMENT 'Number of time this media has been read' AFTER `time`;
|
||||
""");
|
||||
addAction("""
|
||||
CREATE TABLE `user_link_cover` (
|
||||
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'Primary key of the base' ,
|
||||
`createdAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Create time of the object' ,
|
||||
`updatedAt` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3) COMMENT 'When update the object' ,
|
||||
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'When delete, they are not removed, they are just set in a deleted state' ,
|
||||
`object1Id` bigint NOT NULL COMMENT 'Object reference 1' ,
|
||||
`object2Id` bigint NOT NULL COMMENT 'Object reference 2' ,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
|
||||
""");
|
||||
}
|
||||
|
||||
}
|
137
back/src/org/kar/karideo/migration/Migration20240226.java
Normal file
@ -0,0 +1,137 @@
|
||||
package org.kar.karideo.migration;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.api.DataResource;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.addOn.model.LinkTableLongLong;
|
||||
import org.kar.archidata.dataAccess.options.AccessDeletedItems;
|
||||
import org.kar.archidata.dataAccess.options.OverrideTableName;
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.kar.archidata.tools.UuidUtils;
|
||||
import org.kar.karideo.migration.model.CoverConversion;
|
||||
import org.kar.karideo.migration.model.MediaConversion;
|
||||
import org.kar.karideo.migration.model.UUIDConversion;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Migration20240226 extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Migration20240226.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2024-02-26: convert base with UUID";
|
||||
}
|
||||
|
||||
public Migration20240226() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
addAction("""
|
||||
ALTER TABLE `data` ADD `uuid` binary(16) AFTER `id`;
|
||||
""");
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
for (final UUIDConversion elem : datas) {
|
||||
elem.uuid = UuidUtils.nextUUID();
|
||||
}
|
||||
for (final UUIDConversion elem : datas) {
|
||||
DataAccess.update(elem, elem.id, List.of("uuid"), new OverrideTableName("data"));
|
||||
}
|
||||
});
|
||||
addAction("""
|
||||
ALTER TABLE `data` CHANGE `uuid` `uuid` binary(16) DEFAULT (UUID_TO_BIN(UUID(), TRUE));
|
||||
""");
|
||||
final List<String> tableToTransform = List.of("media", "season", "series", "type", "user");
|
||||
for (final String tableName : tableToTransform) {
|
||||
addAction("ALTER TABLE `" + tableName + "` ADD `covers` text NULL;");
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
final List<CoverConversion> medias = DataAccess.gets(CoverConversion.class, new AccessDeletedItems(), new OverrideTableName(tableName));
|
||||
final List<LinkTableLongLong> links = DataAccess.gets(LinkTableLongLong.class, new OverrideTableName(tableName + "_link_cover"));
|
||||
LOGGER.info("Get somes data: {} {} {}", datas.size(), medias.size(), links.size());
|
||||
for (final CoverConversion media : medias) {
|
||||
final List<UUID> values = new ArrayList<>();
|
||||
for (final LinkTableLongLong link : links) {
|
||||
if (link.object1Id.equals(media.id)) {
|
||||
for (final UUIDConversion data : datas) {
|
||||
if (data.id.equals(link.object2Id)) {
|
||||
values.add(data.uuid);
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!values.isEmpty()) {
|
||||
media.covers = values;
|
||||
LOGGER.info(" update: {} => {}", media.id, media.covers);
|
||||
DataAccess.update(media, media.id, List.of("covers"), new OverrideTableName(tableName));
|
||||
}
|
||||
}
|
||||
});
|
||||
addAction("DROP TABLE `" + tableName + "_link_cover`;");
|
||||
}
|
||||
addAction("""
|
||||
ALTER TABLE `media` ADD `dataUUID` binary(16) AFTER dataId;
|
||||
""");
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
final List<MediaConversion> medias = DataAccess.gets(MediaConversion.class, new AccessDeletedItems(), new OverrideTableName("media"));
|
||||
for (final MediaConversion media : medias) {
|
||||
for (final UUIDConversion data : datas) {
|
||||
if (data.id.equals(media.dataId)) {
|
||||
media.dataUUID = data.uuid;
|
||||
DataAccess.update(media, media.id, List.of("dataUUID"), new OverrideTableName("media"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
addAction("""
|
||||
ALTER TABLE `media` DROP `dataId`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `media` CHANGE `dataUUID` `dataId` binary(16) NOT NULL;
|
||||
""");
|
||||
// Move the files...
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
for (final UUIDConversion data : datas) {
|
||||
final String origin = DataResource.getFileDataOld(data.id);
|
||||
final String destination = DataResource.getFileData(data.uuid);
|
||||
LOGGER.info("move file = {}", origin);
|
||||
LOGGER.info(" ==> {}", destination);
|
||||
try {
|
||||
Files.move(Paths.get(origin), Paths.get(destination), StandardCopyOption.ATOMIC_MOVE);
|
||||
} catch (final NoSuchFileException ex) {
|
||||
LOGGER.error("MOVE_ERROR : {} -> {}", origin, destination);
|
||||
}
|
||||
}
|
||||
});
|
||||
/* Not sure yet, so the old primary key is kept for the moment: addAction(""" ALTER TABLE `data` DROP `id`; """); */
|
||||
addAction("""
|
||||
ALTER TABLE `data` CHANGE `id` `idOld` bigint NOT NULL DEFAULT 0;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `data` DROP PRIMARY KEY;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `data` CHANGE `uuid` `id` binary(16) DEFAULT (UUID_TO_BIN(UUID(), TRUE));
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `data` ADD PRIMARY KEY `id` (`id`);
|
||||
""");
|
||||
}
|
||||
|
||||
}
|
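The cover-link rewrite above resolves every link by scanning the datas and links lists with nested loops, which is quadratic in the table sizes. A hypothetical variant of the lookup, shown only to illustrate the idea (it is not part of the migration), would index the old numeric ids once in a map:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import org.kar.karideo.migration.model.UUIDConversion;

public class CoverLookupSketch {
	// Build an oldId -> uuid index once, so each cover link can be resolved in O(1).
	public static Map<Long, UUID> indexByOldId(final List<UUIDConversion> datas) {
		final Map<Long, UUID> out = new HashMap<>();
		for (final UUIDConversion data : datas) {
			out.put(data.id, data.uuid);
		}
		return out;
	}
}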
@ -0,0 +1,15 @@
|
||||
package org.kar.karideo.migration.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
public class CoverConversion {
|
||||
@Id
|
||||
public Long id = null;
|
||||
@DataJson
|
||||
public List<UUID> covers = null;
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
package org.kar.karideo.migration.model;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
public class MediaConversion {
|
||||
@Id
|
||||
public Long id = null;
|
||||
public Long dataId = null;
|
||||
public UUID dataUUID = null;
|
||||
}
|
11
back/src/org/kar/karideo/migration/model/UUIDConversion.java
Normal file
@ -0,0 +1,11 @@
|
||||
package org.kar.karideo.migration.model;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
public class UUIDConversion {
|
||||
@Id
|
||||
public Long id = null;
|
||||
public UUID uuid = null;
|
||||
}
|
60
back/src/org/kar/karideo/model/Media.java
Normal file
@ -0,0 +1,60 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.Data;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Entity;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Entity
|
||||
@Table(name = "media")
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Media extends GenericDataSoftDelete {
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
@Column(nullable = false, length = 0)
|
||||
public String name;
|
||||
@Schema(description = "Description of the media")
|
||||
@Column(length = 0)
|
||||
public String description;
|
||||
@Schema(description = "Foreign Key Id of the data")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Data.class)
|
||||
@Column(nullable = false)
|
||||
public UUID dataId;
|
||||
@Schema(description = "Type of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Type.class)
|
||||
public Long typeId;
|
||||
@Schema(description = "Series reference of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Series.class)
|
||||
public Long seriesId;
|
||||
@Schema(description = "Season reference of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Season.class)
|
||||
public Long seasonId;
|
||||
@Schema(description = "Episode Id")
|
||||
public Integer episode;
|
||||
// ")
|
||||
public Integer date;
|
||||
@Schema(description = "Creation years of the media")
|
||||
public Integer time;
|
||||
@Schema(description = "Limitation Age of the media")
|
||||
public Integer ageLimit;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson(targetEntity = Data.class)
|
||||
public List<UUID> covers = null;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Media [name=" + this.name + ", description=" + this.description + ", dataId=" + this.dataId + ", typeId=" + this.typeId + ", seriesId=" + this.seriesId + ", seasonId=" + this.seasonId
|
||||
+ ", episode=" + this.episode + ", date=" + this.date + ", time=" + this.time + ", ageLimit=" + this.ageLimit + ", covers=" + this.covers + "]";
|
||||
}
|
||||
|
||||
}
|
35
back/src/org/kar/karideo/model/Season.java
Normal file
@ -0,0 +1,35 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "season")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Season extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
public String description;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "series parent ID")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Series.class)
|
||||
public Long parentId;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
public List<UUID> covers = null;
|
||||
}
|
35
back/src/org/kar/karideo/model/Series.java
Normal file
@ -0,0 +1,35 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "series")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Series extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
public String description;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "series parent ID")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Type.class)
|
||||
public Long parentId;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
public List<UUID> covers = null;
|
||||
}
|
29
back/src/org/kar/karideo/model/Type.java
Normal file
@ -0,0 +1,29 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "type")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Type extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
public String description;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
public List<UUID> covers = null;
|
||||
}
|
15
back/src/org/kar/karideo/model/UserKarideo.java
Normal file
@ -0,0 +1,15 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.model.User;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "user")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class UserKarideo extends User {
|
||||
|
||||
}
|
37
back/src/org/kar/karideo/model/UserMediaAdvancement.java
Normal file
@ -0,0 +1,37 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataNotRead;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "userMediaAdvancement")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class UserMediaAdvancement extends GenericDataSoftDelete {
|
||||
@DataNotRead
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Foreign Key Id of the user")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = UserKarideo.class)
|
||||
public Long userId;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Id of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Media.class)
|
||||
public Long mediaId;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Percent of advancement in the media")
|
||||
public Float percent;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Number of second of advancement in the media")
|
||||
public Integer time;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Number of time this media has been read")
|
||||
public Integer count;
|
||||
}
|
13
back/src/org/kar/karideo/util/ConfigVariable.java
Normal file
@ -0,0 +1,13 @@
|
||||
package org.kar.karideo.util;
|
||||
|
||||
public class ConfigVariable {
|
||||
public static final String BASE_NAME = "ORG_KARIDEO_";
|
||||
|
||||
public static String getFrontFolder() {
|
||||
String out = System.getenv(BASE_NAME + "FRONT_FOLDER");
|
||||
if (out == null) {
|
||||
return "/application/front";
|
||||
}
|
||||
return out;
|
||||
}
|
||||
}
|
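ConfigVariable reads ORG_KARIDEO_FRONT_FOLDER and falls back to /application/front. If more variables are added later, the same pattern could be factored into a small helper; the class below is a hypothetical sketch, not part of this change:

public class ConfigVariableSketch {
	public static final String BASE_NAME = "ORG_KARIDEO_";

	// Hypothetical generalisation of the pattern used in getFrontFolder().
	public static String getEnvOrDefault(final String name, final String defaultValue) {
		final String out = System.getenv(BASE_NAME + name);
		return out != null ? out : defaultValue;
	}

	public static void main(final String[] args) {
		System.out.println(getEnvOrDefault("FRONT_FOLDER", "/application/front"));
	}
}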
35
back/src/resources/simplelogger.properties
Normal file
@ -0,0 +1,35 @@
|
||||
# SLF4J's SimpleLogger configuration file
|
||||
# Simple implementation of Logger that sends all enabled log messages, for all defined loggers, to System.err.
|
||||
# Default logging detail level for all instances of SimpleLogger.
|
||||
# Must be one of ("trace", "debug", "info", "warn", or "error").
|
||||
# If not specified, defaults to "info".
|
||||
org.slf4j.simpleLogger.defaultLogLevel=trace
|
||||
|
||||
# Logging detail level for a SimpleLogger instance named "xxxxx".
|
||||
# Must be one of ("trace", "debug", "info", "warn", or "error").
|
||||
# If not specified, the default logging detail level is used.
|
||||
#org.slf4j.simpleLogger.log.xxxxx=
|
||||
|
||||
# Set to true if you want the current date and time to be included in output messages.
|
||||
# Default is false, and will output the number of milliseconds elapsed since startup.
|
||||
#org.slf4j.simpleLogger.showDateTime=false
|
||||
|
||||
# The date and time format to be used in the output messages.
|
||||
# The pattern describing the date and time format is the same that is used in java.text.SimpleDateFormat.
|
||||
# If the format is not specified or is invalid, the default format is used.
|
||||
# The default format is yyyy-MM-dd HH:mm:ss:SSS Z.
|
||||
#org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss:SSS Z
|
||||
|
||||
# Set to true if you want to output the current thread name.
|
||||
# Defaults to true.
|
||||
org.slf4j.simpleLogger.showThreadName=true
|
||||
|
||||
# Set to true if you want the Logger instance name to be included in output messages.
|
||||
# Defaults to true.
|
||||
#org.slf4j.simpleLogger.showLogName=true
|
||||
|
||||
# Set to true if you want the last component of the name to be included in output messages.
|
||||
# Defaults to false.
|
||||
#org.slf4j.simpleLogger.showShortLogName=false
|
||||
|
||||
|
@ -1,141 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2012, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import errno
|
||||
import fnmatch
|
||||
import stat
|
||||
# Local import
|
||||
from realog import debug
|
||||
|
||||
"""
|
||||
|
||||
"""
|
||||
def get_run_path():
|
||||
return os.getcwd()
|
||||
|
||||
"""
|
||||
|
||||
"""
|
||||
def get_current_path(file):
|
||||
return os.path.dirname(os.path.realpath(file))
|
||||
|
||||
def create_directory_of_file(file):
|
||||
debug.info("Create directory of path: '" + file + "'")
|
||||
path = os.path.dirname(file)
|
||||
debug.info("Create directory: '" + path + "'")
|
||||
try:
|
||||
os.stat(path)
|
||||
except:
|
||||
os.makedirs(path)
|
||||
|
||||
def get_list_sub_path(path):
|
||||
# TODO : os.listdir(path)
|
||||
for dirname, dirnames, filenames in os.walk(path):
|
||||
return dirnames
|
||||
return []
|
||||
|
||||
def remove_path_and_sub_path(path):
|
||||
if os.path.isdir(path):
|
||||
debug.verbose("remove path : '" + path + "'")
|
||||
shutil.rmtree(path)
|
||||
|
||||
def remove_file(path):
|
||||
if os.path.isfile(path):
|
||||
os.remove(path)
|
||||
elif os.path.islink(path):
|
||||
os.remove(path)
|
||||
|
||||
def exist(path):
|
||||
if os.path.isdir(path):
|
||||
return True
|
||||
if os.path.isfile(path):
|
||||
return True
|
||||
if os.path.islink(path):
|
||||
return True
|
||||
return False
|
||||
|
||||
def file_size(path):
|
||||
if not os.path.isfile(path):
|
||||
return 0
|
||||
statinfo = os.stat(path)
|
||||
return statinfo.st_size
|
||||
|
||||
def file_read_data(path, binary=False):
|
||||
debug.verbose("path= " + path)
|
||||
if not os.path.isfile(path):
|
||||
return ""
|
||||
if binary == True:
|
||||
file = open(path, "rb")
|
||||
else:
|
||||
file = open(path, "r")
|
||||
data_file = file.read()
|
||||
file.close()
|
||||
return data_file
|
||||
|
||||
def version_to_string(version):
|
||||
version_ID = ""
|
||||
for id in version:
|
||||
if len(version_ID) != 0:
|
||||
if type(id) == str:
|
||||
version_ID += "-"
|
||||
else:
|
||||
version_ID += "."
|
||||
version_ID += str(id)
|
||||
return version_ID
|
||||
|
||||
##
|
||||
## @brief Write data in a specific path.
|
||||
## @param[in] path Path where the data will be written.
|
||||
## @param[in] data Data To write in the file.
|
||||
## @param[in] only_if_new (default: False) Write data only if data is different.
|
||||
## @return True Something has been copied
|
||||
## @return False Nothing has been copied
|
||||
##
|
||||
def file_write_data(path, data, only_if_new=False):
|
||||
if only_if_new == True:
|
||||
if os.path.exists(path) == True:
|
||||
old_data = file_read_data(path)
|
||||
if old_data == data:
|
||||
return False
|
||||
#real write of data:
|
||||
create_directory_of_file(path)
|
||||
file = open(path, "w")
|
||||
file.write(data)
|
||||
file.close()
|
||||
return True
|
||||
|
||||
def file_write_data_safe(path, data):
|
||||
#real write of data:
|
||||
create_directory_of_file(path)
|
||||
file = open(path + ".tmp", "w")
|
||||
file.write(data)
|
||||
file.close()
|
||||
shutil.move(path + ".tmp", path)
|
||||
return True
|
||||
|
||||
|
||||
def file_move(path_src, path_dst):
|
||||
#real write of data:
|
||||
create_directory_of_file(path_dst)
|
||||
shutil.move(path_src, path_dst)
|
||||
return True
|
||||
|
||||
|
||||
def list_to_str(list):
|
||||
if type(list) == type(str()):
|
||||
return list + " "
|
||||
else:
|
||||
result = ""
|
||||
# multiple inputs in the list ...
|
||||
for elem in list:
|
||||
result += list_to_str(elem)
|
||||
return result
|
35
back/test/resources/simplelogger.properties
Normal file
@ -0,0 +1,35 @@
|
||||
# SLF4J's SimpleLogger configuration file
|
||||
# Simple implementation of Logger that sends all enabled log messages, for all defined loggers, to System.err.
|
||||
# Default logging detail level for all instances of SimpleLogger.
|
||||
# Must be one of ("trace", "debug", "info", "warn", or "error").
|
||||
# If not specified, defaults to "info".
|
||||
org.slf4j.simpleLogger.defaultLogLevel=debug
|
||||
|
||||
# Logging detail level for a SimpleLogger instance named "xxxxx".
|
||||
# Must be one of ("trace", "debug", "info", "warn", or "error").
|
||||
# If not specified, the default logging detail level is used.
|
||||
#org.slf4j.simpleLogger.log.xxxxx=
|
||||
|
||||
# Set to true if you want the current date and time to be included in output messages.
|
||||
# Default is false, and will output the number of milliseconds elapsed since startup.
|
||||
#org.slf4j.simpleLogger.showDateTime=false
|
||||
|
||||
# The date and time format to be used in the output messages.
|
||||
# The pattern describing the date and time format is the same that is used in java.text.SimpleDateFormat.
|
||||
# If the format is not specified or is invalid, the default format is used.
|
||||
# The default format is yyyy-MM-dd HH:mm:ss:SSS Z.
|
||||
#org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss:SSS Z
|
||||
|
||||
# Set to true if you want to output the current thread name.
|
||||
# Defaults to true.
|
||||
org.slf4j.simpleLogger.showThreadName=true
|
||||
|
||||
# Set to true if you want the Logger instance name to be included in output messages.
|
||||
# Defaults to true.
|
||||
#org.slf4j.simpleLogger.showLogName=true
|
||||
|
||||
# Set to true if you want the last component of the name to be included in output messages.
|
||||
# Defaults to false.
|
||||
#org.slf4j.simpleLogger.showShortLogName=false
|
||||
|
||||
|
10
back/test/src/test/kar/karideo/Common.java
Normal file
@ -0,0 +1,10 @@
|
||||
package test.kar.karideo;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.kar.archidata.tools.JWTWrapper;
|
||||
|
||||
public class Common {
|
||||
static String USER_TOKEN = JWTWrapper.createJwtTestToken(16512, "test_user_login", "KarAuth", "karideo", Map.of("karideo", Map.of("USER", Boolean.TRUE)));
|
||||
static String ADMIN_TOKEN = JWTWrapper.createJwtTestToken(16512, "test_admin_login", "KarAuth", "karideo", Map.of("karideo", Map.of("USER", Boolean.TRUE, "ADMIN", Boolean.TRUE)));
|
||||
}
|
33
back/test/src/test/kar/karideo/StepwiseExtension.java
Normal file
@ -0,0 +1,33 @@
|
||||
package test.kar.karideo;
|
||||
|
||||
import org.junit.jupiter.api.extension.ConditionEvaluationResult;
|
||||
import org.junit.jupiter.api.extension.ExecutionCondition;
|
||||
import org.junit.jupiter.api.extension.ExtensionContext;
|
||||
import org.junit.jupiter.api.extension.TestExecutionExceptionHandler;
|
||||
|
||||
class StepwiseExtension implements ExecutionCondition, TestExecutionExceptionHandler {
|
||||
@Override
|
||||
public ConditionEvaluationResult evaluateExecutionCondition(final ExtensionContext extensionContext) {
|
||||
final ExtensionContext.Namespace namespace = namespaceFor(extensionContext);
|
||||
final ExtensionContext.Store store = storeFor(extensionContext, namespace);
|
||||
final String value = store.get(StepwiseExtension.class, String.class);
|
||||
return value == null ? ConditionEvaluationResult.enabled("No test failures in stepwise tests")
|
||||
: ConditionEvaluationResult.disabled(String.format("Stepwise test disabled due to previous failure in '%s'", value));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleTestExecutionException(final ExtensionContext extensionContext, final Throwable throwable) throws Throwable {
|
||||
final ExtensionContext.Namespace namespace = namespaceFor(extensionContext);
|
||||
final ExtensionContext.Store store = storeFor(extensionContext, namespace);
|
||||
store.put(StepwiseExtension.class, extensionContext.getDisplayName());
|
||||
throw throwable;
|
||||
}
|
||||
|
||||
private ExtensionContext.Namespace namespaceFor(final ExtensionContext extensionContext) {
|
||||
return ExtensionContext.Namespace.create(StepwiseExtension.class, extensionContext.getParent());
|
||||
}
|
||||
|
||||
private ExtensionContext.Store storeFor(final ExtensionContext extensionContext, final ExtensionContext.Namespace namespace) {
|
||||
return extensionContext.getParent().get().getStore(namespace);
|
||||
}
|
||||
}
|
70
back/test/src/test/kar/karideo/TestHealthCheck.java
Normal file
@ -0,0 +1,70 @@
|
||||
package test.kar.karideo;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.MethodOrderer;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestMethodOrder;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.kar.archidata.db.DBEntry;
|
||||
import org.kar.archidata.exception.RESTErrorResponseExeption;
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.archidata.tools.RESTApi;
|
||||
import org.kar.karideo.api.HealthCheck.HealthResult;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@ExtendWith(StepwiseExtension.class)
|
||||
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||
public class TestHealthCheck {
|
||||
private final static Logger LOGGER = LoggerFactory.getLogger(TestHealthCheck.class);
|
||||
|
||||
static WebLauncherTest webInterface = null;
|
||||
static RESTApi api = null;
|
||||
|
||||
@BeforeAll
|
||||
public static void configureWebServer() throws Exception {
|
||||
LOGGER.info("configure server ...");
|
||||
webInterface = new WebLauncherTest();
|
||||
LOGGER.info("Create DB");
|
||||
try {
|
||||
webInterface.migrateDB();
|
||||
} catch (final Exception ex) {
|
||||
ex.printStackTrace();
|
||||
LOGGER.error("Detect an error: {}", ex.getMessage());
|
||||
}
|
||||
LOGGER.info("Start REST (BEGIN)");
|
||||
webInterface.process();
|
||||
LOGGER.info("Start REST (DONE)");
|
||||
api = new RESTApi(ConfigBaseVariable.apiAdress);
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public static void stopWebServer() throws InterruptedException, IOException {
|
||||
LOGGER.info("Kill the web server");
|
||||
webInterface.stop();
|
||||
webInterface = null;
|
||||
LOGGER.info("Remove the test db");
|
||||
DBEntry.closeAllForceMode();
|
||||
ConfigBaseVariable.clearAllValue();
|
||||
}
|
||||
|
||||
@Order(1)
|
||||
@Test
|
||||
// @RepeatedTest(10)
|
||||
public void checkHealthCheck() throws Exception {
|
||||
final HealthResult result = api.get(HealthResult.class, "health_check");
|
||||
Assertions.assertEquals("alive and kicking", result.value());
|
||||
}
|
||||
|
||||
@Order(2)
|
||||
@Test
|
||||
public void checkHealthCheckWrongAPI() throws Exception {
|
||||
Assertions.assertThrows(RESTErrorResponseExeption.class, () -> api.get(HealthResult.class, "health_checks"));
|
||||
}
|
||||
|
||||
}
|
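The test above deserializes the response into HealthCheck.HealthResult, which is not part of this diff; judging from the result.value() call and the "alive and kicking" comparison it is presumably a single-field record along these lines (assumption, the real definition lives in org.kar.karideo.api.HealthCheck):

// Assumed shape of HealthCheck.HealthResult, inferred from result.value() in the test above.
public record HealthResult(String value) {
}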
34
back/test/src/test/kar/karideo/WebLauncherTest.java
Executable file
@ -0,0 +1,34 @@
|
||||
|
||||
package test.kar.karideo;
|
||||
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.karideo.WebLauncher;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class WebLauncherTest extends WebLauncher {
|
||||
final private static Logger LOGGER = LoggerFactory.getLogger(WebLauncherTest.class);
|
||||
|
||||
public WebLauncherTest() {
|
||||
LOGGER.debug("Configure REST system");
|
||||
// for local test:
|
||||
ConfigBaseVariable.apiAdress = "http://127.0.0.1:12342/test/api/";
|
||||
// Enabling test mode permits access to the test token (never use it in production).
|
||||
ConfigBaseVariable.testMode = "true";
|
||||
// for the tests we use an in-memory SQLite DB.
|
||||
if (true) {
|
||||
if (!"true".equalsIgnoreCase(System.getenv("TEST_E2E_MODE"))) {
|
||||
ConfigBaseVariable.dbType = "sqlite";
|
||||
ConfigBaseVariable.dbHost = "memory";
|
||||
// for the tests we need to keep the DB connected at all times
|
||||
ConfigBaseVariable.dbKeepConnected = "true";
|
||||
}
|
||||
} else {
|
||||
// Enable this if you want to use a local MySQL database for testing with an adminer
|
||||
ConfigBaseVariable.bdDatabase = "test_db";
|
||||
ConfigBaseVariable.dbPort = "3309";
|
||||
ConfigBaseVariable.dbUser = "root";
|
||||
ConfigBaseVariable.dbPassword = "password";
|
||||
}
|
||||
}
|
||||
}
|
@ -1,42 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
import os
|
||||
import sys
|
||||
import requests # pip install requests
|
||||
|
||||
class upload_in_chunks(object):
|
||||
def __init__(self, filename, chunksize=1 << 13):
|
||||
self.filename = filename
|
||||
self.chunksize = chunksize
|
||||
self.totalsize = os.path.getsize(filename)
|
||||
self.readsofar = 0
|
||||
|
||||
def __iter__(self):
|
||||
with open(self.filename, 'rb') as file:
|
||||
while True:
|
||||
data = file.read(self.chunksize)
|
||||
if not data:
|
||||
sys.stderr.write("\n")
|
||||
break
|
||||
self.readsofar += len(data)
|
||||
percent = self.readsofar * 1e2 / self.totalsize
|
||||
sys.stderr.write("\rSendfing data: {percent:3.0f}% {size:14.0f} / {total_size}".format(percent=percent, size=self.readsofar, total_size=self.totalsize))
|
||||
yield data
|
||||
|
||||
def __len__(self):
|
||||
return self.totalsize
|
||||
|
||||
filename = 'Totally_Spies.mp4'
|
||||
|
||||
result = requests.post("http://127.0.0.1:15080/data", data=upload_in_chunks(filename, chunksize=4096))
|
||||
|
||||
|
||||
print("result : " + str(result) + " " + result.text)#str(dir(result)))
|
||||
|
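Worth noting about the deleted helper above: because upload_in_chunks implements both __iter__ and __len__, requests can announce a Content-Length up front instead of falling back to chunked transfer encoding. A small sketch illustrating that behaviour (DummyBody is a made-up stand-in, not part of the original script):

import requests  # pip install requests

class DummyBody:
    """Iterable body with a known length, mimicking upload_in_chunks."""
    def __init__(self, size):
        self.size = size
    def __iter__(self):
        yield b"x" * self.size
    def __len__(self):
        return self.size

# prepare() builds the request without sending it, so no server is needed
prepared = requests.Request("POST", "http://127.0.0.1:15080/data", data=DummyBody(1024)).prepare()
print(prepared.headers.get("Content-Length"))  # -> '1024' because __len__ is defined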
@ -1,860 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2019, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license MPL v2.0 (see license file)
|
||||
##
|
||||
import os
|
||||
import copy
|
||||
import sys
|
||||
import datetime
|
||||
import hashlib
|
||||
import requests # pip install requests
|
||||
import realog.debug as debug
|
||||
#import magic
|
||||
import json
|
||||
|
||||
debug.enable_color();
|
||||
|
||||
|
||||
|
||||
property = {
|
||||
#"hostname": "192.168.1.157",
|
||||
"hostname": "127.0.0.1",
|
||||
"port": 15080,
|
||||
"login": None,
|
||||
"password": None,
|
||||
}
|
||||
|
||||
def get_base_url():
|
||||
return "http://" + property["hostname"] + ":" + str(property["port"]) + "/"
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
class upload_in_chunks(object):
|
||||
def __init__(self, filename, chunksize=1 << 13):
|
||||
self.filename = filename
|
||||
self.chunksize = chunksize
|
||||
self.totalsize = os.path.getsize(filename)
|
||||
self.start_time = datetime.datetime.utcnow()
|
||||
self.readsofar = 0
|
||||
|
||||
def __iter__(self):
|
||||
with open(self.filename, 'rb') as file:
|
||||
while True:
|
||||
data = file.read(self.chunksize)
|
||||
if not data:
|
||||
sys.stderr.write("\n")
|
||||
break
|
||||
self.readsofar += len(data)
|
||||
percent = self.readsofar * 1e2 / self.totalsize
|
||||
since_time = datetime.datetime.utcnow() - self.start_time
|
||||
sys.stderr.write("\rSending data: {percent:3.0f}% {size:14.0f} / {total_size} {timeee}".format(percent=percent, size=self.readsofar, total_size=self.totalsize, timeee=str(since_time)))
|
||||
yield data
|
||||
|
||||
def __len__(self):
|
||||
return self.totalsize
|
||||
|
||||
#filename = 'Totally_Spies.mp4'
|
||||
#result = requests.post(get_base_url() + "data", data=upload_in_chunks(filename, chunksize=4096))
|
||||
#debug.info("result : " + str(result) + " " + result.text)#str(dir(result)))
|
||||
|
||||
|
||||
def extract_and_remove(_input_value, _start_mark, _stop_mark):
|
||||
values = []
|
||||
out = ""
|
||||
inside = False
|
||||
inside_data = ""
|
||||
for it in _input_value:
|
||||
if inside == False \
|
||||
and it == _start_mark:
|
||||
inside = True
|
||||
elif inside == True \
|
||||
and it == _stop_mark:
|
||||
inside = False
|
||||
values.append(inside_data)
|
||||
inside_data = ""
|
||||
elif inside == True:
|
||||
inside_data += it
|
||||
else:
|
||||
out += it
|
||||
return (out, values)
|
||||
|
||||
def create_directory_of_file(_file):
|
||||
path = os.path.dirname(_file)
|
||||
try:
|
||||
os.stat(path)
|
||||
except:
|
||||
os.makedirs(path)
|
||||
|
||||
##
|
||||
## @brief Write data in a specific path.
|
||||
## @param[in] path Path of the data might be written.
|
||||
## @param[in] data Data To write in the file.
|
||||
## @param[in] only_if_new (default: False) Write data only if data is different.
|
||||
## @return True Something has been copied
|
||||
## @return False Nothing has been copied
|
||||
##
|
||||
def file_write_data(_path, _data, _only_if_new=False):
|
||||
if _only_if_new == True:
|
||||
if os.path.exists(_path) == True:
|
||||
old_data = file_read_data(_path)
|
||||
if old_data == _data:
|
||||
return False
|
||||
#real write of data:
|
||||
create_directory_of_file(_path)
|
||||
file = open(_path, "w")
|
||||
file.write(_data)
|
||||
file.close()
|
||||
return True
|
||||
|
||||
def get_modify_time(_path):
|
||||
return os.stat(_path).st_mtime
|
||||
|
||||
def file_read_data(_path, _binary=False):
|
||||
debug.verbose("path= " + _path)
|
||||
if not os.path.isfile(_path):
|
||||
return ""
|
||||
if _binary == True:
|
||||
file = open(_path, "rb")
|
||||
else:
|
||||
file = open(_path, "r")
|
||||
data_file = file.read()
|
||||
file.close()
|
||||
return data_file
|
||||
|
||||
def calculate_sha512(_path):
|
||||
sha1 = hashlib.sha512()
|
||||
file = open(_path, "rb")
|
||||
while True:
|
||||
body = file.read(4096)
|
||||
sha1.update(body)
|
||||
file.close()
|
||||
return str(sha1.hexdigest())
|
||||
|
||||
def push_video_file(_path, _basic_key={}):
|
||||
file_name, file_extension = os.path.splitext(_path);
|
||||
debug.info("Send file: '" + file_name + "' with extention " + file_extension)
|
||||
# internal file_extension ...
|
||||
if file_extension == "sha512":
|
||||
debug.verbose("file: '" + _path + "' sha512 extention ...")
|
||||
return True
|
||||
|
||||
debug.info("Add media : '" + _path + "'")
|
||||
# "avi", , "mov", , "ts", "cover_1.tiff", "cover_1.bmp", "cover_1.tga"] copy only file that is supported by the html5 video player (chrome mode only)
|
||||
if file_extension[1:] not in ["webm", "mkv", "mp4"] \
|
||||
and file_name not in ["cover_1.jpg","cover_1.png"]:
|
||||
debug.warning("Not send file : " + _path + " Not manage file_extension... " + file_extension)
|
||||
return False
|
||||
|
||||
if file_name in ["cover_1.jpg","cover_1.png", "cover_1.till", "cover_1.bmp", "cover_1.tga"]:
|
||||
# find a cover...
|
||||
debug.warning("Not send cover Not managed ... : " + _path + " Not manage ...")
|
||||
"""
|
||||
result_group_data = requests.post(get_base_url() + "group/find", data=json.dumps({"name":_basic_key["series-name"]}, sort_keys=True, indent=4))
|
||||
debug.info("Create group ??? *********** : " + str(result_group_data) + " " + result_group_data.text)
|
||||
if result_group_data.status_code == 404:
|
||||
result_group_data = requests.post(get_base_url() + "group", data=json.dumps({"name":_basic_key["series-name"]}, sort_keys=True, indent=4))
|
||||
debug.info("yes we create new group *********** : " + str(result_group_data) + " " + result_group_data.text)
|
||||
group_id = result_group_data.json()["id"]
|
||||
|
||||
|
||||
result_group_data = requests.post(get_base_url() + "group", data=json.dumps({"name":_basic_key["series-name"]}, sort_keys=True, indent=4))
|
||||
debug.info("yes we create new group *********** : " + str(result_group_data) + " " + result_group_data.text)
|
||||
"""
|
||||
|
||||
"""
|
||||
debug.info("Send cover for: " + _basic_key["series-name"] + " " + _basic_key["saison"]);
|
||||
if _basic_key["series-name"] == "":
|
||||
debug.error(" ==> can not asociate at a specific seri");
|
||||
return False;
|
||||
|
||||
etk::String groupName = _basic_key["series-name"];
|
||||
if _basic_key["saison"] != "":
|
||||
groupName += ":" + _basic_key["saison"];
|
||||
|
||||
auto sending = _srv.setGroupCover(zeus::File::create(_path.getString(), ""), groupName);
|
||||
sending.onSignal(progressCallback);
|
||||
sending.waitFor(echrono::seconds(20000));
|
||||
"""
|
||||
return True
|
||||
|
||||
"""
|
||||
if etk::path::exist(_path + ".sha512") == True:
|
||||
debug.verbose("file sha512 exist ==> read it");
|
||||
uint64_t time_sha512 = get_modify_time(_path + ".sha512");
|
||||
uint64_t time_elem = get_modify_time(_path);
|
||||
storedSha512_file = file_read_data(_path + ".sha512")
|
||||
debug.verbose("file sha == " + storedSha512_file);
|
||||
if time_elem > time_sha512:
|
||||
debug.verbose("file time > sha time ==> regenerate new one ...");
|
||||
# check the current sha512
|
||||
storedSha512 = calculate_sha512(_path);
|
||||
debug.verbose("calculated new sha'" + storedSha512 + "'");
|
||||
if storedSha512_file != storedSha512:
|
||||
# need to remove the old sha file
|
||||
auto idFileToRemove_fut = _srv.getId(storedSha512_file).waitFor(echrono::seconds(2));
|
||||
if idFileToRemove_fut.hasError() == True:
|
||||
debug.error("can not remove the remote file with sha " + storedSha512_file);
|
||||
else:
|
||||
debug.info("Remove old deprecated file: " + storedSha512_file);
|
||||
_srv.remove(idFileToRemove_fut.get());
|
||||
# note, no need to wait the call is async ... and the user does not interested with the result ...
|
||||
|
||||
|
||||
# store new sha512 ==> this update tile too ...
|
||||
file.open(etk::io::OpenMode::Write);
|
||||
file.writeAll(storedSha512);
|
||||
file.close();
|
||||
else:
|
||||
# store new sha512
|
||||
/*
|
||||
storedSha512 = file.readAllString();
|
||||
file.open(etk::io::OpenMode::Read);
|
||||
file.writeAll(storedSha512);
|
||||
file.close();
|
||||
*/
|
||||
storedSha512 = storedSha512_file;
|
||||
debug.verbose("read all sha from the file'" + storedSha512 + "'");
|
||||
|
||||
else:
|
||||
"""
|
||||
"""
|
||||
if True:
|
||||
storedSha512 = calculate_sha512(_path)
|
||||
file_write_data(_path + ".sha512", storedSha512);
|
||||
debug.info("calculate and store sha512 '" + storedSha512 + "'");
|
||||
debug.info("check file existance: sha='" + storedSha512 + "'");
|
||||
"""
|
||||
|
||||
|
||||
# push only if the file exist
|
||||
"""
|
||||
# TODO : Check the metadata updating ...
|
||||
auto idFile_fut = _srv.getId(storedSha512).waitFor(echrono::seconds(2));
|
||||
if idFile_fut.hasError() == False:
|
||||
# media already exit ==> stop here ...
|
||||
return True;
|
||||
|
||||
# TODO: Do it better ==> add the calback to know the push progression ...
|
||||
debug.verbose("Add File : " + _path + " sha='" + storedSha512 + "'");
|
||||
auto sending = _srv.add(zeus::File::create(_path, storedSha512));
|
||||
sending.onSignal(progressCallback);
|
||||
debug.verbose("Add done ... now waiting ... ");
|
||||
uint32_t mediaId = sending.waitFor(echrono::seconds(20000)).get();
|
||||
debug.verbose("END WAITING ... ");
|
||||
if mediaId == 0:
|
||||
debug.error("Get media ID = 0 With no error");
|
||||
return False;
|
||||
"""
|
||||
#mime = magic.Magic(mime=True)
|
||||
#mime_type = mime.from_file(_path)
|
||||
mime_type = "unknown"
|
||||
# do it by myself .. it is better ...
|
||||
filename___, file_extension = os.path.splitext(_path)
|
||||
if file_extension == "mkv":
|
||||
mime_type = "video/x-matroska"
|
||||
elif file_extension == "mka":
|
||||
mime_type = "audio/x-matroska"
|
||||
elif file_extension == "mp4":
|
||||
mime_type = "video/mp4"
|
||||
elif file_extension == "webm":
|
||||
mime_type = "video/webm"
|
||||
elif file_extension == "json":
|
||||
mime_type = "application/json"
|
||||
elif file_extension == "jpeg":
|
||||
mime_type = "image/jpeg"
|
||||
elif file_extension == "png":
|
||||
mime_type = "image/png"
|
||||
headers_values = {
|
||||
'filename': _path,
|
||||
'mime-type': mime_type
|
||||
}
|
||||
"""
|
||||
,
|
||||
'Connection': "keep-alive"
|
||||
}
|
||||
"""
|
||||
result_send_data = requests.post(get_base_url() + "data", headers=headers_values, data=upload_in_chunks(_path, chunksize=4096))
|
||||
debug.info("result *********** : " + str(result_send_data) + " " + result_send_data.text)
|
||||
file_name = os.path.basename(file_name)
|
||||
debug.info("Find file_name : '" + file_name + "'");
|
||||
# Remove Date (XXXX) or other title
|
||||
file_name, dates = extract_and_remove(file_name, '(', ')');
|
||||
have_date = False
|
||||
have_Title = False
|
||||
for it in dates:
|
||||
if len(it) == 0:
|
||||
continue
|
||||
if it[0] == '0' \
|
||||
or it[0] == '1' \
|
||||
or it[0] == '2' \
|
||||
or it[0] == '3' \
|
||||
or it[0] == '4' \
|
||||
or it[0] == '5' \
|
||||
or it[0] == '6' \
|
||||
or it[0] == '7' \
|
||||
or it[0] == '8' \
|
||||
or it[0] == '9':
|
||||
# find a date ...
|
||||
if have_date == True:
|
||||
debug.info(" '" + file_name + "'")
|
||||
debug.error("Parse Date error : () : " + it + " ==> multiple date")
|
||||
continue
|
||||
have_date = True
|
||||
_basic_key["date"] = it
|
||||
else:
|
||||
if have_Title == True:
|
||||
debug.info(" '" + file_name + "'")
|
||||
debug.error("Parse Title error : () : " + it + " ==> multiple title")
|
||||
continue
|
||||
have_Title = True
|
||||
# Other title
|
||||
_basic_key.set["title2"] = it;
|
||||
|
||||
# Remove the actors [XXX YYY][EEE TTT]...
|
||||
file_name, actors = extract_and_remove(file_name, '[', ']');
|
||||
if len(actors) > 0:
|
||||
debug.info(" '" + file_name + "'")
|
||||
actor_list = []
|
||||
for it_actor in actors:
|
||||
if actor_list != "":
|
||||
actor_list += ";"
|
||||
actor_list.append(it_actor)
|
||||
_basic_key["actors"] = actor_list
|
||||
list_element_base = file_name.split('-')
|
||||
debug.warning("==> Title file: " + file_name)
|
||||
debug.warning("==> Title cut : " + str(list_element_base))
|
||||
|
||||
list_element = [];
|
||||
tmp_start_string = "";
|
||||
iii = 0
|
||||
while iii <len(list_element_base):
|
||||
if list_element_base[iii][0] != 's' \
|
||||
and list_element_base[iii][0] != 'e':
|
||||
if tmp_start_string != "":
|
||||
tmp_start_string += '-'
|
||||
tmp_start_string += list_element_base[iii]
|
||||
else:
|
||||
list_element.append(tmp_start_string)
|
||||
tmp_start_string = ""
|
||||
while iii<len(list_element_base):
|
||||
list_element.append(list_element_base[iii])
|
||||
iii += 1
|
||||
iii += 1
|
||||
|
||||
debug.warning("==> start elem: " + str(tmp_start_string))
|
||||
|
||||
if tmp_start_string != "":
|
||||
list_element.append(tmp_start_string)
|
||||
|
||||
debug.warning("==> list_element : " + str(list_element))
|
||||
|
||||
if len(list_element) == 1:
|
||||
# nothing to do , it might be a film ...
|
||||
_basic_key["title"] = list_element[0]
|
||||
else:
|
||||
if len(list_element) > 3 \
|
||||
and list_element[1][0] == 's' \
|
||||
and list_element[2][0] == 'e':
|
||||
debug.warning("Parse format: xxx-sXX-eXX-kjhlkjlkj(1234).*")
|
||||
# internal formalisme ...
|
||||
saison = -1;
|
||||
episode = -1;
|
||||
series_name = list_element[0];
|
||||
|
||||
_basic_key["series-name"] = series_name
|
||||
full_episode_name = list_element[3]
|
||||
for yyy in range(4, len(list_element)):
|
||||
full_episode_name += "-" + list_element[yyy]
|
||||
|
||||
_basic_key["title"] = full_episode_name
|
||||
if list_element[1][1:] == "XX":
|
||||
# saison unknow ... ==> nothing to do ...
|
||||
#saison = 123456789;
|
||||
pass
|
||||
else:
|
||||
saison = int(list_element[1][1:]);
|
||||
|
||||
if list_element[2][1:] == "XX":
|
||||
# episode unknow ... ==> nothing to do ...
|
||||
pass
|
||||
else:
|
||||
episode = int(list_element[2][1:]);
|
||||
_basic_key["episode"] = int(episode)
|
||||
|
||||
debug.info("Find a internal mode series: :");
|
||||
debug.info(" origin : '" + file_name + "'");
|
||||
saisonPrint = "XX";
|
||||
episodePrint = "XX";
|
||||
if saison < 0:
|
||||
# nothing to do
|
||||
pass
|
||||
else:
|
||||
saisonPrint = str(saison)
|
||||
_basic_key["saison"] = saison
|
||||
|
||||
if episode < 0:
|
||||
# nothing to do
|
||||
pass
|
||||
elif episode < 10:
|
||||
episodePrint = "0" + str(episode);
|
||||
_basic_key["episode"] = episode
|
||||
else:
|
||||
episodePrint = str(episode);
|
||||
_basic_key["episode"] = episode
|
||||
|
||||
debug.info(" ==> '" + series_name + "-s" + saisonPrint + "-e" + episodePrint + "-" + full_episode_name + "'");
|
||||
elif len(list_element) > 2 \
|
||||
and list_element[1][0] == 'e':
|
||||
debug.warning("Parse format: xxx-eXX-kjhlkjlkj(1234).*")
|
||||
# internal formalisme ...
|
||||
saison = -1;
|
||||
episode = -1;
|
||||
series_name = list_element[0];
|
||||
|
||||
_basic_key["series-name"] = series_name
|
||||
full_episode_name = list_element[2]
|
||||
for yyy in range(3, len(list_element)):
|
||||
full_episode_name += "-" + list_element[yyy]
|
||||
|
||||
_basic_key["title"] = full_episode_name
|
||||
if list_element[1][1:] == "XX":
|
||||
# episode unknow ... ==> nothing to do ...
|
||||
pass
|
||||
else:
|
||||
episode = int(list_element[1][1:]);
|
||||
_basic_key["episode"] = int(episode)
|
||||
|
||||
debug.info("Find a internal mode series: :");
|
||||
debug.info(" origin : '" + file_name + "'");
|
||||
saisonPrint = "XX";
|
||||
episodePrint = "XX";
|
||||
if episode < 0:
|
||||
# nothing to do
|
||||
pass
|
||||
elif episode < 10:
|
||||
episodePrint = "0" + str(episode);
|
||||
_basic_key["episode"] = episode
|
||||
else:
|
||||
episodePrint = str(episode);
|
||||
_basic_key["episode"] = episode
|
||||
|
||||
debug.info(" ==> '" + series_name + "-s" + saisonPrint + "-e" + episodePrint + "-" + full_episode_name + "'");
|
||||
|
||||
|
||||
result_send_data_json = json.loads(result_send_data.text)
|
||||
debug.info("pared meta data: " + json.dumps(_basic_key, sort_keys=True, indent=4))
|
||||
data_model = {
|
||||
"type_id": _basic_key["type"],
|
||||
"data_id": result_send_data_json["id"],
|
||||
#"group_id": int,
|
||||
"name": _basic_key["title"],
|
||||
# number of second
|
||||
"time": None,
|
||||
}
|
||||
for elem in ["date", "description", "episode"]: #["actors", "date", "description", "episode", "title2"]:
|
||||
if elem in _basic_key.keys():
|
||||
data_model[elem] = _basic_key[elem]
|
||||
if "series-name" in _basic_key.keys():
|
||||
result_group_data = requests.post(get_base_url() + "group/find", data=json.dumps({"name":_basic_key["series-name"]}, sort_keys=True, indent=4))
|
||||
debug.info("Create group ??? *********** : " + str(result_group_data) + " " + result_group_data.text)
|
||||
if result_group_data.status_code == 404:
|
||||
result_group_data = requests.post(get_base_url() + "group", data=json.dumps({"name":_basic_key["series-name"]}, sort_keys=True, indent=4))
|
||||
debug.info("yes we create new group *********** : " + str(result_group_data) + " " + result_group_data.text)
|
||||
group_id = result_group_data.json()["id"]
|
||||
data_model["group_id"] = group_id
|
||||
if "saison" in _basic_key.keys():
|
||||
result_saison_data = requests.post(get_base_url() + "saison/find", data=json.dumps({"number":_basic_key["saison"], "group_id":group_id}, sort_keys=True, indent=4))
|
||||
debug.info("Create saison ??? *********** : " + str(result_saison_data) + " " + result_saison_data.text)
|
||||
if result_saison_data.status_code == 404:
|
||||
result_saison_data = requests.post(get_base_url() + "saison", data=json.dumps({"number":_basic_key["saison"], "group_id":group_id}, sort_keys=True, indent=4))
|
||||
debug.info("yes we create new saison *********** : " + str(result_saison_data) + " " + result_saison_data.text)
|
||||
saison_id = result_saison_data.json()["id"]
|
||||
data_model["saison_id"] = saison_id
|
||||
|
||||
result_send_data = requests.post(get_base_url() + "video", data=json.dumps(data_model, sort_keys=True, indent=4))
|
||||
debug.info("result *********** : " + str(result_send_data) + " " + result_send_data.text)
|
||||
|
||||
return True
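The push helper above hard-codes its extension-to-MIME table ("do it by myself .. it is better ..."). For comparison, a sketch based on Python's standard mimetypes module (a hypothetical alternative, not what the deleted script did), with the Matroska types registered explicitly since many platforms do not know them out of the box:

import mimetypes

# Matroska types are frequently missing from the system tables
mimetypes.add_type("video/x-matroska", ".mkv")
mimetypes.add_type("audio/x-matroska", ".mka")

def guess_mime(path):
    # guess_type() keys off the file extension and returns (type, encoding)
    mime_type, _encoding = mimetypes.guess_type(path)
    return mime_type or "unknown"

print(guess_mime("episode-s01-e02.mkv"))  # video/x-matroska
print(guess_mime("cover_1.png"))          # image/png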
|
||||
|
||||
|
||||
def install_video_path( _path, _basic_key = {}):
|
||||
debug.info("Parse : '" + _path + "'");
|
||||
list_sub_path = [fff for fff in os.listdir(_path) if os.path.isdir(os.path.join(_path, fff))]
|
||||
for it_path in list_sub_path:
|
||||
basic_key_tmp = copy.deepcopy(_basic_key)
|
||||
debug.info("Add Sub path: '" + it_path + "'");
|
||||
if len(basic_key_tmp) == 0:
|
||||
debug.info("find A '" + it_path + "' " + str(len(basic_key_tmp)));
|
||||
if it_path == "documentary":
|
||||
basic_key_tmp["type"] = 0
|
||||
elif it_path == "film":
|
||||
basic_key_tmp["type"] = 1
|
||||
elif it_path == "film-annimation":
|
||||
basic_key_tmp["type"] = 2
|
||||
elif it_path == "film-short":
|
||||
basic_key_tmp["type"] = 3
|
||||
elif it_path == "tv-show":
|
||||
basic_key_tmp["type"] = 4
|
||||
elif it_path == "tv-show-annimation":
|
||||
basic_key_tmp["type"] = 5
|
||||
elif it_path == "theater":
|
||||
basic_key_tmp["type"] = 6
|
||||
elif it_path == "one-man":
|
||||
basic_key_tmp["type"] = 7
|
||||
elif it_path == "concert":
|
||||
basic_key_tmp["type"] = 8
|
||||
elif it_path == "opera":
|
||||
basic_key_tmp["type"] = 9
|
||||
else:
|
||||
debug.info("find B '" + it_path + "' " + str(len(basic_key_tmp)))
|
||||
if it_path == "saison_01":
|
||||
basic_key_tmp["saison"] = 1
|
||||
elif it_path == "saison_02":
|
||||
basic_key_tmp["saison"] = 2
|
||||
elif it_path == "saison_03":
|
||||
basic_key_tmp["saison"] = 3
|
||||
elif it_path == "saison_04":
|
||||
basic_key_tmp["saison"] = 4
|
||||
elif it_path == "saison_05":
|
||||
basic_key_tmp["saison"] = 5
|
||||
elif it_path == "saison_06":
|
||||
basic_key_tmp["saison"] = 6
|
||||
elif it_path == "saison_07":
|
||||
basic_key_tmp["saison"] = 7
|
||||
elif it_path == "saison_08":
|
||||
basic_key_tmp["saison"] = 8
|
||||
elif it_path == "saison_09":
|
||||
basic_key_tmp["saison"] = 9
|
||||
elif it_path == "saison_10":
|
||||
basic_key_tmp["saison"] = 10
|
||||
elif it_path == "saison_11":
|
||||
basic_key_tmp["saison"] = 11
|
||||
elif it_path == "saison_12":
|
||||
basic_key_tmp["saison"] = 12
|
||||
elif it_path == "saison_13":
|
||||
basic_key_tmp["saison"] = 13
|
||||
elif it_path == "saison_14":
|
||||
basic_key_tmp["saison"] = 14
|
||||
elif it_path == "saison_15":
|
||||
basic_key_tmp["saison"] = 15
|
||||
elif it_path == "saison_16":
|
||||
basic_key_tmp["saison"] = 16
|
||||
elif it_path == "saison_17":
|
||||
basic_key_tmp["saison"] = 17
|
||||
elif it_path == "saison_18":
|
||||
basic_key_tmp["saison"] = 18
|
||||
elif it_path == "saison_19":
|
||||
basic_key_tmp["saison"] = 19
|
||||
elif it_path == "saison_20":
|
||||
basic_key_tmp["saison"] = 20
|
||||
elif it_path == "saison_21":
|
||||
basic_key_tmp["saison"] = 21
|
||||
elif it_path == "saison_22":
|
||||
basic_key_tmp["saison"] = 22
|
||||
elif it_path == "saison_23":
|
||||
basic_key_tmp["saison"] = 23
|
||||
elif it_path == "saison_24":
|
||||
basic_key_tmp["saison"] = 24
|
||||
elif it_path == "saison_25":
|
||||
basic_key_tmp["saison"] = 25
|
||||
elif it_path == "saison_26":
|
||||
basic_key_tmp["saison"] = 26
|
||||
elif it_path == "saison_27":
|
||||
basic_key_tmp["saison"] = 27
|
||||
elif it_path == "saison_28":
|
||||
basic_key_tmp["saison"] = 28
|
||||
elif it_path == "saison_29":
|
||||
basic_key_tmp["saison"] = 29
|
||||
else:
|
||||
basic_key_tmp["series-name"] = it_path
|
||||
debug.info("add a path " + os.path.join(_path, it_path) + " with keys " + str(basic_key_tmp))
|
||||
install_video_path(os.path.join(_path, it_path), basic_key_tmp);
|
||||
|
||||
# Add files :
|
||||
list_sub_file = [fff for fff in os.listdir(_path) if os.path.isfile(os.path.join(_path, fff))]
|
||||
for it_file in list_sub_file:
|
||||
basic_key_tmp = copy.deepcopy(_basic_key)
|
||||
push_video_file(os.path.join(_path, it_file), basic_key_tmp);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import death.Arguments as arguments
|
||||
import death.ArgElement as arg_element
|
||||
|
||||
my_args = arguments.Arguments()
|
||||
my_args.add_section("option", "Can be set one time in all case")
|
||||
my_args.add("h", "help", desc="Display this help")
|
||||
my_args.add("", "version", desc="Display the application version")
|
||||
my_args.add("v", "verbose", list=[
|
||||
["0","None"],
|
||||
["1","error"],
|
||||
["2","warning"],
|
||||
["3","info"],
|
||||
["4","debug"],
|
||||
["5","verbose"],
|
||||
["6","extreme_verbose"],
|
||||
], desc="display debug level (verbose) default =2")
|
||||
my_args.add("a", "action", list=[
|
||||
["tree","List all the files in a tree view ..."],
|
||||
["list","List all the files"],
|
||||
["push","push a single file"],
|
||||
["push_path","push a full folder"],
|
||||
], desc="possible action")
|
||||
my_args.add("c", "color", desc="Display message in color")
|
||||
my_args.add("f", "folder", haveParam=False, desc="Display the folder instead of the git repository name")
|
||||
local_argument = my_args.parse()
|
||||
|
||||
##
|
||||
## @brief Display the help of this package.
|
||||
##
|
||||
def usage():
|
||||
color = debug.get_color_set()
|
||||
# generic argument displayed :
|
||||
my_args.display()
|
||||
exit(0)
|
||||
|
||||
##
|
||||
## @brief Display the version of this package.
|
||||
##
|
||||
def version():
|
||||
color = debug.get_color_set()
|
||||
import pkg_resources
|
||||
debug.info("version: 0.0.0")
|
||||
foldername = os.path.dirname(__file__)
|
||||
debug.info("source folder is: " + foldername)
|
||||
exit(0)
|
||||
|
||||
folder = "dataPush"
|
||||
requestAction = "list"
|
||||
|
||||
# preparse the argument to get the verbose element for debug mode
|
||||
def parse_arg(argument):
|
||||
debug.warning("parse arg : " + argument.get_option_name() + " " + argument.get_arg())
|
||||
if argument.get_option_name() == "help":
|
||||
usage()
|
||||
return True
|
||||
elif argument.get_option_name() == "version":
|
||||
version()
|
||||
return True
|
||||
elif argument.get_option_name() == "verbose":
|
||||
debug.set_level(int(argument.get_arg()))
|
||||
return True
|
||||
elif argument.get_option_name() == "color":
|
||||
if check_boolean(argument.get_arg()) == True:
|
||||
debug.enable_color()
|
||||
else:
|
||||
debug.disable_color()
|
||||
return True
|
||||
elif argument.get_option_name() == "folder":
|
||||
folder = argument.get_arg()
|
||||
return True
|
||||
elif argument.get_option_name() == "action":
|
||||
global requestAction
|
||||
requestAction = argument.get_arg()
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
# parse default unique argument:
|
||||
for argument in local_argument:
|
||||
parse_arg(argument)
|
||||
|
||||
debug.info("==================================");
|
||||
debug.info("== ZEUS test client start ==");
|
||||
debug.info("==================================");
|
||||
|
||||
|
||||
def show_video(elem_video_id, indent):
|
||||
indent_data = ""
|
||||
while indent > 0:
|
||||
indent_data += "\t"
|
||||
indent -= 1
|
||||
result_video = requests.get(get_base_url() + "video/" + str(elem_video_id) + "")
|
||||
if result_video.status_code == 200:
|
||||
video = result_video.json()
|
||||
debug.info(indent_data + "- " + str(video["generated_name"]))
|
||||
else:
|
||||
debug.warning(indent_data + "get video id: " + str(elem_video_id) + " !!!!!! " + str(result_video.status_code) + "")
|
||||
|
||||
# ****************************************************************************************
|
||||
# ** Clear All the data base ...
|
||||
# ****************************************************************************************
|
||||
if requestAction == "clear":
|
||||
debug.info("============================================");
|
||||
debug.info("== Clear data base: ");
|
||||
debug.info("============================================");
|
||||
# TODO : Do it :
|
||||
debug.error("NEED to add check in cmd line to execute it ...");
|
||||
"""
|
||||
uint32_t count = remoteServiceVideo.count().wait().get();
|
||||
debug.debug("have " + count + " medias");
|
||||
for (uint32_t iii=0; iii<count ; iii += 1024:
|
||||
uint32_t tmpMax = etk::min(iii + 1024, count);
|
||||
debug.debug("read section " + iii + " -> " + tmpMax);
|
||||
etk::Vector<uint32_t> list = remoteServiceVideo.getIds(iii,tmpMax).wait().get();
|
||||
zeus::FutureGroup groupWait;
|
||||
for (auto& it : list:
|
||||
debug.info("remove ELEMENT : " + it);
|
||||
groupWait.add(remoteServiceVideo.remove(it));
|
||||
groupWait.waitFor(echrono::seconds(2000));
|
||||
"""
|
||||
debug.info("============================================");
|
||||
debug.info("== DONE ==");
|
||||
debug.info("============================================");
|
||||
elif requestAction == "list":
|
||||
debug.info("============================================");
|
||||
debug.info("== list files: ");
|
||||
debug.info("============================================");
|
||||
list_types = requests.get(get_base_url() + "type")
|
||||
if list_types.status_code != 200:
|
||||
debug.warning(" !! ca, ot get type list ... " + str(list_types.status_code) + "")
|
||||
for elem in list_types.json():
|
||||
debug.info(" get type id: " + str(elem["id"]))
|
||||
debug.info(" name: " + str(elem["name"]))
|
||||
# get the count of video in this type
|
||||
result_count = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/count")
|
||||
if result_count.status_code == 200:
|
||||
debug.info(" count: " + str(result_count.json()["count"]))
|
||||
else:
|
||||
debug.warning(" count: !!!!!! " + str(result_count.status_code) + "")
|
||||
# get all the video list
|
||||
result_video = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/video")
|
||||
if result_video.status_code == 200:
|
||||
if len(result_video.json()) != 0:
|
||||
debug.info(" List video: " + str(result_video.json()))
|
||||
else:
|
||||
debug.warning(" List video: !!!!!! " + str(result_video.status_code) + "")
|
||||
# get list of groups for this type
|
||||
result_groups = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/group")
|
||||
if result_groups.status_code == 200:
|
||||
if len(result_groups.json()) != 0:
|
||||
debug.info(" List group: " + str(result_groups.json()))
|
||||
else:
|
||||
debug.warning(" List group: !!!!!! " + str(result_groups.status_code) + "")
|
||||
# get list of video without groups
|
||||
result_video_solo = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/video_no_group")
|
||||
if result_video_solo.status_code == 200:
|
||||
if len(result_video_solo.json()) != 0:
|
||||
debug.info(" List video solo: " + str(result_video_solo.json()))
|
||||
else:
|
||||
debug.warning(" List video solo: !!!!!! " + str(result_video_solo.status_code) + "")
|
||||
elif requestAction == "tree":
|
||||
debug.info("============================================");
|
||||
debug.info("== tree files: ");
|
||||
debug.info("============================================");
|
||||
list_types = requests.get(get_base_url() + "type")
|
||||
if list_types.status_code != 200:
|
||||
debug.warning(" !! ca, ot get type list ... " + str(list_types.status_code) + "")
|
||||
for elem in list_types.json():
|
||||
debug.info("-------------------------------------------------")
|
||||
debug.info(" " + str(elem["name"]))
|
||||
debug.info("-------------------------------------------------")
|
||||
# First get all the groups:
|
||||
result_groups = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/group")
|
||||
if result_groups.status_code == 200:
|
||||
for elem_group_id in result_groups.json():
|
||||
result_group = requests.get(get_base_url() + "group/" + str(elem_group_id) + "")
|
||||
if result_group.status_code == 200:
|
||||
group = result_group.json()
|
||||
debug.info("\to- " + str(group["name"]))
|
||||
# step 1: all the saison:
|
||||
result_saison_in_group = requests.get(get_base_url() + "group/" + str(elem_group_id) + "/saison")
|
||||
if result_saison_in_group.status_code == 200:
|
||||
for elem_saison_id in result_saison_in_group.json():
|
||||
result_saison = requests.get(get_base_url() + "saison/" + str(elem_saison_id) + "")
|
||||
if result_saison.status_code == 200:
|
||||
debug.info("\t\t* saison " + str(result_saison.json()["number"]))
|
||||
result_videos_in_saison = requests.get(get_base_url() + "saison/" + str(result_saison.json()["id"]) + "/video")
|
||||
if result_videos_in_saison.status_code == 200:
|
||||
for elem_video_id in result_videos_in_saison.json():
|
||||
show_video(elem_video_id, 3)
|
||||
else:
|
||||
debug.warning("\t\tget video in saison id: " + str(elem_saison_id) + " !!!!!! " + str(result_videos_in_saison.status_code) + "")
|
||||
show_video(elem_video_id, 2)
|
||||
else:
|
||||
debug.warning("\t\tget saison id: " + str(elem_saison_id) + " !!!!!! " + str(result_saison.status_code) + "")
|
||||
else:
|
||||
debug.warning("\t\tget saison in group id: " + str(elem_group_id) + " !!!!!! " + str(result_saison_in_group.status_code) + "")
|
||||
# step 2: all the video with no saison:
|
||||
result_videos_in_group = requests.get(get_base_url() + "group/" + str(elem_group_id) + "/video_no_saison")
|
||||
if result_videos_in_group.status_code == 200:
|
||||
for elem_video_id in result_videos_in_group.json():
|
||||
show_video(elem_video_id, 2)
|
||||
else:
|
||||
debug.warning("\t\tget video in group id: " + str(elem_group_id) + " !!!!!! " + str(result_videos_in_group.status_code) + "")
|
||||
else:
|
||||
debug.warning("\tget group id: " + str(elem_group_id) + " !!!!!! " + str(result_group.status_code) + "")
|
||||
else:
|
||||
debug.warning("\t\tList group: !!!!!! " + str(result_groups.status_code) + "")
|
||||
# get list of video without groups
|
||||
result_video_solo = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/video_no_group")
|
||||
if result_video_solo.status_code == 200:
|
||||
for elem_video_id in result_video_solo.json():
|
||||
show_video(elem_video_id, 1)
|
||||
else:
|
||||
debug.warning("\t\tList video solo: !!!!!! " + str(result_video_solo.status_code) + "")
|
||||
|
||||
|
||||
"""
|
||||
uint32_t count = remoteServiceVideo.count().wait().get();
|
||||
debug.debug("have " + count + " medias");
|
||||
for (uint32_t iii=0; iii<count ; iii += 1024:
|
||||
uint32_t tmpMax = etk::min(iii + 1024, count);
|
||||
debug.debug("read section " + iii + " -> " + tmpMax);
|
||||
etk::Vector<uint32_t> list = remoteServiceVideo.getIds(iii, tmpMax).wait().get();
|
||||
for (auto& it : list:
|
||||
# Get the media
|
||||
zeus::ProxyMedia media = remoteServiceVideo.get(it).waitFor(echrono::seconds(2000)).get();
|
||||
if media.exist() == False:
|
||||
debug.error("get media error");
|
||||
return -1;
|
||||
debug.debug(" Get title ...");
|
||||
etk::String name = media.getMetadata("title").wait().get();
|
||||
debug.debug(" Get series-name ...");
|
||||
etk::String serie = media.getMetadata("series-name").wait().get();
|
||||
debug.debug(" Get episode ...");
|
||||
etk::String episode = media.getMetadata("episode").wait().get();
|
||||
debug.debug(" Get saison ...");
|
||||
etk::String saison = media.getMetadata("saison").wait().get();
|
||||
etk::String outputDesc = "";
|
||||
if serie != "":
|
||||
outputDesc += serie + "-";
|
||||
if saison != "":
|
||||
outputDesc += "s" + saison + "-";
|
||||
if episode != "":
|
||||
outputDesc += "e" + episode + "-";
|
||||
outputDesc += name;
|
||||
debug.info("[" + it + "] '" + outputDesc + "'");
|
||||
"""
|
||||
debug.info("============================================");
|
||||
debug.info("== DONE ==");
|
||||
debug.info("============================================");
|
||||
elif requestAction == "push":
|
||||
debug.info("============================================");
|
||||
debug.info("== push file: ");
|
||||
debug.info("============================================");
|
||||
push_video_file(folder);
|
||||
debug.info("============================================");
|
||||
debug.info("== DONE ==");
|
||||
debug.info("============================================");
|
||||
elif requestAction == "push_path":
|
||||
debug.info("============================================");
|
||||
debug.info("== push path: ");
|
||||
debug.info("============================================");
|
||||
install_video_path(folder);
|
||||
debug.info("============================================");
|
||||
debug.info("== DONE ==");
|
||||
debug.info("============================================");
|
||||
else:
|
||||
debug.info("============================================");
|
||||
debug.error("== Unknow action: '" + requestAction + "'");
|
||||
debug.info("============================================");
|
@ -1,354 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2016, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license APACHE v2.0 (see license file)
|
||||
##
|
||||
import os
|
||||
import fnmatch
|
||||
import sys
|
||||
import subprocess
|
||||
import shlex
|
||||
##
|
||||
## @brief Execute the command with no get of output
|
||||
##
|
||||
def run_command(cmd_line):
|
||||
# prepare command line:
|
||||
args = shlex.split(cmd_line)
|
||||
print("[INFO] cmd = " + str(args))
|
||||
try:
|
||||
# create the subprocess
|
||||
p = subprocess.Popen(args)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print("[ERROR] subprocess.CalledProcessError : " + str(args))
|
||||
return False
|
||||
#except:
|
||||
# debug.error("Exception on : " + str(args))
|
||||
# launch the subprocess:
|
||||
output, err = p.communicate()
|
||||
# Check error :
|
||||
if p.returncode == 0:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
##
|
||||
## @brief Get list of all Files in a specific path (with a regex)
|
||||
## @param[in] path (string) Full path of the machine to search files (start with / or x:)
|
||||
## @param[in] regex (string) Regular expression to search data
|
||||
## @param[in] recursive (bool) List file with recursive search
|
||||
## @param[in] remove_path (string) Data to remove in the path
|
||||
## @return (list) return files requested
|
||||
##
|
||||
def get_list_of_file_in_path(path, regex="*", recursive = False, remove_path=""):
|
||||
out = []
|
||||
if os.path.isdir(os.path.realpath(path)):
|
||||
tmp_path = os.path.realpath(path)
|
||||
tmp_rule = regex
|
||||
else:
|
||||
debug.error("path does not exist : '" + str(path) + "'")
|
||||
|
||||
for root, dirnames, filenames in os.walk(tmp_path):
|
||||
deltaRoot = root[len(tmp_path):]
|
||||
while len(deltaRoot) > 0 \
|
||||
and ( deltaRoot[0] == '/' \
|
||||
or deltaRoot[0] == '\\' ):
|
||||
deltaRoot = deltaRoot[1:]
|
||||
if recursive == False \
|
||||
and deltaRoot != "":
|
||||
return out
|
||||
tmpList = filenames
|
||||
if len(tmp_rule) > 0:
|
||||
tmpList = fnmatch.filter(filenames, tmp_rule)
|
||||
# Import the module :
|
||||
for cycleFile in tmpList:
|
||||
#for cycleFile in filenames:
|
||||
add_file = os.path.join(tmp_path, deltaRoot, cycleFile)
|
||||
if len(remove_path) != 0:
|
||||
if add_file[:len(remove_path)] != remove_path:
|
||||
print("ERROR : Request remove start of a path that is not the same: '" + add_file[:len(remove_path)] + "' demand remove of '" + str(remove_path) + "'")
|
||||
else:
|
||||
add_file = add_file[len(remove_path)+1:]
|
||||
out.append(add_file)
|
||||
return out;
|
||||
|
||||
#ffmpeg -i 000.ts -threads 0 -vcodec libx264 -crf 20 -force_key_frames expr:gte\(t,n_forced*1\) -s 720x540 -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace 000_transcoded.ts
|
||||
#ffmpeg -i 000.ts -threads 0 -vcodec libx264 -crf 20 -force_key_frames expr:gte\(t,n_forced*1\) -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace 000_transcoded.ts
|
||||
|
||||
"""
|
||||
def remove_group(list_of_file=[], total_count_of_file=0):
|
||||
id_elem = 0
|
||||
for elem in list_of_file:
|
||||
id_elem += 1
|
||||
tmpfile_name = elem.replace(" ", "\ ").replace("!", "\\!").replace("'", "\\'")
|
||||
print(" [" + str(id_elem) + " / " + str(total_count_of_file) + "] " + tmpfile_name)
|
||||
cmd_line = "rm " + tmpfile_name
|
||||
ret = run_command(cmd_line)
|
||||
|
||||
list_files_tmp = get_list_of_file_in_path('.', "*__", recursive = True)
|
||||
remove_group(list_files_tmp, len(list_files_tmp))
|
||||
list_files_sha512 = get_list_of_file_in_path('.', "*.sha512", recursive = True)
|
||||
remove_group(list_files_sha512, len(list_files_sha512))
|
||||
exit(0)
|
||||
"""
|
||||
|
||||
list_files_ts = get_list_of_file_in_path('.', "*.ts", recursive = True)
|
||||
list_files_flv = get_list_of_file_in_path('.', "*.flv", recursive = True)
|
||||
list_files_mp4 = get_list_of_file_in_path('.', "*.mp4", recursive = True)
|
||||
list_files_avi = get_list_of_file_in_path('.', "*.avi", recursive = True)
|
||||
list_files_mkv = get_list_of_file_in_path('.', "*.mkv", recursive = True)
|
||||
list_files_wmv = get_list_of_file_in_path('.', "*.wmv", recursive = True)
|
||||
list_files_divx = get_list_of_file_in_path('.', "*.divx", recursive = True)
|
||||
list_files_webm = get_list_of_file_in_path('.', "*.webm", recursive = True)
|
||||
"""
|
||||
# remove all encoded element in the other files (TS)
|
||||
for elem_mkv in list_files_mkv:
|
||||
index = 0
|
||||
for elem_ts in list_files_ts:
|
||||
if elem_mkv[:-3]+"ts" == elem_ts:
|
||||
break;
|
||||
index += 1
|
||||
if index != len(list_files_ts):
|
||||
print("[INFO] remove from list '" + list_files_ts[index] + "' ==> already transcoded")
|
||||
del list_files_ts[index]
|
||||
|
||||
|
||||
|
||||
# remove all encoded element in the other files (FLV)
|
||||
for elem_mkv in list_files_mkv:
|
||||
index = 0
|
||||
for elem_flv in list_files_flv:
|
||||
if elem_mkv[:-3]+"flv" == elem_flv:
|
||||
break;
|
||||
index += 1
|
||||
if index != len(list_files_flv):
|
||||
print("[INFO] remove from list '" + list_files_flv[index] + "' ==> already transcoded")
|
||||
del list_files_flv[index]
|
||||
|
||||
|
||||
# remove all encoded element in the other files (mp4)
|
||||
for elem_mkv in list_files_mkv:
|
||||
index = 0
|
||||
for elem_mp4 in list_files_mp4:
|
||||
if elem_mkv[:-3]+"mp4" == elem_mp4:
|
||||
break;
|
||||
index += 1
|
||||
if index != len(list_files_mp4):
|
||||
print("[INFO] remove from list '" + list_files_mp4[index] + "' ==> already transcoded")
|
||||
del list_files_mp4[index]
|
||||
|
||||
|
||||
# remove all encoded element in the other files (TS)
|
||||
for elem_mkv in list_files_mkv:
|
||||
index = 0
|
||||
for elem_avi in list_files_avi:
|
||||
if elem_mkv[:-3]+"ts" == elem_avi:
|
||||
break;
|
||||
index += 1
|
||||
if index != len(list_files_avi):
|
||||
print("[INFO] remove from list '" + list_files_avi[index] + "' ==> already transcoded")
|
||||
del list_files_avi[index]
|
||||
|
||||
|
||||
# remove all encoded element in the other files (wmv)
|
||||
for elem_mkv in list_files_mkv:
|
||||
index = 0
|
||||
for elem_wmv in list_files_wmv:
|
||||
if elem_mkv[:-3]+"wmv" == elem_wmv:
|
||||
break;
|
||||
index += 1
|
||||
if index != len(list_files_wmv):
|
||||
print("[INFO] remove from list '" + list_files_wmv[index] + "' ==> already transcoded")
|
||||
del list_files_wmv[index]
|
||||
|
||||
# remove all encoded element in the other files (divx)
|
||||
for elem_mkv in list_files_mkv:
|
||||
index = 0
|
||||
for elem_divx in list_files_divx:
|
||||
if elem_mkv[:-3]+"divx" == elem_divx:
|
||||
break;
|
||||
index += 1
|
||||
if index != len(list_files_divx):
|
||||
print("[INFO] remove from list '" + list_files_divx[index] + "' ==> already transcoded")
|
||||
del list_files_divx[index]
|
||||
"""
|
||||
|
||||
print("list of elements TS : ")
|
||||
for elem in list_files_ts:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements MP4 : ")
|
||||
for elem in list_files_mp4:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements FLV : ")
|
||||
for elem in list_files_flv:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements AVI : ")
|
||||
for elem in list_files_avi:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements WMV : ")
|
||||
for elem in list_files_wmv:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements MKV : ")
|
||||
for elem in list_files_mkv:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements divx : ")
|
||||
for elem in list_files_divx:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements webm : ")
|
||||
for elem in list_files_webm:
|
||||
print(" - '" + str(elem) + "'")
|
||||
|
||||
import random
|
||||
from pymediainfo import MediaInfo
|
||||
|
||||
for arg in sys.argv:
|
||||
print("arg: " + arg)
|
||||
|
||||
id_value = 0
|
||||
if len(sys.argv) == 2:
|
||||
id_value = int(sys.argv[1])
|
||||
|
||||
tmp_name_encoded_file = "zzz_transcoded_" + str(id_value) + ".mkv"
|
||||
|
||||
print("lement name: " + tmp_name_encoded_file)
|
||||
|
||||
element_error=[]
|
||||
|
||||
|
||||
def trancode_local(list_of_file=[], extention="ts", total_count_of_file=0, offset=0) :
|
||||
global element_error;
|
||||
print("Start strancoding: '." + extention + "' ... " + str(len(list_of_file)))
|
||||
id_elem = 0
|
||||
for elem in list_of_file:
|
||||
id_elem += 1
|
||||
print(" ========================================================================================")
|
||||
print(" == " + str(offset+id_elem) + " / " + str(total_count_of_file))
|
||||
print(" == Trancode: '" + elem.replace("'", "\'") + "'")
|
||||
print(" ========================================================================================")
|
||||
if not os.path.isfile(elem):
|
||||
print(" ==> file does not exist")
|
||||
continue
|
||||
|
||||
cmd_line = "rm " + tmp_name_encoded_file
|
||||
ret = run_command(cmd_line)
|
||||
|
||||
# collect media info ...
|
||||
# if it is a mkv: check the Opus format of the audio tracks
|
||||
if extention == "mkv":
|
||||
media_info = MediaInfo.parse(elem)
|
||||
print("media-info: ... " + str(len(media_info.tracks)))
|
||||
need_trascode_audio = False
|
||||
for elem_track in media_info.tracks:
|
||||
data_print = "[" + str(elem_track.track_id) + "] " + str(elem_track.track_type)
|
||||
#print('track_id = ' + str(elem_track.track_id))
|
||||
#print('track_type = ' + str(elem_track.track_type))
|
||||
if elem_track.track_type == "Audio":
|
||||
data_print += " (" + str(elem_track.language) + ") enc=" + str(elem_track.format);
|
||||
#print('language = ' + str(elem_track.language))
|
||||
#print('format = ' + str(elem_track.format))
|
||||
if elem_track.format != "Opus":
|
||||
need_trascode_audio = True
|
||||
elif elem_track.track_type == "Video":
|
||||
data_print += " enc=" + str(elem_track.format);
|
||||
print(" - " + data_print)
|
||||
#print("media-info: ..." + str(dir(elem_track)))
|
||||
if need_trascode_audio == False:
|
||||
print(" ==> No transcoding, already in the good format...")
|
||||
continue
|
||||
|
||||
|
||||
"""
|
||||
media_info = MediaInfo.parse(elem)
|
||||
print("media-info: ..." + str(len(media_info.tracks)))
|
||||
for elem_track in media_info.tracks:
|
||||
print('track_type = ' + str(elem_track.track_type))
|
||||
print('track_id = ' + str(elem_track.track_id))
|
||||
print('language = ' + str(elem_track.language))
|
||||
#print("media-info: ..." + str(dir(elem_track)))
|
||||
continue
|
||||
"""
|
||||
|
||||
if extention != "mkv":
|
||||
cmd_line = "ffmpeg -fflags +genpts -i "
|
||||
#cmd_line = "ffmpeg -fflags +igndts -i "
|
||||
else:
|
||||
cmd_line = "ffmpeg -i "
|
||||
cmd_line += elem.replace(" ", "\ ").replace("'", "\\'")
|
||||
#cmd_line += " -threads 4 -vcodec libx264 -crf 22 -force_key_frames expr:gte\(t,n_forced*1\) -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace zzz_transcoded.mkv_tmp"
|
||||
#cmd_line += " -threads 4 -vcodec copy -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace tmp_transcoded.avi"
|
||||
#cmd_line += " -threads 4 -vcodec copy -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace tmp_transcoded.mp4"
|
||||
#cmd_line += " -threads 6 -c:v libvpx-vp9 -lossless 1 -c:a libopus -b:a 128k -deinterlace tmp_transcoded.webm"
|
||||
#cmd_line += " -threads 6 -c:v libvpx-vp9 -row-mt 1 -c:a libopus -b:a 128k -deinterlace tmp_transcoded.webm"
|
||||
# -map 0:v ==> copy all video stream
|
||||
# -map 0:a ==> copy all audio stream
|
||||
# -map 0:s ==> copy all subtitle stream
|
||||
|
||||
cmd_line += " -map 0:v -map 0:a -c:v copy -c:a libopus -ac 2 -b:a 192k -r:a 48000 -deinterlace -threads 6 " + tmp_name_encoded_file
|
||||
#cmd_line += " -threads 4 -vcodec copy -acodec copy tmp_transcoded.webm"
|
||||
ret = run_command(cmd_line)
|
||||
print(" ret value = " + str(ret))
|
||||
if ret == False:
|
||||
print("[ERROR] Trancode: error occured ...")
|
||||
element_error.append(elem)
|
||||
#exit(-1)
|
||||
continue
|
||||
print(" move in: '" + elem[:-len(extention)] + "mkv'")
|
||||
# cmd_line = "mv " + elem.replace(" ", "\ ").replace("'", "\\'") + " last_transcoded.xx"
|
||||
cmd_line = "mv " + elem.replace(" ", "\ ").replace("!", "\\!").replace("'", "\\'") + " last_transcoded"
|
||||
ret = run_command(cmd_line)
|
||||
cmd_line = "mv " + tmp_name_encoded_file + " " + elem.replace(" ", "\ ").replace("!", "\\!").replace("'", "\\'")[:-len(extention)] + "mkv"
|
||||
ret = run_command(cmd_line)
|
||||
|
||||
|
||||
#cmd_line = "mv " + elem.replace(" ", "\ ").replace("'", "\\'") + " last_transcoded.ts"
|
||||
#ret = run_command(cmd_line)
|
||||
#break
|
||||
|
||||
full_list_size = len(list_files_ts) + len(list_files_mp4) + len(list_files_flv) + len(list_files_avi) + len(list_files_wmv) + len(list_files_divx) + len(list_files_mkv) + len(list_files_webm)
|
||||
offset = 0;
|
||||
|
||||
|
||||
reverse_sort = False
|
||||
|
||||
list_files_ts.sort(reverse=reverse_sort)
|
||||
list_files_mp4.sort(reverse=reverse_sort)
|
||||
list_files_flv.sort(reverse=reverse_sort)
|
||||
list_files_avi.sort(reverse=reverse_sort)
|
||||
list_files_wmv.sort(reverse=reverse_sort)
|
||||
list_files_divx.sort(reverse=reverse_sort)
|
||||
list_files_mkv.sort(reverse=reverse_sort)
|
||||
list_files_webm.sort(reverse=reverse_sort)
|
||||
|
||||
random.shuffle(list_files_mp4)
|
||||
random.shuffle(list_files_avi)
|
||||
random.shuffle(list_files_mkv)
|
||||
|
||||
trancode_local(list_files_ts , "ts", full_list_size, offset)
|
||||
offset += len(list_files_ts)
|
||||
trancode_local(list_files_mp4 , "mp4", full_list_size, offset)
|
||||
offset += len(list_files_mp4)
|
||||
trancode_local(list_files_flv , "flv", full_list_size, offset)
|
||||
offset += len(list_files_flv)
|
||||
trancode_local(list_files_avi , "avi", full_list_size, offset)
|
||||
offset += len(list_files_avi)
|
||||
trancode_local(list_files_wmv , "wmv", full_list_size, offset)
|
||||
offset += len(list_files_wmv)
|
||||
trancode_local(list_files_divx , "divx", full_list_size, offset)
|
||||
offset += len(list_files_divx)
|
||||
trancode_local(list_files_mkv , "mkv", full_list_size, offset)
|
||||
offset += len(list_files_mkv)
|
||||
#trancode_local(list_files_webm , "webm", full_list_size, offset)
|
||||
#offset += len(list_files_webm)
|
||||
|
||||
print("List error transcode: " + len(element_error))
|
||||
for elem in element_error:
|
||||
print(" == Trancode: '" + elem.replace("'", "\'") + "'")
|
||||
|
||||
|
||||
## extract a thumb from a video
|
||||
## ffmpeg -i Passenger.mkv -ss 00:05:00 -f image2 -vframes 1 thumb.jpg
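The closing comment above sketches thumbnail extraction with ffmpeg. Wrapped as a small helper in the same spirit as the script's run_command() (a hypothetical addition, assuming ffmpeg is on the PATH):

import shlex
import subprocess

def extract_thumb(video_path, out_path="thumb.jpg", position="00:05:00"):
    # Same command line as the comment: grab one frame at the given timestamp
    cmd_line = (
        "ffmpeg -i " + shlex.quote(video_path)
        + " -ss " + position
        + " -f image2 -vframes 1 " + shlex.quote(out_path)
    )
    return subprocess.run(shlex.split(cmd_line), check=False).returncode == 0

print(extract_thumb("Passenger.mkv"))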
|
||||
|
@ -1,173 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
##
|
||||
## @author Edouard DUPIN
|
||||
##
|
||||
## @copyright 2016, Edouard DUPIN, all right reserved
|
||||
##
|
||||
## @license APACHE v2.0 (see license file)
|
||||
##
|
||||
import os
|
||||
import fnmatch
|
||||
import sys
|
||||
import subprocess
|
||||
import shlex
|
||||
import shutil
|
||||
|
||||
##
|
||||
## @brief Execute the command with no get of output
|
||||
##
|
||||
def run_command(cmd_line):
|
||||
# prepare command line:
|
||||
args = shlex.split(cmd_line)
|
||||
print("[INFO] cmd = " + str(args))
|
||||
try:
|
||||
# create the subprocess
|
||||
p = subprocess.Popen(args)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print("[ERROR] subprocess.CalledProcessError : " + str(args))
|
||||
return False
|
||||
#except:
|
||||
# debug.error("Exception on : " + str(args))
|
||||
# launch the subprocess:
|
||||
output, err = p.communicate()
|
||||
# Check error :
|
||||
if p.returncode == 0:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
##
|
||||
## @brief Get list of all Files in a specific path (with a regex)
|
||||
## @param[in] path (string) Full path of the machine to search files (start with / or x:)
|
||||
## @param[in] regex (string) Regular expression to search data
|
||||
## @param[in] recursive (bool) List file with recursive search
|
||||
## @param[in] remove_path (string) Data to remove in the path
|
||||
## @return (list) return files requested
|
||||
##
|
||||
def get_list_of_file_in_path(path, regex="*", recursive = False, remove_path=""):
|
||||
out = []
|
||||
if os.path.isdir(os.path.realpath(path)):
|
||||
tmp_path = os.path.realpath(path)
|
||||
tmp_rule = regex
|
||||
else:
|
||||
debug.error("path does not exist : '" + str(path) + "'")
|
||||
|
||||
for root, dirnames, filenames in os.walk(tmp_path):
|
||||
deltaRoot = root[len(tmp_path):]
|
||||
while len(deltaRoot) > 0 \
|
||||
and ( deltaRoot[0] == '/' \
|
||||
or deltaRoot[0] == '\\' ):
|
||||
deltaRoot = deltaRoot[1:]
|
||||
if recursive == False \
|
||||
and deltaRoot != "":
|
||||
return out
|
||||
tmpList = filenames
|
||||
if len(tmp_rule) > 0:
|
||||
tmpList = fnmatch.filter(filenames, tmp_rule)
|
||||
# Import the module :
|
||||
for cycleFile in tmpList:
|
||||
#for cycleFile in filenames:
|
||||
add_file = os.path.join(tmp_path, deltaRoot, cycleFile)
|
||||
if len(remove_path) != 0:
|
||||
if add_file[:len(remove_path)] != remove_path:
|
||||
print("ERROR : Request remove start of a path that is not the same: '" + add_file[:len(remove_path)] + "' demand remove of '" + str(remove_path) + "'")
|
||||
else:
|
||||
add_file = add_file[len(remove_path)+1:]
|
||||
out.append(add_file)
|
||||
return out;
|
||||
|
||||
def get_run_path():
|
||||
return os.getcwd()
|
||||
|
||||
src_path = get_run_path()
|
||||
dst_path = os.path.join(src_path, "..", "zzz_video_push_correct")
|
||||
list_files_mkv = get_list_of_file_in_path(src_path, "*.mkv", recursive = True)
|
||||
list_files_webm = get_list_of_file_in_path(src_path, "*.webm", recursive = True)
|
||||
list_files_jpg = get_list_of_file_in_path(src_path, "*.jpg", recursive = True)
|
||||
list_files_png = get_list_of_file_in_path(src_path, "*.png", recursive = True)
|
||||
|
||||
print("list of elements MKV : ")
|
||||
for elem in list_files_mkv:
|
||||
print(" - '" + str(elem) + "'")
|
||||
print("list of elements webm : ")
|
||||
for elem in list_files_webm:
|
||||
print(" - '" + str(elem) + "'")
|
||||
|
||||
import random
|
||||
from pymediainfo import MediaInfo
|
||||
|
||||
for arg in sys.argv:
|
||||
print("arg: " + arg)
|
||||
|
||||
id_value = 0
|
||||
if len(sys.argv) == 2:
|
||||
id_value = int(sys.argv[1])
|
||||
|
||||
|
||||
|
||||
def create_directory_of_file(file):
|
||||
path = os.path.dirname(file)
|
||||
try:
|
||||
os.stat(path)
|
||||
except:
|
||||
os.makedirs(path)
|
||||
|
||||
def file_move(path_src, path_dst):
|
||||
#real write of data:
|
||||
print("kljlkjlkjklj " + path_src)
|
||||
print("kljlkjlkjklj " + path_dst)
|
||||
create_directory_of_file(path_dst)
|
||||
shutil.move(path_src, path_dst)
|
||||
return True
|
||||
|
||||
|
||||
def move_local(list_of_file=[], extention="mkv") :
|
||||
global element_error;
|
||||
print("Start strancoding: '." + extention + "' ... " + str(len(list_of_file)))
|
||||
id_elem = 0
|
||||
total_count_of_file = len(list_of_file)
|
||||
for elem in list_of_file:
|
||||
id_elem += 1
|
||||
print(" ========================================================================================")
|
||||
print(" == " + str(id_elem) + " / " + str(total_count_of_file))
|
||||
print(" == Trancode: '" + elem.replace("'", "\'") + "'")
|
||||
print(" ========================================================================================")
|
||||
if not os.path.isfile(elem):
|
||||
print(" ==> file does not exist")
|
||||
continue
|
||||
|
||||
# collect media info ...
|
||||
# if it is a mkv: check that the audio is Opus and the video is AVC
|
||||
if extention == "mkv":
|
||||
media_info = MediaInfo.parse(elem)
|
||||
print("media-info: ... " + str(len(media_info.tracks)))
|
||||
need_move_file = True
|
||||
for elem_track in media_info.tracks:
|
||||
data_print = "[" + str(elem_track.track_id) + "] " + str(elem_track.track_type)
|
||||
#print('track_id = ' + str(elem_track.track_id))
|
||||
#print('track_type = ' + str(elem_track.track_type))
|
||||
if elem_track.track_type == "Audio":
|
||||
data_print += " (" + str(elem_track.language) + ") enc=" + str(elem_track.format);
|
||||
#print('language = ' + str(elem_track.language))
|
||||
#print('format = ' + str(elem_track.format))
|
||||
if elem_track.format != "Opus":
|
||||
need_move_file = False
|
||||
elif elem_track.track_type == "Video":
|
||||
data_print += " enc=" + str(elem_track.format);
|
||||
if elem_track.format != "AVC":
|
||||
need_move_file = False
|
||||
print(" - " + data_print)
|
||||
#print("media-info: ..." + str(dir(elem_track)))
|
||||
if need_move_file == False:
|
||||
print(" ==> Need transcode, NOT already in the good format...")
|
||||
continue
|
||||
|
||||
file_move(elem, os.path.join(dst_path, elem[len(src_path)+1:]))
|
||||
|
||||
move_local(list_files_mkv, "mkv")
|
||||
move_local(list_files_webm, "webm")
|
||||
move_local(list_files_jpg, "jpg")
|
||||
move_local(list_files_png, "png")
|
||||
|
25
bdd/docker-compose.yaml
Normal file
@ -0,0 +1,25 @@
# Use root/example as user/password credentials
version: '3.1'

services:
  db_service:
    image: mysql:latest
    container_name: mysql_db
    restart: always
    command: --default-authentication-plugin=mysql_native_password
    env_file:
      - ./config.env
    #environment:
    #  MYSQL_ROOT_PASSWORD: changeme
    #  MYSQL_DATABASE: mybdd
    volumes:
      - ./data:/var/lib/mysql
    ports:
      - 15306:3306
  adminer_service:
    image: adminer:latest
    restart: always
    ports:
      - 8080:8080
    links:
      - db_service:db
50
docker-compose.yaml
Normal file
@ -0,0 +1,50 @@
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
karideo_db_service:
|
||||
image: mysql:latest
|
||||
restart: always
|
||||
command: --default-authentication-plugin=mysql_native_password
|
||||
env_file:
|
||||
- ./config.env
|
||||
volumes:
|
||||
- /workspace/data/karideo/db:/var/lib/mysql
|
||||
mem_limit: 600m
|
||||
healthcheck:
|
||||
test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
|
||||
timeout: 20s
|
||||
retries: 10
|
||||
|
||||
karauth_adminer_service:
|
||||
image: adminer:latest
|
||||
restart: always
|
||||
depends_on:
|
||||
- karideo_db_service
|
||||
ports:
|
||||
- 18079:8080
|
||||
links:
|
||||
- karideo_db_service:db
|
||||
mem_limit: 100m
|
||||
|
||||
karideo_back_service:
|
||||
build: .
|
||||
restart: always
|
||||
image: gitea.atria-soft.org/kangaroo-and-rabbit/karideo:latest
|
||||
depends_on:
|
||||
- karideo_db_service
|
||||
ports:
|
||||
- 18080:18080
|
||||
env_file:
|
||||
- ./config.env
|
||||
links:
|
||||
- karideo_db_service:db
|
||||
volumes:
|
||||
- /workspace/data/karideo/media:/application/data
|
||||
- /workspace/data/karideo/tmp:/application/tmp
|
||||
# read_only fails because the application writes temporary files; to be tested: System.setProperty("java.io.tmpdir", "/application/tmp"); it may also cause read/write access issues on the HDD drive...
|
||||
#read_only: true
|
||||
mem_limit: 1200m
|
||||
healthcheck:
|
||||
test: ["CMD", "wget" ,"http://localhost:18080/karideo/api/health_check", "-O", "/dev/null"]
|
||||
timeout: 20s
|
||||
retries: 3
|
4
front/.eslintignore
Normal file
@ -0,0 +1,4 @@
|
||||
node_modules/*
|
||||
build/*
|
||||
out/*
|
||||
dist/*
|
225
front/.eslintrc.js
Normal file
@ -0,0 +1,225 @@
|
||||
var OFF = 0, WARN = 1, ERROR = 2;
|
||||
|
||||
module.exports = {
|
||||
'env': {
|
||||
'browser': true,
|
||||
'es2021': true,
|
||||
},
|
||||
'extends': [
|
||||
'eslint:recommended',
|
||||
],
|
||||
'parser': '@typescript-eslint/parser',
|
||||
'parserOptions': {
|
||||
'ecmaVersion': 'latest',
|
||||
'sourceType': 'module',
|
||||
},
|
||||
'plugins': [
|
||||
'@typescript-eslint',
|
||||
],
|
||||
"rules": {
|
||||
// Possible Errors (overrides from recommended set)
|
||||
"no-extra-parens": ERROR,
|
||||
"no-unexpected-multiline": ERROR,
|
||||
// All JSDoc comments must be valid
|
||||
"valid-jsdoc": [ OFF, {
|
||||
"requireReturn": false,
|
||||
"requireReturnDescription": false,
|
||||
"requireParamDescription": true,
|
||||
"prefer": {
|
||||
"return": "returns"
|
||||
}
|
||||
}],
|
||||
|
||||
// Best Practices
|
||||
|
||||
// Allow a getter without a setter, but require a getter for every setter
|
||||
"accessor-pairs": [ OFF, {
|
||||
"getWithoutSet": false,
|
||||
"setWithoutGet": true
|
||||
}],
|
||||
"block-scoped-var": WARN,
|
||||
"consistent-return": OFF,
|
||||
"curly": ERROR,
|
||||
"default-case": WARN,
|
||||
// the dot goes with the property when doing multiline
|
||||
"dot-location": [ WARN, "property" ],
|
||||
"dot-notation": WARN,
|
||||
"eqeqeq": [ ERROR, "smart" ],
|
||||
"guard-for-in": WARN,
|
||||
"no-alert": ERROR,
|
||||
"no-caller": ERROR,
|
||||
"no-case-declarations": WARN,
|
||||
"no-div-regex": WARN,
|
||||
"no-else-return": WARN,
|
||||
"no-empty-pattern": WARN,
|
||||
"no-eq-null": ERROR,
|
||||
"no-eval": ERROR,
|
||||
"no-extend-native": ERROR,
|
||||
"no-extra-bind": WARN,
|
||||
"no-floating-decimal": WARN,
|
||||
"no-implicit-coercion": [ WARN, {
|
||||
"boolean": true,
|
||||
"number": true,
|
||||
"string": true
|
||||
}],
|
||||
"no-implied-eval": ERROR,
|
||||
"no-invalid-this": ERROR,
|
||||
"no-iterator": ERROR,
|
||||
"no-labels": WARN,
|
||||
"no-lone-blocks": WARN,
|
||||
"no-loop-func": ERROR,
|
||||
"no-magic-numbers": OFF,
|
||||
"no-multi-spaces": ERROR,
|
||||
"no-multi-str": WARN,
|
||||
"no-native-reassign": ERROR,
|
||||
"no-new-func": ERROR,
|
||||
"no-new-wrappers": ERROR,
|
||||
"no-new": ERROR,
|
||||
"no-octal-escape": ERROR,
|
||||
"no-param-reassign": ERROR,
|
||||
"no-process-env": WARN,
|
||||
"no-proto": ERROR,
|
||||
"no-redeclare": ERROR,
|
||||
"no-return-assign": ERROR,
|
||||
"no-script-url": ERROR,
|
||||
"no-self-compare": ERROR,
|
||||
"no-throw-literal": ERROR,
|
||||
"no-unused-expressions": ERROR,
|
||||
"no-useless-call": ERROR,
|
||||
"no-useless-concat": ERROR,
|
||||
"no-void": WARN,
|
||||
// Produce warnings when something is commented as TODO or FIXME
|
||||
"no-warning-comments": [ WARN, {
|
||||
"terms": [ "TODO", "FIXME" ],
|
||||
"location": "start"
|
||||
}],
|
||||
"no-with": WARN,
|
||||
"radix": WARN,
|
||||
"vars-on-top": ERROR,
|
||||
// Enforces the style of wrapped functions
|
||||
"wrap-iife": [ ERROR, "outside" ],
|
||||
"yoda": ERROR,
|
||||
|
||||
// Strict Mode - for ES6, never use strict.
|
||||
"strict": [ ERROR, "never" ],
|
||||
|
||||
// Variables
|
||||
"init-declarations": [ OFF, "always" ],
|
||||
"no-catch-shadow": WARN,
|
||||
"no-delete-var": ERROR,
|
||||
"no-label-var": ERROR,
|
||||
"no-shadow-restricted-names": ERROR,
|
||||
"no-shadow": WARN,
|
||||
// We require all vars to be initialized (see init-declarations)
|
||||
// If we NEED a var to be initialized to undefined, it needs to be explicit
|
||||
"no-undef-init": OFF,
|
||||
"no-undef": ERROR,
|
||||
"no-undefined": OFF,
|
||||
"no-unused-vars": OFF,
|
||||
// Disallow hoisting - let & const don't allow hoisting anyhow
|
||||
"no-use-before-define": ERROR,
|
||||
|
||||
// Node.js and CommonJS
|
||||
"callback-return": [ WARN, [ "callback", "next" ]],
|
||||
"global-require": ERROR,
|
||||
"handle-callback-err": WARN,
|
||||
"no-mixed-requires": WARN,
|
||||
"no-new-require": ERROR,
|
||||
// Use path.join() instead
|
||||
"no-path-concat": ERROR,
|
||||
"no-process-exit": ERROR,
|
||||
"no-restricted-modules": OFF,
|
||||
"no-sync": WARN,
|
||||
|
||||
// ECMAScript 6 support
|
||||
"arrow-body-style": [ ERROR, "always" ],
|
||||
"arrow-parens": [ ERROR, "always" ],
|
||||
"arrow-spacing": [ ERROR, { "before": true, "after": true }],
|
||||
"constructor-super": ERROR,
|
||||
"generator-star-spacing": [ ERROR, "before" ],
|
||||
"no-confusing-arrow": ERROR,
|
||||
"no-class-assign": ERROR,
|
||||
"no-const-assign": ERROR,
|
||||
"no-dupe-class-members": ERROR,
|
||||
"no-this-before-super": ERROR,
|
||||
"no-var": WARN,
|
||||
"object-shorthand": [ WARN, "never" ],
|
||||
"prefer-arrow-callback": WARN,
|
||||
"prefer-spread": WARN,
|
||||
"prefer-template": WARN,
|
||||
"require-yield": ERROR,
|
||||
|
||||
// Stylistic - everything here is a warning because of style.
|
||||
"array-bracket-spacing": [ WARN, "always" ],
|
||||
"block-spacing": [ WARN, "always" ],
|
||||
"brace-style": [ WARN, "1tbs", { "allowSingleLine": false } ],
|
||||
"camelcase": WARN,
|
||||
"comma-spacing": [ WARN, { "before": false, "after": true } ],
|
||||
"comma-style": [ WARN, "last" ],
|
||||
"computed-property-spacing": [ WARN, "never" ],
|
||||
"consistent-this": [ WARN, "self" ],
|
||||
"eol-last": WARN,
|
||||
"func-names": WARN,
|
||||
"func-style": [ WARN, "declaration" ],
|
||||
"id-length": [ WARN, { "min": 2, "max": 32 } ],
|
||||
"indent": [ WARN, 'tab' ],
|
||||
"jsx-quotes": [ WARN, "prefer-double" ],
|
||||
"linebreak-style": [ WARN, "unix" ],
|
||||
"lines-around-comment": [ OFF, { "beforeBlockComment": true } ],
|
||||
"max-depth": [ WARN, 8 ],
|
||||
"max-len": [ WARN, 182 ],
|
||||
"max-nested-callbacks": [ WARN, 8 ],
|
||||
"max-params": [ WARN, 10 ],
|
||||
"new-cap": OFF,
|
||||
"new-parens": WARN,
|
||||
"no-array-constructor": WARN,
|
||||
"no-bitwise": OFF,
|
||||
"no-continue": OFF,
|
||||
"no-inline-comments": OFF,
|
||||
"no-lonely-if": OFF,
|
||||
"no-mixed-spaces-and-tabs": OFF,
|
||||
"no-multiple-empty-lines": WARN,
|
||||
"no-negated-condition": OFF,
|
||||
"no-nested-ternary": WARN,
|
||||
"no-new-object": WARN,
|
||||
"no-plusplus": OFF,
|
||||
"no-spaced-func": WARN,
|
||||
"no-ternary": OFF,
|
||||
"no-trailing-spaces": WARN,
|
||||
"no-underscore-dangle": WARN,
|
||||
"no-unneeded-ternary": WARN,
|
||||
"object-curly-spacing": [ WARN, "always" ],
|
||||
"one-var": OFF,
|
||||
"operator-assignment": [ WARN, "never" ],
|
||||
"operator-linebreak": [ WARN, "after" ],
|
||||
"padded-blocks": [ WARN, "never" ],
|
||||
"quote-props": [ WARN, "consistent-as-needed" ],
|
||||
"quotes": [ WARN, "single" ],
|
||||
"require-jsdoc": [ OFF, {
|
||||
"require": {
|
||||
"FunctionDeclaration": true,
|
||||
"MethodDefinition": true,
|
||||
"ClassDeclaration": false
|
||||
}
|
||||
}],
|
||||
"semi-spacing": [ WARN, { "before": false, "after": true }],
|
||||
"semi": [ ERROR, "always" ],
|
||||
"sort-vars": OFF,
|
||||
"keyword-spacing": [WARN, {
|
||||
"overrides": {
|
||||
"if": { "after": false },
|
||||
"for": { "after": false },
|
||||
"while": { "after": false },
|
||||
"static": { "after": false },
|
||||
"as": { "after": false }
|
||||
}
|
||||
}],
|
||||
"space-before-blocks": [ WARN, "always" ],
|
||||
"space-before-function-paren": [ WARN, "never" ],
|
||||
"space-in-parens": [ WARN, "never" ],
|
||||
"space-infix-ops": [ WARN, { "int32Hint": true } ],
|
||||
"space-unary-ops": ERROR,
|
||||
"spaced-comment": [ WARN, "always" ],
|
||||
"wrap-regex": WARN,
|
||||
},
|
||||
};
|
3
front/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
/node_modules/
|
||||
/.angular/
|
||||
/.idea/
|
@ -1,24 +1,25 @@
|
||||
# base image
|
||||
FROM node:latest as build
|
||||
|
||||
ADD src /application/src
|
||||
ADD e2e /application/e2e
|
||||
ADD package-lock.json /application/
|
||||
ADD package.json /application/
|
||||
ADD angular.json /application/
|
||||
ADD browserslist /application/
|
||||
ADD karma.conf.js /application/
|
||||
ADD protractor.conf.js /application/
|
||||
ADD tsconfig.json /application/
|
||||
ADD tslint.json /application/
|
||||
WORKDIR /application/
|
||||
FROM node:lts as build
|
||||
|
||||
# add `/application/node_modules/.bin` to $PATH
|
||||
ENV PATH /application/node_modules/.bin:$PATH
|
||||
|
||||
ADD package-lock.json /application/
|
||||
ADD package.json /application/
|
||||
#ADD browserslist /application/
|
||||
ADD karma.conf.js /application/
|
||||
ADD protractor.conf.js /application/
|
||||
WORKDIR /application/
|
||||
|
||||
# install and cache app dependencies
|
||||
RUN npm install
|
||||
|
||||
ADD e2e /application/e2e
|
||||
ADD tsconfig.json /application/
|
||||
ADD tslint.json /application/
|
||||
ADD angular.json /application/
|
||||
ADD src /application/src
|
||||
|
||||
# generate build
|
||||
RUN ng build --output-path=dist --configuration=production --base-href=/karideo/ --deploy-url=/karideo/
|
||||
|
||||
@ -27,13 +28,8 @@ RUN ng build --output-path=dist --configuration=production --base-href=/karideo/
|
||||
############
|
||||
|
||||
# base image
|
||||
FROM nginx:1.16.0-alpine
|
||||
FROM httpd:latest
|
||||
|
||||
# copy artifact build from the 'build environment'
|
||||
COPY --from=build /application/dist /usr/share/nginx/html
|
||||
|
||||
# expose port 80
|
||||
EXPOSE 80
|
||||
|
||||
# run nginx
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
COPY --from=build /application/dist /usr/local/apache2/htdocs/
|
||||
COPY httpd/httpd.conf /usr/local/apache2/conf/httpd.conf
|
||||
|
@ -3,7 +3,7 @@
|
||||
"version": 1,
|
||||
"newProjectRoot": "projects",
|
||||
"projects": {
|
||||
"no-comment": {
|
||||
"karideo": {
|
||||
"root": "",
|
||||
"sourceRoot": "src",
|
||||
"projectType": "application",
|
||||
@ -15,13 +15,17 @@
|
||||
"index": "src/index.html",
|
||||
"main": "src/main.ts",
|
||||
"tsConfig": "src/tsconfig.app.json",
|
||||
"polyfills": "src/polyfills.ts",
|
||||
"preserveSymlinks": true,
|
||||
"polyfills": [
|
||||
"zone.js"
|
||||
],
|
||||
"assets": [
|
||||
"src/assets",
|
||||
"src/favicon.ico"
|
||||
],
|
||||
"styles": [
|
||||
"src/styles.less",
|
||||
"src/generic_page.less",
|
||||
"src/theme.color.blue.less",
|
||||
"src/theme.checkbox.less",
|
||||
"src/theme.modal.less"
|
||||
@ -33,7 +37,6 @@
|
||||
"optimization": true,
|
||||
"outputHashing": "all",
|
||||
"sourceMap": false,
|
||||
"extractCss": true,
|
||||
"namedChunks": false,
|
||||
"aot": true,
|
||||
"extractLicenses": true,
|
||||
@ -45,24 +48,42 @@
|
||||
"with": "src/environments/environment.prod.ts"
|
||||
}
|
||||
]
|
||||
},
|
||||
"develop": {
|
||||
"optimization": false,
|
||||
"outputHashing": "none",
|
||||
"namedChunks": true,
|
||||
"aot": false,
|
||||
"extractLicenses": true,
|
||||
"vendorChunk": true,
|
||||
"buildOptimizer": false,
|
||||
"sourceMap": {
|
||||
"scripts": true,
|
||||
"styles": true,
|
||||
"hidden": false,
|
||||
"vendor": true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"serve": {
|
||||
"builder": "@angular-devkit/build-angular:dev-server",
|
||||
"options": {
|
||||
"browserTarget": "no-comment:build"
|
||||
"buildTarget": "karideo:build"
|
||||
},
|
||||
"configurations": {
|
||||
"production": {
|
||||
"browserTarget": "no-comment:build:production"
|
||||
"buildTarget": "karideo:build:production"
|
||||
},
|
||||
"develop": {
|
||||
"buildTarget": "karideo:build:develop"
|
||||
}
|
||||
}
|
||||
},
|
||||
"extract-i18n": {
|
||||
"builder": "@angular-devkit/build-angular:extract-i18n",
|
||||
"options": {
|
||||
"browserTarget": "no-comment:build"
|
||||
"buildTarget": "karideo:build"
|
||||
}
|
||||
},
|
||||
"test": {
|
||||
@ -70,11 +91,14 @@
|
||||
"options": {
|
||||
"main": "src/test.ts",
|
||||
"karmaConfig": "./karma.conf.js",
|
||||
"polyfills": "src/polyfills.ts",
|
||||
"polyfills": [
|
||||
"zone.js"
|
||||
],
|
||||
"tsConfig": "src/tsconfig.spec.json",
|
||||
"scripts": [],
|
||||
"styles": [
|
||||
"src/styles.less",
|
||||
"src/generic_page.less",
|
||||
"src/theme.color.blue.less",
|
||||
"src/theme.checkbox.less",
|
||||
"src/theme.modal.less"
|
||||
@ -86,6 +110,17 @@
|
||||
}
|
||||
},
|
||||
"lint": {
|
||||
"builder": "@angular-eslint/builder:lint",
|
||||
"options": {
|
||||
"fix": true,
|
||||
"eslintConfig": ".eslintrc.js",
|
||||
"lintFilePatterns": [
|
||||
"src/**/*.spec.ts",
|
||||
"src/**/*.ts"
|
||||
]
|
||||
}
|
||||
},
|
||||
"TTTTTTlint": {
|
||||
"builder": "@angular-devkit/build-angular:tslint",
|
||||
"options": {
|
||||
"tsConfig": [
|
||||
@ -99,7 +134,7 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"no-comment-e2e": {
|
||||
"karideo-e2e": {
|
||||
"root": "e2e",
|
||||
"sourceRoot": "e2e",
|
||||
"projectType": "application",
|
||||
@ -108,7 +143,7 @@
|
||||
"builder": "@angular-devkit/build-angular:protractor",
|
||||
"options": {
|
||||
"protractorConfig": "./protractor.conf.js",
|
||||
"devServerTarget": "no-comment:serve"
|
||||
"devServerTarget": "karideo:serve"
|
||||
}
|
||||
},
|
||||
"lint": {
|
||||
@ -125,11 +160,10 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"defaultProject": "no-comment",
|
||||
"schematics": {
|
||||
"@schematics/angular:component": {
|
||||
"prefix": "app",
|
||||
"styleext": "less"
|
||||
"style": "less"
|
||||
},
|
||||
"@schematics/angular:directive": {
|
||||
"prefix": "app"
|
||||
|
541
front/httpd/httpd.conf
Normal file
@ -0,0 +1,541 @@
|
||||
#
|
||||
# This is the main Apache HTTP server configuration file. It contains the
|
||||
# configuration directives that give the server its instructions.
|
||||
# See <URL:http://httpd.apache.org/docs/2.4/> for detailed information.
|
||||
# In particular, see
|
||||
# <URL:http://httpd.apache.org/docs/2.4/mod/directives.html>
|
||||
# for a discussion of each configuration directive.
|
||||
#
|
||||
# Do NOT simply read the instructions in here without understanding
|
||||
# what they do. They're here only as hints or reminders. If you are unsure
|
||||
# consult the online docs. You have been warned.
|
||||
#
|
||||
# Configuration and logfile names: If the filenames you specify for many
|
||||
# of the server's control files begin with "/" (or "drive:/" for Win32), the
|
||||
# server will use that explicit path. If the filenames do *not* begin
|
||||
# with "/", the value of ServerRoot is prepended -- so "logs/access_log"
|
||||
# with ServerRoot set to "/usr/local/apache2" will be interpreted by the
|
||||
# server as "/usr/local/apache2/logs/access_log", whereas "/logs/access_log"
|
||||
# will be interpreted as '/logs/access_log'.
|
||||
|
||||
#
|
||||
# ServerRoot: The top of the directory tree under which the server's
|
||||
# configuration, error, and log files are kept.
|
||||
#
|
||||
# Do not add a slash at the end of the directory path. If you point
|
||||
# ServerRoot at a non-local disk, be sure to specify a local disk on the
|
||||
# Mutex directive, if file-based mutexes are used. If you wish to share the
|
||||
# same ServerRoot for multiple httpd daemons, you will need to change at
|
||||
# least PidFile.
|
||||
#
|
||||
ServerRoot "/usr/local/apache2"
|
||||
|
||||
#
|
||||
# Mutex: Allows you to set the mutex mechanism and mutex file directory
|
||||
# for individual mutexes, or change the global defaults
|
||||
#
|
||||
# Uncomment and change the directory if mutexes are file-based and the default
|
||||
# mutex file directory is not on a local disk or is not appropriate for some
|
||||
# other reason.
|
||||
#
|
||||
# Mutex default:logs
|
||||
|
||||
#
|
||||
# Listen: Allows you to bind Apache to specific IP addresses and/or
|
||||
# ports, instead of the default. See also the <VirtualHost>
|
||||
# directive.
|
||||
#
|
||||
# Change this to Listen on specific IP addresses as shown below to
|
||||
# prevent Apache from glomming onto all bound IP addresses.
|
||||
#
|
||||
Listen 80
|
||||
Listen 443
|
||||
|
||||
#
|
||||
# Dynamic Shared Object (DSO) Support
|
||||
#
|
||||
# To be able to use the functionality of a module which was built as a DSO you
|
||||
# have to place corresponding `LoadModule' lines at this location so the
|
||||
# directives contained in it are actually available _before_ they are used.
|
||||
# Statically compiled modules (those listed by `httpd -l') do not need
|
||||
# to be loaded here.
|
||||
#
|
||||
# Example:
|
||||
# LoadModule foo_module modules/mod_foo.so
|
||||
#
|
||||
LoadModule mpm_event_module modules/mod_mpm_event.so
|
||||
#LoadModule mpm_prefork_module modules/mod_mpm_prefork.so
|
||||
#LoadModule mpm_worker_module modules/mod_mpm_worker.so
|
||||
LoadModule authn_file_module modules/mod_authn_file.so
|
||||
#LoadModule authn_dbm_module modules/mod_authn_dbm.so
|
||||
#LoadModule authn_anon_module modules/mod_authn_anon.so
|
||||
#LoadModule authn_dbd_module modules/mod_authn_dbd.so
|
||||
#LoadModule authn_socache_module modules/mod_authn_socache.so
|
||||
LoadModule authn_core_module modules/mod_authn_core.so
|
||||
LoadModule authz_host_module modules/mod_authz_host.so
|
||||
LoadModule authz_groupfile_module modules/mod_authz_groupfile.so
|
||||
LoadModule authz_user_module modules/mod_authz_user.so
|
||||
#LoadModule authz_dbm_module modules/mod_authz_dbm.so
|
||||
#LoadModule authz_owner_module modules/mod_authz_owner.so
|
||||
#LoadModule authz_dbd_module modules/mod_authz_dbd.so
|
||||
LoadModule authz_core_module modules/mod_authz_core.so
|
||||
#LoadModule authnz_ldap_module modules/mod_authnz_ldap.so
|
||||
#LoadModule authnz_fcgi_module modules/mod_authnz_fcgi.so
|
||||
LoadModule access_compat_module modules/mod_access_compat.so
|
||||
LoadModule auth_basic_module modules/mod_auth_basic.so
|
||||
#LoadModule auth_form_module modules/mod_auth_form.so
|
||||
#LoadModule auth_digest_module modules/mod_auth_digest.so
|
||||
#LoadModule allowmethods_module modules/mod_allowmethods.so
|
||||
#LoadModule isapi_module modules/mod_isapi.so
|
||||
#LoadModule file_cache_module modules/mod_file_cache.so
|
||||
#LoadModule cache_module modules/mod_cache.so
|
||||
#LoadModule cache_disk_module modules/mod_cache_disk.so
|
||||
#LoadModule cache_socache_module modules/mod_cache_socache.so
|
||||
#LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
|
||||
#LoadModule socache_dbm_module modules/mod_socache_dbm.so
|
||||
#LoadModule socache_memcache_module modules/mod_socache_memcache.so
|
||||
#LoadModule watchdog_module modules/mod_watchdog.so
|
||||
#LoadModule macro_module modules/mod_macro.so
|
||||
#LoadModule dbd_module modules/mod_dbd.so
|
||||
#LoadModule bucketeer_module modules/mod_bucketeer.so
|
||||
#LoadModule dumpio_module modules/mod_dumpio.so
|
||||
#LoadModule echo_module modules/mod_echo.so
|
||||
#LoadModule example_hooks_module modules/mod_example_hooks.so
|
||||
#LoadModule case_filter_module modules/mod_case_filter.so
|
||||
#LoadModule case_filter_in_module modules/mod_case_filter_in.so
|
||||
#LoadModule example_ipc_module modules/mod_example_ipc.so
|
||||
#LoadModule buffer_module modules/mod_buffer.so
|
||||
#LoadModule data_module modules/mod_data.so
|
||||
#LoadModule ratelimit_module modules/mod_ratelimit.so
|
||||
LoadModule reqtimeout_module modules/mod_reqtimeout.so
|
||||
#LoadModule ext_filter_module modules/mod_ext_filter.so
|
||||
#LoadModule request_module modules/mod_request.so
|
||||
#LoadModule include_module modules/mod_include.so
|
||||
LoadModule filter_module modules/mod_filter.so
|
||||
#LoadModule reflector_module modules/mod_reflector.so
|
||||
#LoadModule substitute_module modules/mod_substitute.so
|
||||
#LoadModule sed_module modules/mod_sed.so
|
||||
#LoadModule charset_lite_module modules/mod_charset_lite.so
|
||||
#LoadModule deflate_module modules/mod_deflate.so
|
||||
LoadModule xml2enc_module modules/mod_xml2enc.so
|
||||
LoadModule proxy_html_module modules/mod_proxy_html.so
|
||||
LoadModule mime_module modules/mod_mime.so
|
||||
#LoadModule ldap_module modules/mod_ldap.so
|
||||
LoadModule log_config_module modules/mod_log_config.so
|
||||
#LoadModule log_debug_module modules/mod_log_debug.so
|
||||
#LoadModule log_forensic_module modules/mod_log_forensic.so
|
||||
#LoadModule logio_module modules/mod_logio.so
|
||||
#LoadModule lua_module modules/mod_lua.so
|
||||
LoadModule env_module modules/mod_env.so
|
||||
#LoadModule mime_magic_module modules/mod_mime_magic.so
|
||||
#LoadModule cern_meta_module modules/mod_cern_meta.so
|
||||
#LoadModule expires_module modules/mod_expires.so
|
||||
LoadModule headers_module modules/mod_headers.so
|
||||
#LoadModule ident_module modules/mod_ident.so
|
||||
#LoadModule usertrack_module modules/mod_usertrack.so
|
||||
#LoadModule unique_id_module modules/mod_unique_id.so
|
||||
LoadModule setenvif_module modules/mod_setenvif.so
|
||||
LoadModule version_module modules/mod_version.so
|
||||
#LoadModule remoteip_module modules/mod_remoteip.so
|
||||
LoadModule proxy_module modules/mod_proxy.so
|
||||
#LoadModule proxy_connect_module modules/mod_proxy_connect.so
|
||||
#LoadModule proxy_ftp_module modules/mod_proxy_ftp.so
|
||||
LoadModule proxy_http_module modules/mod_proxy_http.so
|
||||
#LoadModule proxy_fcgi_module modules/mod_proxy_fcgi.so
|
||||
#LoadModule proxy_scgi_module modules/mod_proxy_scgi.so
|
||||
#LoadModule proxy_uwsgi_module modules/mod_proxy_uwsgi.so
|
||||
#LoadModule proxy_fdpass_module modules/mod_proxy_fdpass.so
|
||||
#LoadModule proxy_wstunnel_module modules/mod_proxy_wstunnel.so
|
||||
#LoadModule proxy_ajp_module modules/mod_proxy_ajp.so
|
||||
#LoadModule proxy_balancer_module modules/mod_proxy_balancer.so
|
||||
#LoadModule proxy_express_module modules/mod_proxy_express.so
|
||||
#LoadModule proxy_hcheck_module modules/mod_proxy_hcheck.so
|
||||
#LoadModule session_module modules/mod_session.so
|
||||
#LoadModule session_cookie_module modules/mod_session_cookie.so
|
||||
#LoadModule session_crypto_module modules/mod_session_crypto.so
|
||||
#LoadModule session_dbd_module modules/mod_session_dbd.so
|
||||
#LoadModule slotmem_shm_module modules/mod_slotmem_shm.so
|
||||
#LoadModule slotmem_plain_module modules/mod_slotmem_plain.so
|
||||
LoadModule ssl_module modules/mod_ssl.so
|
||||
#LoadModule optional_hook_export_module modules/mod_optional_hook_export.so
|
||||
#LoadModule optional_hook_import_module modules/mod_optional_hook_import.so
|
||||
#LoadModule optional_fn_import_module modules/mod_optional_fn_import.so
|
||||
#LoadModule optional_fn_export_module modules/mod_optional_fn_export.so
|
||||
#LoadModule dialup_module modules/mod_dialup.so
|
||||
#LoadModule http2_module modules/mod_http2.so
|
||||
#LoadModule proxy_http2_module modules/mod_proxy_http2.so
|
||||
#LoadModule lbmethod_byrequests_module modules/mod_lbmethod_byrequests.so
|
||||
#LoadModule lbmethod_bytraffic_module modules/mod_lbmethod_bytraffic.so
|
||||
#LoadModule lbmethod_bybusyness_module modules/mod_lbmethod_bybusyness.so
|
||||
#LoadModule lbmethod_heartbeat_module modules/mod_lbmethod_heartbeat.so
|
||||
LoadModule unixd_module modules/mod_unixd.so
|
||||
#LoadModule heartbeat_module modules/mod_heartbeat.so
|
||||
#LoadModule heartmonitor_module modules/mod_heartmonitor.so
|
||||
#LoadModule dav_module modules/mod_dav.so
|
||||
LoadModule status_module modules/mod_status.so
|
||||
LoadModule autoindex_module modules/mod_autoindex.so
|
||||
#LoadModule asis_module modules/mod_asis.so
|
||||
#LoadModule info_module modules/mod_info.so
|
||||
#LoadModule suexec_module modules/mod_suexec.so
|
||||
<IfModule !mpm_prefork_module>
|
||||
#LoadModule cgid_module modules/mod_cgid.so
|
||||
</IfModule>
|
||||
<IfModule mpm_prefork_module>
|
||||
#LoadModule cgi_module modules/mod_cgi.so
|
||||
</IfModule>
|
||||
#LoadModule dav_fs_module modules/mod_dav_fs.so
|
||||
#LoadModule dav_lock_module modules/mod_dav_lock.so
|
||||
#LoadModule vhost_alias_module modules/mod_vhost_alias.so
|
||||
#LoadModule negotiation_module modules/mod_negotiation.so
|
||||
LoadModule dir_module modules/mod_dir.so
|
||||
#LoadModule imagemap_module modules/mod_imagemap.so
|
||||
#LoadModule actions_module modules/mod_actions.so
|
||||
#LoadModule speling_module modules/mod_speling.so
|
||||
#LoadModule userdir_module modules/mod_userdir.so
|
||||
LoadModule alias_module modules/mod_alias.so
|
||||
LoadModule rewrite_module modules/mod_rewrite.so
|
||||
|
||||
<IfModule unixd_module>
|
||||
#
|
||||
# If you wish httpd to run as a different user or group, you must run
|
||||
# httpd as root initially and it will switch.
|
||||
#
|
||||
# User/Group: The name (or #number) of the user/group to run httpd as.
|
||||
# It is usually good practice to create a dedicated user and group for
|
||||
# running httpd, as with most system services.
|
||||
#
|
||||
User daemon
|
||||
Group daemon
|
||||
|
||||
</IfModule>
|
||||
|
||||
# 'Main' server configuration
|
||||
#
|
||||
# The directives in this section set up the values used by the 'main'
|
||||
# server, which responds to any requests that aren't handled by a
|
||||
# <VirtualHost> definition. These values also provide defaults for
|
||||
# any <VirtualHost> containers you may define later in the file.
|
||||
#
|
||||
# All of these directives may appear inside <VirtualHost> containers,
|
||||
# in which case these default settings will be overridden for the
|
||||
# virtual host being defined.
|
||||
#
|
||||
|
||||
#
|
||||
# ServerAdmin: Your address, where problems with the server should be
|
||||
# e-mailed. This address appears on some server-generated pages, such
|
||||
# as error documents. e.g. admin@your-domain.com
|
||||
#
|
||||
ServerAdmin yui.heero@gmail.com
|
||||
|
||||
#
|
||||
# ServerName gives the name and port that the server uses to identify itself.
|
||||
# This can often be determined automatically, but we recommend you specify
|
||||
# it explicitly to prevent problems during startup.
|
||||
#
|
||||
# If your host doesn't have a registered DNS name, enter its IP address here.
|
||||
#
|
||||
#ServerName www.example.com:80
|
||||
|
||||
#
|
||||
# Deny access to the entirety of your server's filesystem. You must
|
||||
# explicitly permit access to web content directories in other
|
||||
# <Directory> blocks below.
|
||||
#
|
||||
<Directory />
|
||||
AllowOverride none
|
||||
Require all denied
|
||||
</Directory>
|
||||
|
||||
# intermediate configuration, tweak to your needs
|
||||
SSLProtocol all -SSLv2 -SSLv3
|
||||
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
|
||||
SSLHonorCipherOrder on
|
||||
|
||||
#
|
||||
# Note that from this point forward you must specifically allow
|
||||
# particular features to be enabled - so if something's not working as
|
||||
# you might expect, make sure that you have specifically enabled it
|
||||
# below.
|
||||
#
|
||||
|
||||
#
|
||||
# DocumentRoot: The directory out of which you will serve your
|
||||
# documents. By default, all requests are taken from this directory, but
|
||||
# symbolic links and aliases may be used to point to other locations.
|
||||
#
|
||||
<VirtualHost *:80>
|
||||
ServerName my-app
|
||||
DocumentRoot "/usr/local/apache2/htdocs"
|
||||
<Directory "/usr/local/apache2/htdocs">
|
||||
Options Indexes FollowSymLinks
|
||||
AllowOverride None
|
||||
Require all granted
|
||||
RewriteEngine on
|
||||
# Don't rewrite files or directories
|
||||
RewriteCond %{REQUEST_FILENAME} -f [OR]
|
||||
RewriteCond %{REQUEST_FILENAME} -d
|
||||
RewriteRule ^ - [L]
|
||||
# Rewrite everything else to index.html to allow HTML5 state links
|
||||
RewriteRule ^ index.html [L]
|
||||
</Directory>
|
||||
</VirtualHost>
|
||||
#
|
||||
# DirectoryIndex: sets the file that Apache will serve if a directory
|
||||
# is requested.
|
||||
#
|
||||
<IfModule dir_module>
|
||||
DirectoryIndex index.html
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The following lines prevent .htaccess and .htpasswd files from being
|
||||
# viewed by Web clients.
|
||||
#
|
||||
<Files ".ht*">
|
||||
Require all denied
|
||||
</Files>
|
||||
|
||||
#
|
||||
# ErrorLog: The location of the error log file.
|
||||
# If you do not specify an ErrorLog directive within a <VirtualHost>
|
||||
# container, error messages relating to that virtual host will be
|
||||
# logged here. If you *do* define an error logfile for a <VirtualHost>
|
||||
# container, that host's errors will be logged there and not here.
|
||||
#
|
||||
ErrorLog /proc/self/fd/2
|
||||
|
||||
#
|
||||
# LogLevel: Control the number of messages logged to the error_log.
|
||||
# Possible values include: debug, info, notice, warn, error, crit,
|
||||
# alert, emerg.
|
||||
#
|
||||
LogLevel warn
|
||||
|
||||
<IfModule log_config_module>
|
||||
#
|
||||
# The following directives define some format nicknames for use with
|
||||
# a CustomLog directive (see below).
|
||||
#
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b" common
|
||||
|
||||
<IfModule logio_module>
|
||||
# You need to enable mod_logio.c to use %I and %O
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The location and format of the access logfile (Common Logfile Format).
|
||||
# If you do not define any access logfiles within a <VirtualHost>
|
||||
# container, they will be logged here. Contrariwise, if you *do*
|
||||
# define per-<VirtualHost> access logfiles, transactions will be
|
||||
# logged therein and *not* in this file.
|
||||
#
|
||||
CustomLog /proc/self/fd/1 common
|
||||
|
||||
#
|
||||
# If you prefer a logfile with access, agent, and referer information
|
||||
# (Combined Logfile Format) you can use the following directive.
|
||||
#
|
||||
#CustomLog "logs/access_log" combined
|
||||
</IfModule>
|
||||
|
||||
<IfModule alias_module>
|
||||
#
|
||||
# Redirect: Allows you to tell clients about documents that used to
|
||||
# exist in your server's namespace, but do not anymore. The client
|
||||
# will make a new request for the document at its new location.
|
||||
# Example:
|
||||
# Redirect permanent /foo http://www.example.com/bar
|
||||
|
||||
#
|
||||
# Alias: Maps web paths into filesystem paths and is used to
|
||||
# access content that does not live under the DocumentRoot.
|
||||
# Example:
|
||||
# Alias /webpath /full/filesystem/path
|
||||
#
|
||||
# If you include a trailing / on /webpath then the server will
|
||||
# require it to be present in the URL. You will also likely
|
||||
# need to provide a <Directory> section to allow access to
|
||||
# the filesystem path.
|
||||
|
||||
#
|
||||
# ScriptAlias: This controls which directories contain server scripts.
|
||||
# ScriptAliases are essentially the same as Aliases, except that
|
||||
# documents in the target directory are treated as applications and
|
||||
# run by the server when requested rather than as documents sent to the
|
||||
# client. The same rules about trailing "/" apply to ScriptAlias
|
||||
# directives as to Alias.
|
||||
#
|
||||
ScriptAlias /cgi-bin/ "/usr/local/apache2/cgi-bin/"
|
||||
|
||||
</IfModule>
|
||||
|
||||
<IfModule cgid_module>
|
||||
#
|
||||
# ScriptSock: On threaded servers, designate the path to the UNIX
|
||||
# socket used to communicate with the CGI daemon of mod_cgid.
|
||||
#
|
||||
#Scriptsock cgisock
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# "/usr/local/apache2/cgi-bin" should be changed to whatever your ScriptAliased
|
||||
# CGI directory is, if you have that configured.
|
||||
#
|
||||
<Directory "/usr/local/apache2/cgi-bin">
|
||||
AllowOverride None
|
||||
Options None
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
<IfModule headers_module>
|
||||
#
|
||||
# Avoid passing HTTP_PROXY environment to CGI's on this or any proxied
|
||||
# backend servers which have lingering "httpoxy" defects.
|
||||
# 'Proxy' request header is undefined by the IETF, not listed by IANA
|
||||
#
|
||||
RequestHeader unset Proxy early
|
||||
</IfModule>
|
||||
|
||||
<IfModule mime_module>
|
||||
#
|
||||
# TypesConfig points to the file containing the list of mappings from
|
||||
# filename extension to MIME-type.
|
||||
#
|
||||
TypesConfig conf/mime.types
|
||||
|
||||
#
|
||||
# AddType allows you to add to or override the MIME configuration
|
||||
# file specified in TypesConfig for specific file types.
|
||||
#
|
||||
#AddType application/x-gzip .tgz
|
||||
#
|
||||
# AddEncoding allows you to have certain browsers uncompress
|
||||
# information on the fly. Note: Not all browsers support this.
|
||||
#
|
||||
#AddEncoding x-compress .Z
|
||||
#AddEncoding x-gzip .gz .tgz
|
||||
#
|
||||
# If the AddEncoding directives above are commented-out, then you
|
||||
# probably should define those extensions to indicate media types:
|
||||
#
|
||||
AddType application/x-compress .Z
|
||||
AddType application/x-gzip .gz .tgz
|
||||
|
||||
#
|
||||
# AddHandler allows you to map certain file extensions to "handlers":
|
||||
# actions unrelated to filetype. These can be either built into the server
|
||||
# or added with the Action directive (see below)
|
||||
#
|
||||
# To use CGI scripts outside of ScriptAliased directories:
|
||||
# (You will also need to add "ExecCGI" to the "Options" directive.)
|
||||
#
|
||||
#AddHandler cgi-script .cgi
|
||||
|
||||
# For type maps (negotiated resources):
|
||||
#AddHandler type-map var
|
||||
|
||||
#
|
||||
# Filters allow you to process content before it is sent to the client.
|
||||
#
|
||||
# To parse .shtml files for server-side includes (SSI):
|
||||
# (You will also need to add "Includes" to the "Options" directive.)
|
||||
#
|
||||
#AddType text/html .shtml
|
||||
#AddOutputFilter INCLUDES .shtml
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The mod_mime_magic module allows the server to use various hints from the
|
||||
# contents of the file itself to determine its type. The MIMEMagicFile
|
||||
# directive tells the module where the hint definitions are located.
|
||||
#
|
||||
#MIMEMagicFile conf/magic
|
||||
|
||||
#
|
||||
# Customizable error responses come in three flavors:
|
||||
# 1) plain text 2) local redirects 3) external redirects
|
||||
#
|
||||
# Some examples:
|
||||
#ErrorDocument 500 "The server made a boo boo."
|
||||
#ErrorDocument 404 /missing.html
|
||||
#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
|
||||
#ErrorDocument 402 http://www.example.com/subscription_info.html
|
||||
#
|
||||
|
||||
#
|
||||
# MaxRanges: Maximum number of Ranges in a request before
|
||||
# returning the entire resource, or one of the special
|
||||
# values 'default', 'none' or 'unlimited'.
|
||||
# Default setting is to accept 200 Ranges.
|
||||
#MaxRanges unlimited
|
||||
|
||||
#
|
||||
# EnableMMAP and EnableSendfile: On systems that support it,
|
||||
# memory-mapping or the sendfile syscall may be used to deliver
|
||||
# files. This usually improves server performance, but must
|
||||
# be turned off when serving from networked-mounted
|
||||
# filesystems or if support for these functions is otherwise
|
||||
# broken on your system.
|
||||
# Defaults: EnableMMAP On, EnableSendfile Off
|
||||
#
|
||||
#EnableMMAP off
|
||||
#EnableSendfile on
|
||||
|
||||
# Supplemental configuration
|
||||
#
|
||||
# The configuration files in the conf/extra/ directory can be
|
||||
# included to add extra features or to modify the default configuration of
|
||||
# the server, or you may simply copy their contents here and change as
|
||||
# necessary.
|
||||
|
||||
# Server-pool management (MPM specific)
|
||||
#Include conf/extra/httpd-mpm.conf
|
||||
|
||||
# Multi-language error messages
|
||||
#Include conf/extra/httpd-multilang-errordoc.conf
|
||||
|
||||
# Fancy directory listings
|
||||
#Include conf/extra/httpd-autoindex.conf
|
||||
|
||||
# Language settings
|
||||
#Include conf/extra/httpd-languages.conf
|
||||
|
||||
# User home directories
|
||||
#Include conf/extra/httpd-userdir.conf
|
||||
|
||||
# Real-time info on requests and configuration
|
||||
#Include conf/extra/httpd-info.conf
|
||||
|
||||
# Virtual hosts
|
||||
#Include conf/extra/httpd-vhosts.conf
|
||||
|
||||
# Local access to the Apache HTTP Server Manual
|
||||
#Include conf/extra/httpd-manual.conf
|
||||
|
||||
# Distributed authoring and versioning (WebDAV)
|
||||
#Include conf/extra/httpd-dav.conf
|
||||
|
||||
# Various default settings
|
||||
#Include conf/extra/httpd-default.conf
|
||||
|
||||
|
||||
# Configure mod_proxy_html to understand HTML4/XHTML1
|
||||
<IfModule proxy_html_module>
|
||||
Include conf/extra/proxy-html.conf
|
||||
</IfModule>
|
||||
|
||||
# Secure (SSL/TLS) connections
|
||||
#Include conf/extra/httpd-ssl.conf
|
||||
#
|
||||
# Note: The following must be present to support
|
||||
# starting without SSL on platforms with no /dev/random equivalent
|
||||
# but a statically compiled-in mod_ssl.
|
||||
#
|
||||
<IfModule ssl_module>
|
||||
SSLRandomSeed startup builtin
|
||||
SSLRandomSeed connect builtin
|
||||
</IfModule>
|
||||
|
11418
front/package-lock.json
generated
@ -1,49 +1,50 @@
|
||||
{
|
||||
"name": "karideo",
|
||||
"version": "0.0.0",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"ng": "ng",
|
||||
"start": "ng serve",
|
||||
"build": "ng build --prod",
|
||||
"test": "ng test",
|
||||
"lint": "ng lint",
|
||||
"e2e": "ng e2e"
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@angular/animations": "^8.2.14",
|
||||
"@angular/common": "^8.2.14",
|
||||
"@angular/compiler": "^8.2.14",
|
||||
"@angular/core": "^8.2.14",
|
||||
"@angular/forms": "^8.2.14",
|
||||
"@angular/platform-browser": "^8.2.14",
|
||||
"@angular/platform-browser-dynamic": "^8.2.14",
|
||||
"@angular/router": "^8.2.14",
|
||||
"core-js": "^3.6.2",
|
||||
"rxjs": "^6.5.4",
|
||||
"tslib": "^1.10.0",
|
||||
"zone.js": "~0.9.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-devkit/build-angular": "~0.803.21",
|
||||
"@angular/cli": "^8.3.21",
|
||||
"@angular/compiler-cli": "^8.2.14",
|
||||
"@angular/language-service": "^8.2.14",
|
||||
"@types/jasmine": "^3.5.0",
|
||||
"@types/jasminewd2": "^2.0.8",
|
||||
"@types/node": "^13.1.4",
|
||||
"codelyzer": "^5.0.1",
|
||||
"jasmine-core": "^3.5.0",
|
||||
"jasmine-spec-reporter": "^4.2.1",
|
||||
"karma": "^4.4.1",
|
||||
"karma-chrome-launcher": "^3.1.0",
|
||||
"karma-coverage-istanbul-reporter": "^2.1.1",
|
||||
"karma-jasmine": "^2.0.1",
|
||||
"karma-jasmine-html-reporter": "^1.5.1",
|
||||
"protractor": "^5.4.2",
|
||||
"ts-node": "^8.5.4",
|
||||
"tslint": "^5.20.1",
|
||||
"typescript": "~3.5.0"
|
||||
}
|
||||
"name": "karideo",
|
||||
"version": "0.0.0",
|
||||
"license": "MPL-2",
|
||||
"scripts": {
|
||||
"all": "npm run build && npm run test",
|
||||
"ng": "ng",
|
||||
"dev": "ng serve --configuration=develop --watch --port 4202",
|
||||
"dev-hot-update": "ng serve --configuration=develop --watch --hmr --port 4202",
|
||||
"build": "ng build --prod",
|
||||
"test": "ng test",
|
||||
"lint": "ng lint",
|
||||
"style": "prettier --write .",
|
||||
"e2e": "ng e2e",
|
||||
"update_packages": "ncu --upgrade",
|
||||
"install_dependency": "pnpm install --force",
|
||||
"link_kar_cw": "pnpm link ../../kar-cw/dist/kar-cw/",
|
||||
"unlink_kar_cw": "pnpm unlink ../../kar-cw/dist/kar-cw/"
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@angular/animations": "^18.0.2",
|
||||
"@angular/cdk": "^18.0.2",
|
||||
"@angular/common": "^18.0.2",
|
||||
"@angular/compiler": "^18.0.2",
|
||||
"@angular/core": "^18.0.2",
|
||||
"@angular/forms": "^18.0.2",
|
||||
"@angular/material": "^18.0.2",
|
||||
"@angular/platform-browser": "^18.0.2",
|
||||
"@angular/platform-browser-dynamic": "^18.0.2",
|
||||
"@angular/router": "^18.0.2",
|
||||
"rxjs": "^7.8.1",
|
||||
"zone.js": "^0.14.6",
|
||||
"zod": "3.23.8",
|
||||
"@kangaroo-and-rabbit/kar-cw": "^0.4.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-devkit/build-angular": "^18.0.3",
|
||||
"@angular-eslint/builder": "18.0.1",
|
||||
"@angular-eslint/eslint-plugin": "18.0.1",
|
||||
"@angular-eslint/eslint-plugin-template": "18.0.1",
|
||||
"@angular-eslint/schematics": "18.0.1",
|
||||
"@angular-eslint/template-parser": "18.0.1",
|
||||
"@angular/cli": "^18.0.3",
|
||||
"@angular/compiler-cli": "^18.0.2",
|
||||
"@angular/language-service": "^18.0.2",
|
||||
"npm-check-updates": "^16.14.20",
|
||||
"tslib": "^2.6.3"
|
||||
}
|
||||
}
|
9816
front/pnpm-lock.yaml
Normal file
@ -12,11 +12,31 @@ upgrade package
|
||||
npm audit fix
|
||||
```
|
||||
|
||||
## npm install -g angular-cli
|
||||
|
||||
start the application:
|
||||
```
|
||||
npx ng serve --watch
|
||||
```
|
||||
|
||||
more easily:
|
||||
npm install @angular-devkit/build-angular@0.901.9
|
||||
npm start
|
||||
|
||||
|
||||
|
||||
Apply linter:
|
||||
==============
|
||||
```
|
||||
npx ng lint
|
||||
```
|
||||
|
||||
|
||||
build the local image:
|
||||
|
||||
docker build -t gitea.atria-soft.org/kangaroo-and-rabbit/karideo:latest .
|
||||
|
||||
docker login gitea.atria-soft.org
|
||||
|
||||
docker push gitea.atria-soft.org/kangaroo-and-rabbit/karideo:latest
|
||||
|
||||
|
@ -1,16 +0,0 @@
|
||||
import { trigger, state, animate, transition, style } from '@angular/animations';
|
||||
|
||||
export const fadeInAnimation =
|
||||
trigger('fadeInAnimation', [
|
||||
// route 'enter' transition
|
||||
transition(':enter', [
|
||||
|
||||
// styles at start of transition
|
||||
style({ opacity: 0 }),
|
||||
|
||||
// animation and styles at end of transition
|
||||
animate('.5s', style({ opacity: 1 }))
|
||||
]),
|
||||
]);
|
||||
|
||||
|
@ -1,2 +0,0 @@
|
||||
export * from './fade-in.animation';
|
||||
export * from './slide-in-out.animation';
|
@ -1,51 +0,0 @@
|
||||
import { trigger, state, animate, transition, style } from '@angular/animations';
|
||||
|
||||
export const slideInOutAnimation =
|
||||
trigger('slideInOutAnimation', [
|
||||
|
||||
// end state styles for route container (host)
|
||||
state('*', style({
|
||||
// the view covers the whole screen with a semi-transparent background
|
||||
position: 'fixed',
|
||||
top: 0,
|
||||
left: 0,
|
||||
right: 0,
|
||||
bottom: 0,
|
||||
backgroundColor: 'rgba(0, 0, 0, 0.8)'
|
||||
})),
|
||||
|
||||
// route 'enter' transition
|
||||
transition(':enter', [
|
||||
|
||||
// styles at start of transition
|
||||
style({
|
||||
// start with the content positioned off the right of the screen,
|
||||
// -400% is required instead of -100% because the negative position adds to the width of the element
|
||||
right: '-400%',
|
||||
|
||||
// start with background opacity set to 0 (invisible)
|
||||
backgroundColor: 'rgba(0, 0, 0, 0)'
|
||||
}),
|
||||
|
||||
// animation and styles at end of transition
|
||||
animate('.5s ease-in-out', style({
|
||||
// transition the right position to 0 which slides the content into view
|
||||
right: 0,
|
||||
|
||||
// transition the background opacity to 0.8 to fade it in
|
||||
backgroundColor: 'rgba(0, 0, 0, 0.8)'
|
||||
}))
|
||||
]),
|
||||
|
||||
// route 'leave' transition
|
||||
transition(':leave', [
|
||||
// animation and styles at end of transition
|
||||
animate('.5s ease-in-out', style({
|
||||
// transition the right position to -400% which slides the content out of view
|
||||
right: '-400%',
|
||||
|
||||
// transition the background opacity to 0 to fade it out
|
||||
backgroundColor: 'rgba(0, 0, 0, 0)'
|
||||
}))
|
||||
])
|
||||
]);
|
@ -5,46 +5,117 @@
|
||||
*/
|
||||
|
||||
import { NgModule } from '@angular/core';
|
||||
import { RouterModule, Routes } from '@angular/router';
|
||||
import { ModuleWithProviders } from '@angular/core';
|
||||
import { Routes, RouterModule } from '@angular/router'; // CLI imports router
|
||||
|
||||
import { HomeComponent } from './home/home.component';
|
||||
import { TypeDetailComponent } from './type-detail/type-detail.component';
|
||||
import { GroupDetailComponent } from './group-detail/group-detail.component';
|
||||
import { SaisonDetailComponent } from './saison-detail/saison-detail.component';
|
||||
import { VideoDetailComponent } from './video-detail/video-detail.component';
|
||||
import { LoginComponent } from './login/login.component';
|
||||
import { SignUpComponent } from './sign-up/sign-up.component';
|
||||
import { SettingsComponent } from './settings/settings.component';
|
||||
import { VideoEditComponent } from './scene/video-edit/video-edit';
|
||||
//import { HelpComponent } from './help/help.component';
|
||||
import { HelpScene, HomeScene, SeasonEditScene, SeasonScene, SeriesEditScene, SeriesScene, SettingsScene, TypeScene, VideoEditScene, VideoScene } from './scene';
|
||||
import { UploadScene } from './scene/upload/upload';
|
||||
import { ForbiddenScene, OnlyUsersGuardHome, HomeOutScene, OnlyUnregisteredGuardHome, SsoScene, OnlyAdminGuard, OnlyUsersGuard, NotFound404Scene } from '@kangaroo-and-rabbit/kar-cw';
|
||||
// import { HelpComponent } from './help/help.component';
|
||||
|
||||
// see https://angular.io/guide/router
|
||||
|
||||
|
||||
const routes: Routes = [
|
||||
{ path: '', redirectTo: '/home', pathMatch: 'full'},
|
||||
{ path: 'home', component: HomeComponent },
|
||||
{ path: 'type/:id', component: TypeDetailComponent },
|
||||
{ path: 'group/:id', component: GroupDetailComponent },
|
||||
{ path: 'saison/:id', component: SaisonDetailComponent },
|
||||
{ path: 'video/:id', component: VideoDetailComponent },
|
||||
{ path: 'video-edit/:id', component: VideoEditComponent },
|
||||
{ path: 'login', component: LoginComponent },
|
||||
{ path: 'signup', component: SignUpComponent },
|
||||
{ path: 'settings', component: SettingsComponent },
|
||||
/*{ path: 'help', component: HelpComponent }*/
|
||||
{ path: '', redirectTo: '/home', pathMatch: 'full' },
|
||||
|
||||
{ path: 'forbidden', component: ForbiddenScene },
|
||||
|
||||
// ------------------------------------
|
||||
// -- home global interface
|
||||
// ------------------------------------
|
||||
{
|
||||
path: 'home',
|
||||
component: HomeScene,
|
||||
canActivate: [OnlyUsersGuardHome], // this route to unregistered path when not logged ==> permit to simplify display
|
||||
},
|
||||
{
|
||||
path: 'unregistered',
|
||||
component: HomeOutScene,
|
||||
canActivate: [OnlyUnregisteredGuardHome], // jump to the home when registered
|
||||
},
|
||||
// ------------------------------------
|
||||
// -- SSO Generic interface
|
||||
// ------------------------------------
|
||||
{ path: 'sso/:data/:keepConnected/:token', component: SsoScene },
|
||||
{ path: 'sso/:keepConnected/:token', component: SsoScene },
|
||||
{ path: 'sso', component: SsoScene },
|
||||
|
||||
// ------------------------------------
|
||||
// -- Generic pages
|
||||
// ------------------------------------
|
||||
{ path: 'help/:page', component: HelpScene },
|
||||
{ path: 'help', component: HelpScene },
|
||||
|
||||
// ------------------------------------
|
||||
// -- upload new data:
|
||||
// ------------------------------------
|
||||
{
|
||||
path: 'upload',
|
||||
component: UploadScene,
|
||||
canActivate: [OnlyAdminGuard],
|
||||
},
|
||||
{
|
||||
path: 'type/:typeId/:seriesId/:seasonId/:videoId',
|
||||
component: TypeScene,
|
||||
canActivate: [OnlyUsersGuard],
|
||||
},
|
||||
|
||||
{
|
||||
path: 'series/:typeId/:seriesId/:seasonId/:videoId',
|
||||
component: SeriesScene,
|
||||
canActivate: [OnlyUsersGuard],
|
||||
},
|
||||
{
|
||||
path: 'series-edit/:typeId/:seriesId/:seasonId/:videoId',
|
||||
component: SeriesEditScene,
|
||||
canActivate: [OnlyAdminGuard],
|
||||
},
|
||||
|
||||
{
|
||||
path: 'season/:typeId/:seriesId/:seasonId/:videoId',
|
||||
component: SeasonScene,
|
||||
canActivate: [OnlyUsersGuard],
|
||||
},
|
||||
{
|
||||
path: 'season-edit/:typeId/:seriesId/:seasonId/:videoId',
|
||||
component: SeasonEditScene,
|
||||
canActivate: [OnlyAdminGuard],
|
||||
},
|
||||
|
||||
{
|
||||
path: 'video/:typeId/:seriesId/:seasonId/:videoId',
|
||||
component: VideoScene,
|
||||
canActivate: [OnlyUsersGuard],
|
||||
},
|
||||
{
|
||||
path: 'video-edit/:typeId/:seriesId/:seasonId/:videoId',
|
||||
component: VideoEditScene,
|
||||
canActivate: [OnlyAdminGuard],
|
||||
},
|
||||
{
|
||||
path: 'settings',
|
||||
component: SettingsScene,
|
||||
canActivate: [OnlyUsersGuard],
|
||||
},
|
||||
{
|
||||
path: '**',
|
||||
component: NotFound404Scene,
|
||||
},
|
||||
];
|
||||
/*
|
||||
|
||||
@NgModule({
|
||||
imports: [
|
||||
RouterModule.forRoot(routes)
|
||||
],
|
||||
RouterModule.forRoot(
|
||||
routes,
|
||||
{
|
||||
//enableTracing: true, // <-- debugging purposes only
|
||||
},
|
||||
),
|
||||
],
|
||||
exports: [
|
||||
RouterModule
|
||||
]
|
||||
RouterModule,
|
||||
]
|
||||
})
|
||||
|
||||
export class AppRoutingModule {}
|
||||
*/
|
||||
|
||||
export const routing: ModuleWithProviders = RouterModule.forRoot(routes);
|
||||
export class AppRoutingModule { }
|
||||
// export const routing: ModuleWithProviders = RouterModule.forRoot(routes);
|
||||
|
||||
|
@ -1,10 +1,19 @@
|
||||
|
||||
<!-- exercise section -->
|
||||
<app-top-menu></app-top-menu>
|
||||
<!--
|
||||
<div class="main-content" ng-include="currentDisplay" ng-if="currentDisplay != ''"></div>
|
||||
<div class="main-modal" ng-include="currentModal" ng-if="currentModal != ''" ></div> <!-- (click)="onOutModal()" -->
|
||||
-->
|
||||
<div class="main-content">
|
||||
<router-outlet></router-outlet>
|
||||
</div>
|
||||
<!-- Generic global menu -->
|
||||
<karcw-top-menu [menu]="currentMenu" (callback)="eventOnMenu($event)" />
|
||||
<!-- all interfaced pages -->
|
||||
@if(autoConnectedDone) {
|
||||
<div class="main-content">
|
||||
<router-outlet ></router-outlet>
|
||||
</div>
|
||||
}
|
||||
@else {
|
||||
<div class="main-content">
|
||||
<div class="generic-page">
|
||||
<div class="fill-all colomn_mutiple">
|
||||
<b style="color:red;">Auto-connection in progress</b>
|
||||
<div class="clear"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
|
@ -22,6 +22,7 @@
|
||||
|
||||
.main-content {
|
||||
position: absolute;
|
||||
//width: ~"calc(calc(100% / 5 ) * 5)";
|
||||
width: 100%;
|
||||
height: ~"calc(100% - 56px)";
|
||||
top: 56px;
|
||||
@ -32,12 +33,14 @@
|
||||
position: fixed;
|
||||
overflow-y: auto;
|
||||
//background-color:#FF0;
|
||||
/*
|
||||
.main-reduce {
|
||||
width: 80%;
|
||||
width: 40%;
|
||||
height: 100%;
|
||||
margin: 0;
|
||||
padding: 0px 10% 0px 10%;
|
||||
display: block;
|
||||
overflow-y:scroll;
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
@ -1,27 +0,0 @@
|
||||
import { TestBed, async } from '@angular/core/testing';
|
||||
import { AppComponent } from './app.component';
|
||||
describe('AppComponent', () => {
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [
|
||||
AppComponent
|
||||
],
|
||||
}).compileComponents();
|
||||
}));
|
||||
it('should create the app', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.debugElement.componentInstance;
|
||||
expect(app).toBeTruthy();
|
||||
}));
|
||||
it(`should have as title 'app'`, async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
const app = fixture.debugElement.componentInstance;
|
||||
expect(app.title).toEqual('app');
|
||||
}));
|
||||
it('should render title in a h1 tag', async(() => {
|
||||
const fixture = TestBed.createComponent(AppComponent);
|
||||
fixture.detectChanges();
|
||||
const compiled = fixture.debugElement.nativeElement;
|
||||
expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!');
|
||||
}));
|
||||
});
|
@ -4,51 +4,243 @@
|
||||
* @license PROPRIETARY (see license file)
|
||||
*/
|
||||
|
||||
import { Component, OnInit} from '@angular/core';
|
||||
import { UserService } from './user.service';
|
||||
import { SessionService } from './session.service';
|
||||
import { CookiesService } from './cookies.service';
|
||||
import { Component, OnInit } from '@angular/core';
|
||||
import { ArianeService, MediaService, SeasonService, SeriesService, TypeService } from './service';
|
||||
import { EventOnMenu, MenuItem, MenuPosition, SSOService, SessionService, UserRoles222, UserService, isNullOrUndefined } from '@kangaroo-and-rabbit/kar-cw';
|
||||
|
||||
enum MenuEventType {
|
||||
SSO_LOGIN = "SSO_CALL_LOGIN",
|
||||
SSO_LOGOUT = "SSO_CALL_LOGOUT",
|
||||
SSO_SIGNUP = "SSO_CALL_SIGNUP",
|
||||
TYPE = "TYPE",
|
||||
SERIES = "SERIES",
|
||||
SEASON = "SEASON",
|
||||
VIDEO = "VIDEO",
|
||||
}
|
||||
|
||||
@Component({
|
||||
selector: 'app-root',
|
||||
templateUrl: './app.component.html',
|
||||
styleUrls: [
|
||||
'./app.component.less',
|
||||
]
|
||||
]
|
||||
})
|
||||
export class AppComponent implements OnInit {
|
||||
title = 'Karideo';
|
||||
constructor(private cookiesService: CookiesService,
|
||||
private userService: UserService,
|
||||
private sessionService: SessionService) {
|
||||
title: string = 'Karideo';
|
||||
autoConnectedDone: boolean = false;
|
||||
isConnected: boolean = false;
|
||||
signUpEnable: boolean = true;
|
||||
currentMenu: MenuItem[] = [];
|
||||
location: string = "home";
|
||||
|
||||
constructor(
|
||||
private mediaService: MediaService,
|
||||
private seasonService: SeasonService,
|
||||
private seriesService: SeriesService,
|
||||
private typeService: TypeService,
|
||||
|
||||
private userService: UserService,
|
||||
private sessionService: SessionService,
|
||||
private ssoService: SSOService,
|
||||
private arianeService: ArianeService) {
|
||||
|
||||
|
||||
}
|
||||
|
||||
  ngOnInit() {
    let login = this.cookiesService.get("yota-login");
    let password = this.cookiesService.get("yota-password");
    if ( login != ""
      && password != ""
      && password.length > 40) {
      console.log("Get previous connection ... " + login + ":xxxxxx");
      let self = this;
      this.userService.loginSha(login, password)
        .then(function(response) {
          console.log("auto log ==> OK");
          self.sessionService.create(response['sessionId'],
            response['login'],
            response['email'],
            response['role'],
            response['avatar']);
          //self.router.navigate(['home']);
        }).catch(function(response) {
          console.log("auto log ==> Error");
          self.cookiesService.remove("yota-login");
          self.cookiesService.remove("yota-password");
    this.autoConnectedDone = false;
    this.isConnected = false;
    this.updateMainMenu();
    let self = this;
    this.sessionService.change.subscribe((isConnected) => {
      self.isConnected = isConnected;
      self.autoConnectedDone = true;
      self.updateMainMenu();
    });
    this.ssoService.checkSignUpEnable()
      .then((value: boolean) => {
        console.log(`Get value signUp = ${value}`);
        self.signUpEnable = value;
        self.updateMainMenu();
      }).catch((error: any) => {
        console.log(`Can not call the sso to check the sign-up_interface: ${error}`);
      });

    this.userService.checkAutoConnect().then(() => {
      self.autoConnectedDone = true;
    }).catch(() => {
      self.autoConnectedDone = true;
    }).finally(() => {
      self.autoConnectedDone = true;
    });
    this.arianeService.segmentChange.subscribe((_segmentName: string) => {
      self.updateMainMenu();
    });
    this.arianeService.typeChange.subscribe((_typeId: number) => {
      self.updateMainMenu();
    });
    this.arianeService.seriesChange.subscribe((_seriesId: number) => {
      self.updateMainMenu();
    });
    this.arianeService.seasonChange.subscribe((_seasonId: number) => {
      self.updateMainMenu();
    });
    this.arianeService.videoChange.subscribe((_videoId: number) => {
      self.updateMainMenu();
    });
  }

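  // Menu callback dispatcher: menu entries declared with `callback: true` land here, and
  // `otherData` selects either an SSO action or a breadcrumb navigation target; the
  // `newWindows` / `ctrl` flags of the click are forwarded to the ArianeService
  // navigation helpers.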
  eventOnMenu(data: EventOnMenu): void {
    //console.log(`plopppppppppp ${JSON.stringify(this.route.snapshot.url)}`);
    //console.log(`Get event on menu: ${JSON.stringify(data, null, 4)}`);
    switch (data.menu.otherData) {
      case MenuEventType.SSO_LOGIN:
        this.ssoService.requestSignIn();
        break;
      case MenuEventType.SSO_LOGOUT:
        this.ssoService.requestSignOut();
        break;
      case MenuEventType.SSO_SIGNUP:
        this.ssoService.requestSignUp();
        break;
      case MenuEventType.TYPE:
        this.arianeService.navigateType(this.arianeService.getTypeId(), data.newWindows, data.ctrl);
        break;
      case MenuEventType.SERIES:
        this.arianeService.navigateSeries(this.arianeService.getSeriesId(), data.newWindows, data.ctrl);
        break;
      case MenuEventType.SEASON:
        this.arianeService.navigateSeason(this.arianeService.getSeasonId(), data.newWindows, data.ctrl);
        break;
      case MenuEventType.VIDEO:
        this.arianeService.navigateVideo(this.arianeService.getVideoId(), data.newWindows, data.ctrl);
        break;
    }
  }

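  // Rebuilds `currentMenu` for the kar-cw menu component. A connected user gets the
  // breadcrumb entries (type / series / season / video, each enabled only when the
  // corresponding id is set) plus an avatar sub-menu (add media for admins, settings,
  // help, sign out); an anonymous user only gets Home, Sign-up (model "disable" when the
  // SSO reports sign-up as unavailable) and Sign-in.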
  updateMainMenu(): void {
    if (this.isConnected) {
      this.currentMenu = [
        {
          position: MenuPosition.LEFT,
          hover: `You are logged as: ${this.sessionService.getLogin()}`,
          icon: "menu",
          title: "Menu",
          subMenu: [
            {
              position: MenuPosition.LEFT,
              hover: "Go to Home page",
              icon: "home",
              title: "Home",
              navigateTo: "home",
            }, {
              position: MenuPosition.LEFT,
              icon: "group_work",
              title: this.arianeService.getTypeName(),
              otherData: MenuEventType.TYPE,
              callback: true,
              enable: !isNullOrUndefined(this.arianeService.getTypeId()),
            }, {
              position: MenuPosition.LEFT,
              icon: "tag",
              title: this.arianeService.getSeriesName(),
              otherData: MenuEventType.SERIES,
              callback: true,
              enable: !isNullOrUndefined(this.arianeService.getSeriesId()),
            }, {
              position: MenuPosition.LEFT,
              icon: "album",
              title: `Season ${this.arianeService.getSeasonName()}`,
              otherData: MenuEventType.SEASON,
              callback: true,
              enable: !isNullOrUndefined(this.arianeService.getSeasonId()),
            }, {
              position: MenuPosition.LEFT,
              icon: "movie",
              title: this.arianeService.getVideoName(),
              otherData: MenuEventType.VIDEO,
              callback: true,
              enable: !isNullOrUndefined(this.arianeService.getVideoId()),
            },
          ],
        }, {
          position: MenuPosition.RIGHT,
          image: "assets/images/avatar_generic.svg",
          title: "",
          subMenu: [
            {
              position: MenuPosition.LEFT,
              hover: `You are logged as: <b>${this.sessionService.getLogin()}</b>`,
              title: `Sign in as ${this.sessionService.getLogin()}`,
            }, {
              position: MenuPosition.LEFT,
              icon: "add_circle",
              title: "Add media",
              navigateTo: "upload",
              enable: this.sessionService.hasRight(UserRoles222.admin),
            }, {
              position: MenuPosition.LEFT,
              icon: "settings",
              title: "Settings",
              navigateTo: "settings",
            }, {
              position: MenuPosition.LEFT,
              icon: "help",
              title: "Help",
              navigateTo: "help",
            }, {
              position: MenuPosition.LEFT,
              hover: "Exit connection",
              icon: "exit_to_app",
              title: "Sign out",
              callback: true,
              otherData: MenuEventType.SSO_LOGOUT,
            },
          ],
        },
      ];
    } else {
      this.currentMenu = [
        {
          position: MenuPosition.LEFT,
          hover: "Go to Home page",
          icon: "home",
          title: "Home",
          navigateTo: "home",
        }, {
          position: MenuPosition.RIGHT,
          hover: "Create a new account",
          icon: "add_circle_outline",
          title: "Sign-up",
          callback: true,
          model: this.signUpEnable ? undefined : "disable",
          otherData: MenuEventType.SSO_SIGNUP,
        }, {
          position: MenuPosition.RIGHT,
          hover: "Login page",
          icon: "account_circle",
          title: "Sign-in",
          callback: true,
          otherData: MenuEventType.SSO_LOGIN,
        },
      ];
    }
  }

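  // Maps the current ArianeService segment to a display label. A hedged alternative (not
  // part of this change) would be a lookup table, e.g.:
  //   const SEGMENT_LABELS: Record<string, string> = { type: "Type", season: "Season", series: "Series", video: "Video" };
  //   return SEGMENT_LABELS[segment] ?? "";
  // where SEGMENT_LABELS is a hypothetical constant introduced for illustration.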
  getSegmentDisplayable(): string {
    let segment = this.arianeService.getCurrentSegment();
    if (segment === "type") {
      return "Type";
    }
    if (segment === "season") {
      return "Season";
    }
    if (segment === "series") {
      return "Series";
    }
    if (segment === "video") {
      return "Video";
    }
    return "";
  }
}
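The new `ngOnInit` above subscribes to `sessionService.change` and to five `ArianeService` streams but never keeps the returned `Subscription` objects, so they live for the lifetime of the application. For a root component that is usually harmless, but the standard cleanup pattern is cheap. Below is a minimal, self-contained sketch with plain RxJS; `FakeSessionService` and `MenuHolder` are hypothetical stand-ins for illustration, not part of the Karideo code:

```typescript
import { Subject, Subscription } from 'rxjs';

// Stand-in for a service exposing a change stream (hypothetical, for illustration only).
class FakeSessionService {
  change = new Subject<boolean>();
}

class MenuHolder {
  private subscriptions: Subscription[] = [];

  constructor(private sessionService: FakeSessionService) {}

  init(): void {
    // Keep the Subscription so it can be released later.
    this.subscriptions.push(
      this.sessionService.change.subscribe((isConnected: boolean) => {
        console.log(`connection state: ${isConnected}`);
      }),
    );
  }

  destroy(): void {
    // Mirror of Angular's ngOnDestroy: drop every live subscription.
    this.subscriptions.forEach((subscription) => subscription.unsubscribe());
    this.subscriptions = [];
  }
}

const service = new FakeSessionService();
const holder = new MenuHolder(service);
holder.init();
service.change.next(true);  // logs "connection state: true"
holder.destroy();
service.change.next(false); // no longer logged
```

In the component itself the same idea would map onto implementing `OnDestroy` and pushing each `subscribe()` result into such an array before releasing them in `ngOnDestroy`.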