Compare commits

15 Commits

SHA1:

a88e7d2ce5
3d78b90ff1
3d7ee42ca8
77acd2060b
88f65f0806
6df71e3341
263a4ae4c0
57a48de52f
ae9f00a60a
93b95becb7
bd66703a08
86fec254aa
6f16bffe31
1fc1b5d35d
7d6e3bb8c8
.gitignore (vendored): 4 lines changed
@@ -53,6 +53,8 @@ testem.log
# System Files
.DS_Store
Thumbs.db
/env_dev/data
/env_dev/dataMongo

backPY/env

@@ -62,3 +64,5 @@ __pycache__

.design/
.vscode/
front/storybook-static
back/bin
@@ -5,7 +5,7 @@ version_file="../version.txt"
# update new release dependency
cd back
# update the Maven version number
mvn versions:set -DnewVersion=$(sed 's/DEV/SNAPSHOT/g' $version_file)
mvn versions:set -DnewVersion=$(sed 's/dev/SNAPSHOT/g' $version_file)
if grep -q "DEV" "$version_file"; then
# update all versions release of dependency
mvn versions:use-latest-releases
@@ -19,11 +19,14 @@ cd -

cd front
if grep -q "DEV" "$version_file"; then
if grep -q "dev" "$version_file"; then
# update all dependency
pnpm install
pnpm run update_packages
else
# in case of release ==> can not do it automatically ...
echo not implemented
fi

cd -
Dockerfile: 93 lines changed
@@ -3,23 +3,31 @@
## buyilding-end install applications:
##
######################################################################################
FROM archlinux:base-devel AS builder
FROM archlinux:base-devel AS common
# update system
RUN pacman -Syu --noconfirm && pacman-db-upgrade \
&& pacman -S --noconfirm jdk-openjdk maven npm pnpm \
&& pacman -S --noconfirm jdk-openjdk wget\
&& pacman -Scc --noconfirm

WORKDIR /tmp

FROM common AS builder
# update system
RUN pacman -Syu --noconfirm && pacman-db-upgrade \
&& pacman -S --noconfirm maven npm pnpm \
&& pacman -Scc --noconfirm

ENV PATH /tmp/node_modules/.bin:$PATH
WORKDIR /tmp

######################################################################################
##
## Build back:
##
######################################################################################
FROM builder AS buildBack
COPY back/pom.xml /tmp
COPY back/src /tmp/src/
FROM builder AS build_back
COPY back/pom.xml ./
COPY back/Formatter.xml ./
COPY back/src ./src/
RUN mvn clean compile assembly:single

######################################################################################
@@ -27,27 +35,44 @@ RUN mvn clean compile assembly:single
## Build front:
##
######################################################################################
FROM builder AS buildFront
FROM builder AS dependency_front

RUN echo "@kangaroo-and-rabbit:registry=https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/npm/" > /root/.npmrc

ADD front/package.json \
front/karma.conf.js \
front/protractor.conf.js \
/tmp/
front/pnpm-lock.yaml \
./
ADD front/src/theme ./src/theme

# install and cache app dependencies
RUN pnpm install
RUN pnpm install --prod=false

ADD front/e2e \
front/tsconfig.json \
front/tslint.json \
front/angular.json \
/tmp/
ADD front/src /tmp/src
###############################################################
## Install sources
###############################################################
FROM dependency_front AS load_sources_front

# generate build
RUN ng build --output-path=dist --configuration=production --base-href=/karideo/ --deploy-url=/karideo/
# JUST to get the vertion of the application and his sha...
COPY \
front/tsconfig.json \
front/tsconfig.node.json \
front/vite.config.mts \
front/index.html \
./

COPY front/public ./public
COPY front/src ./src

#We are not in prod mode ==> we need to overwrite the production env.
ARG env=front/.env.production
COPY ${env} .env

###############################################################
## Build the sources
###############################################################
FROM load_sources_front AS build_front
# build in bundle mode all the application
RUN pnpm static:build

######################################################################################
##
@@ -55,26 +80,24 @@ RUN ng build --output-path=dist --configuration=production --base-href=/karideo/
##
######################################################################################

FROM bellsoft/liberica-openjdk-alpine:latest
# add wget to manage the health check...
RUN apk add --no-cache wget
#FROM bellsoft/liberica-openjdk-alpine:latest
## add wget to manage the health check...
#RUN apk add --no-cache wget
FROM common AS prod

#FROM archlinux:base
#RUN pacman -Syu --noconfirm && pacman-db-upgrade
## install package
#RUN pacman -S --noconfirm jdk-openjdk wget
## intall npm
#RUN pacman -S --noconfirm npm
## clean all the caches Need only on the release environment
#RUN pacman -Scc --noconfirm
ENV LANG C.UTF-8

ENV LANG=C.UTF-8

COPY --from=buildBack /tmp/out/maven/*.jar /application/application.jar
COPY --from=buildFront /tmp/dist /application/front/
COPY --from=build_back /tmp/out/maven/*.jar /application/application.jar
COPY --from=build_front /tmp/dist /application/front/

WORKDIR /application/

EXPOSE 80

CMD ["java", "-Xms64M", "-Xmx1G", "-cp", "/application/application.jar", "org.kar.karideo.WebLauncher"]
# To verify health-check: docker inspect --format "{{json .State.Health }}" YOUR_SERVICE_NAME | jq
HEALTHCHECK --start-period=10s --start-interval=2s --interval=30s --timeout=5s --retries=10 \
CMD wget --no-verbose --tries=1 --spider http://localhost:80/api/health_check || exit 1

CMD ["java", "-Xms128M", "-Xmx1G", "-cp", "/application/application.jar", "org.atriasoft.karideo.WebLauncher"]

#RUN cat /etc/passwd
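The new front-end stage copies an environment file selected through the `env` build argument (`ARG env=front/.env.production`). A minimal sketch of overriding it at build time, using the image name from the new README; the `front/.env.development` path is an assumption and does not appear in this diff:

```bash
# Build the image with a non-production front-end environment file.
# front/.env.development is a hypothetical path; the Dockerfile defaults to front/.env.production.
docker build \
  --build-arg env=front/.env.development \
  -t gitea.atria-soft.org/kangaroo-and-rabbit/karideo:dev .
```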
README.md (new file): 125 lines
@@ -0,0 +1,125 @@
Karideo
=======

**K**angaroo **A**nd **R**abbit (v)ideo is a simple framework that provides video streaming for a personal network.

Run in local:
=============

Start tools
-----------

Start the basic server interfaces (DB (mySQL), Adminer):

```bash
# start the DB interface (no big data > 50 MB)
docker compose -f env_dev/docker-compose.yaml up -d
```

Start the Back-end:
-------------------

The back-end is developed in Java.

The first step is to configure your Java version (or select the JVM through the OS):
```bash
export PATH=$(ls -d --color=never /usr/lib/jvm/java-2*-openjdk)/bin:$PATH
```

Install the dependencies:
```bash
mvn install
```

Run the tests:
```bash
mvn test
```

Install it for external use:
```bash
mvn install
```

Execute the local server:
```bash
mvn exec:java@dev-mode
```

Start the Front-end:
--------------------

The front-end is developed in TypeScript.
```bash
cd front
pnpm install
pnpm dev
```

Display the result:
-------------------

[show the webpage: http://localhost:4203](http://localhost:4203)

Some other dev tools:
=====================

Format code:
------------

```bash
export PATH=$(ls -d --color=never /usr/lib/jvm/java-2*-openjdk)/bin:$PATH
mvn formatter:format
mvn test
```

Tools in production mode
========================

Changing the Log Level
----------------------

In a production environment, you can adjust the log level to help diagnose bugs more effectively.

The available log levels are:

| **Log Level Tag** | **org.kar.karideo** | **org.kar.archidata** | **other** |
| ----------------- | ------------------- | --------------------- | --------- |
| `prod`            | INFO                | INFO                  | INFO      |
| `prod-debug`      | DEBUG               | INFO                  | INFO      |
| `prod-trace`      | TRACE               | DEBUG                 | INFO      |
| `prod-trace-full` | TRACE               | TRACE                 | INFO      |
| `dev`             | TRACE               | DEBUG                 | INFO      |
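How the log level tag is selected at deployment time is not described in this diff. As a purely hypothetical illustration (the variable name `KARIDEO_LOG_LEVEL` is an assumption, not something defined by the project), the tag could be passed to the running container as an environment variable:

```bash
# Hypothetical: KARIDEO_LOG_LEVEL is an assumed variable name, not defined in this repository.
docker run -d \
  -e KARIDEO_LOG_LEVEL=prod-debug \
  ${REGISTRY_ADDRESS}/kangaroo-and-rabbit/karideo:latest
```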

Manual setup in production
==========================

Connect to the registry
-----------------------

To log in and log out of the registry:
```bash
export REGISTRY_ADDRESS=gitea.atria-soft.org
docker login -u <<YOUR_USER_NAME>> ${REGISTRY_ADDRESS}
docker logout ${REGISTRY_ADDRESS}
```

Pull the base Docker images
---------------------------

```bash
docker pull archlinux:base-devel
docker pull bellsoft/liberica-openjdk-alpine:latest
```

Create the version
------------------

Execute in the local folder (use `dev` for development and `latest` for a production release):

```bash
export TAG_DOCKER=latest
export REGISTRY_ADDRESS=gitea.atria-soft.org
docker build -t ${REGISTRY_ADDRESS}/kangaroo-and-rabbit/karideo:${TAG_DOCKER} .
docker push ${REGISTRY_ADDRESS}/kangaroo-and-rabbit/karideo:${TAG_DOCKER}
```
@@ -53,6 +53,9 @@ Checkstyle configuration that checks the sun coding conventions.
<module name="LambdaParameterName"/>
<module name="Regexp"/>
<module name="RegexpSinglelineJava"/>
<module name="UnusedPrivateField">
<property name="ignorePattern" value="LOGGER"/>
</module>
</module>
<module name="BeforeExecutionExclusionFileFilter">
<property name="fileNamePattern" value="module\-info\.java$"/>
@@ -1,7 +1,8 @@
FROM maven:3.6.3-openjdk-16 AS build
FROM maven:3-openjdk-23 AS build

COPY pom.xml /tmp/
COPY src /tmp/src/
COPY Formatter.xml /tmp/
WORKDIR /tmp/
RUN mvn clean compile assembly:single
@@ -11,7 +11,7 @@ mvn package
// download all dependency in out/maven/dependency
mvn dependency:copy-dependencies

java -cp out/maven/kar-karideo-0.1.0.jar org.kar.karideo.WebLauncher
java -cp out/maven/kar-karideo-0.1.0.jar org.atriasoft.karideo.WebLauncher

// create a single package jar
@@ -19,7 +19,7 @@ mvn clean compile assembly:single

java -cp out/maven/karideo-0.1.0-jar-with-dependencies.jar org.kar.karideo.WebLauncher
java -cp out/maven/karideo-0.1.0-jar-with-dependencies.jar org.atriasoft.karideo.WebLauncher
back/pom.xml: 191 lines changed
@@ -1,36 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.kar</groupId>
<groupId>org.atriasoft</groupId>
<artifactId>karideo</artifactId>
<version>0.3.0</version>
<properties>
<maven.compiler.version>3.1</maven.compiler.version>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<maven.compiler.version>3.13.0</maven.compiler.version>
<maven.compiler.source>23</maven.compiler.source>
<maven.compiler.target>23</maven.compiler.target>
<maven.dependency.version>3.1.1</maven.dependency.version>
</properties>
<repositories>
<repository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>kangaroo-and-rabbit</groupId>
<groupId>org.atria-soft</groupId>
<artifactId>archidata</artifactId>
<version>0.8.4</version>
<version>0.28.0</version>
</dependency>
<!-- Loopback of logger JDK logging API to SLF4J -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<artifactId>jul-to-slf4j</artifactId>
<version>2.0.9</version>
</dependency>
<!-- generic logger of SLF4J to console (in color) -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.4.11</version>
</dependency>
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.12.2</version>
</dependency>
<dependency>
<groupId>org.codehaus.janino</groupId>
<artifactId>janino</artifactId>
<version>3.1.9</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>2.16.1</version>
<version>2.18.1</version>
</dependency>
<!--
************************************************************
@@ -40,15 +51,25 @@
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.10.1</version>
<version>5.11.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.10.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.11.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>net.revelc.code.formatter</groupId>
<artifactId>formatter-maven-plugin</artifactId>
<version>2.24.1</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.5.0</version>
</dependency>
</dependencies>
<build>
<sourceDirectory>src</sourceDirectory>
@@ -77,16 +98,45 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.4.0</version>
<version>3.2.0</version>
<executions>
<execution>
<id>prod-mode</id>
<goals>
<goal>java</goal>
</goals>
<configuration>
<mainClass>org.atriasoft.karideo.WebLauncher</mainClass>
</configuration>
</execution>
<execution>
<id>dev-mode</id>
<goals>
<goal>java</goal>
</goals>
<configuration>
<mainClass>org.atriasoft.karideo.WebLauncherLocal</mainClass>
</configuration>
</execution>
<execution>
<id>generate-api</id>
<goals>
<goal>java</goal>
</goals>
<configuration>
<mainClass>org.atriasoft.karideo.GenerateApi</mainClass>
</configuration>
</execution>
</executions>
<configuration>
<mainClass>org.kar.karideo.WebLauncher</mainClass>
<mainClass/>
</configuration>
</plugin>
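The rewritten `exec-maven-plugin` block above replaces the single default main class with three named executions. A short usage sketch based on the execution ids it defines (the `dev-mode` invocation also appears in the new README):

```bash
cd back
mvn exec:java@prod-mode     # runs org.atriasoft.karideo.WebLauncher
mvn exec:java@dev-mode      # runs org.atriasoft.karideo.WebLauncherLocal for local development
mvn exec:java@generate-api  # runs org.atriasoft.karideo.GenerateApi
```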
<!-- Create the source bundle -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.1</version>
<version>4.0.0-beta-1</version>
<executions>
<execution>
<id>attach-sources</id>
@@ -100,10 +150,12 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M5</version>
<version>3.2.5</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.7.1</version>
<configuration>
<archive>
<manifest>
@@ -115,94 +167,21 @@
</descriptorRefs>
</configuration>
</plugin>
<!-- Create coverage -->
<!--
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.10</version>
<executions>
<execution>
<id>prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
<execution>
<id>jacoco-check</id>
<goals>
<goal>check</goal>
</goals>
<configuration>
<rules>
<rule>
<element>PACKAGE</element>
<limits>
<limit>
<counter>LINE</counter>
<value>COVEREDRATIO</value>
<minimum>0.50</minimum>
</limit>
</limits>
</rule>
</rules>
</configuration>
</execution>
</executions>
</plugin>
-->
<!-- Java-doc generation for stand-alone site -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.2.0</version>
<version>3.3.0</version>
<configuration>
<show>private</show>
<nohelp>true</nohelp>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<id>exec-application</id>
<phase>package</phase>
<goals>
<goal>java</goal>
</goals>
</execution>
</executions>
<configuration>
<mainClass>org.kar.karideo.WebLauncher</mainClass>
</configuration>
</plugin>
<!-- Check the style of the code -->
<!--
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<configLocation>CheckStyle.xml</configLocation>
<consoleOutput>true</consoleOutput>
<failOnViolation>true</failOnViolation>
<failsOnError>true</failsOnError>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
</configuration>
</plugin>
<plugin>
<groupId>net.revelc.code.formatter</groupId>
<artifactId>formatter-maven-plugin</artifactId>
<version>2.12.2</version>
<version>2.24.1</version>
<configuration>
<encoding>UTF-8</encoding>
<lineEnding>LF</lineEnding>
@@ -226,7 +205,15 @@
</execution>
</executions>
</plugin>
-->
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>4.8.5.0</version>
<configuration>
<includeFilterFile>spotbugs-security-include.xml</includeFilterFile>
<excludeFilterFile>spotbugs-security-exclude.xml</excludeFilterFile>
</configuration>
</plugin>
</plugins>
</build>
<!-- Generate Java-docs As Part Of Project Reports -->
@@ -235,7 +222,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.2.0</version>
<version>3.3.0</version>
<configuration>
<show>public</show>
</configuration>
back/pom.xml.versionsBackup (new file): 245 lines
@@ -0,0 +1,245 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.kar</groupId>
<artifactId>karideo</artifactId>
<version>0.3.0</version>
<properties>
<maven.compiler.version>3.1</maven.compiler.version>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<maven.dependency.version>3.1.1</maven.dependency.version>
</properties>
<repositories>
<repository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>kangaroo-and-rabbit</groupId>
<artifactId>archidata</artifactId>
<version>0.11.0</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>2.0.9</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>2.16.1</version>
</dependency>
<!--
************************************************************
** TEST dependency **
************************************************************
-->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.10.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.10.1</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src</sourceDirectory>
<testSourceDirectory>test/src</testSourceDirectory>
<directory>${project.basedir}/out/maven/</directory>
<resources>
<resource>
<directory>src/resources</directory>
</resource>
</resources>
<testResources>
<testResource>
<directory>${basedir}/test/resources</directory>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven.compiler.version}</version>
<configuration>
<source>${maven.compiler.source}</source>
<target>${maven.compiler.target}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.4.0</version>
<configuration>
<mainClass>org.kar.karideo.WebLauncher</mainClass>
</configuration>
</plugin>
<!-- Create the source bundle -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- junit results -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M5</version>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<archive>
<manifest>
<mainClass>fully.qualified.MainClass</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
<!-- Create coverage -->
<!--
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.10</version>
<executions>
<execution>
<id>prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
<execution>
<id>jacoco-check</id>
<goals>
<goal>check</goal>
</goals>
<configuration>
<rules>
<rule>
<element>PACKAGE</element>
<limits>
<limit>
<counter>LINE</counter>
<value>COVEREDRATIO</value>
<minimum>0.50</minimum>
</limit>
</limits>
</rule>
</rules>
</configuration>
</execution>
</executions>
</plugin>
-->
<!-- Java-doc generation for stand-alone site -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.2.0</version>
<configuration>
<show>private</show>
<nohelp>true</nohelp>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<id>exec-application</id>
<phase>package</phase>
<goals>
<goal>java</goal>
</goals>
</execution>
</executions>
<configuration>
<mainClass>org.kar.karideo.WebLauncher</mainClass>
</configuration>
</plugin>
<!-- Check the style of the code -->
<!--
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<configLocation>CheckStyle.xml</configLocation>
<consoleOutput>true</consoleOutput>
<failOnViolation>true</failOnViolation>
<failsOnError>true</failsOnError>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
</configuration>
</plugin>
<plugin>
<groupId>net.revelc.code.formatter</groupId>
<artifactId>formatter-maven-plugin</artifactId>
<version>2.12.2</version>
<configuration>
<encoding>UTF-8</encoding>
<lineEnding>LF</lineEnding>
<configFile>Formatter.xml</configFile>
<directories>
<directory>src/</directory>
<directory>test/src</directory>
</directories>
<includes>
<include>**/*.java</include>
</includes>
<excludes>
<exclude>module-info.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<goals>
<goal>validate</goal>
</goals>
</execution>
</executions>
</plugin>
-->
</plugins>
</build>
<!-- Generate Java-docs As Part Of Project Reports -->
<reporting>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.2.0</version>
<configuration>
<show>public</show>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
@@ -1,9 +1,9 @@
org.kar.karideo.dataTmpFolder=/application/data/tmp
org.kar.karideo.dataTmpFolder=/application/data/media
org.kar.karideo.rest.oauth=http://192.168.1.156:21080/oauth/api/
org.kar.karideo.db.host=1992.156.1.156
org.kar.karideo.db.port=20306
org.kar.karideo.db.login=root
org.kar.karideo.db.port=klkhj456gkgtkhjgvkujfhjgkjhgsdfhb3467465fgdhdesfgh
org.kar.karideo.db.name=karideo
org.kar.karideo.address=http://0.0.0.0:18080/karideo/api/
org.atriasoft.karideo.dataTmpFolder=/application/data/tmp
org.atriasoft.karideo.dataTmpFolder=/application/data/media
org.atriasoft.karideo.rest.oauth=http://192.168.1.156:21080/oauth/api/
org.atriasoft.karideo.db.host=1992.156.1.156
org.atriasoft.karideo.db.port=20306
org.atriasoft.karideo.db.login=root
org.atriasoft.karideo.db.port=klkhj456gkgtkhjgvkujfhjgkjhgsdfhb3467465fgdhdesfgh
org.atriasoft.karideo.db.name=karideo
org.atriasoft.karideo.address=http://0.0.0.0:18080/karideo/api/
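The configuration keys keep their names but move from the `org.kar.karideo.` prefix to `org.atriasoft.karideo.`. A minimal migration sketch for an existing deployment, assuming the keys live in a file named `application.properties` (that filename is an assumption; the real file name is not shown in this diff):

```bash
# Rename the configuration prefix in place; the target filename is hypothetical.
sed -i 's/^org\.kar\.karideo\./org.atriasoft.karideo./' application.properties
```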
back/src/org/atriasoft/karideo/GenerateApi.java (new file): 17 lines

@@ -0,0 +1,17 @@
package org.atriasoft.karideo;

import org.atriasoft.karideo.migration.Initialization;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GenerateApi {
private final static Logger LOGGER = LoggerFactory.getLogger(GenerateApi.class);

private GenerateApi() {}

public static void main(final String[] args) throws Exception {
LOGGER.info("Generate API");
Initialization.generateObjects();
LOGGER.info("STOP the REST server.");
}
}
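This new entry point only calls `Initialization.generateObjects()`; it is wired to the `generate-api` execution of the `exec-maven-plugin` in `back/pom.xml`, so it can be run as:

```bash
cd back
mvn exec:java@generate-api
```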
@@ -1,50 +1,54 @@
package org.kar.karideo;
package org.atriasoft.karideo;

import java.net.URI;
import java.util.TimeZone;
import java.util.logging.LogManager;

import org.atriasoft.archidata.UpdateJwtPublicKey;
import org.atriasoft.archidata.catcher.GenericCatcher;
import org.atriasoft.archidata.db.DbConfig;
import org.atriasoft.archidata.filter.CORSFilter;
import org.atriasoft.archidata.filter.OptionFilter;
import org.atriasoft.archidata.migration.MigrationEngine;
import org.atriasoft.archidata.tools.ConfigBaseVariable;
import org.atriasoft.archidata.tools.ContextGenericTools;
import org.atriasoft.karideo.api.DataResource;
import org.atriasoft.karideo.api.Front;
import org.atriasoft.karideo.api.HealthCheck;
import org.atriasoft.karideo.api.MediaResource;
import org.atriasoft.karideo.api.SeasonResource;
import org.atriasoft.karideo.api.SeriesResource;
import org.atriasoft.karideo.api.TypeResource;
import org.atriasoft.karideo.api.UserMediaAdvancementResource;
import org.atriasoft.karideo.api.UserResource;
import org.atriasoft.karideo.filter.KarideoAuthenticationFilter;
import org.atriasoft.karideo.migration.Initialization;
import org.atriasoft.karideo.migration.Migration20230810;
import org.atriasoft.karideo.migration.Migration20231015;
import org.atriasoft.karideo.migration.Migration20231126;
import org.atriasoft.karideo.migration.Migration20240226;
import org.atriasoft.karideo.migration.Migration20240611;
import org.atriasoft.karideo.migration.Migration20250214;
import org.atriasoft.karideo.migration.Migration20250414;
import org.glassfish.grizzly.http.server.HttpServer;
import org.glassfish.jersey.grizzly2.httpserver.GrizzlyHttpServerFactory;
import org.glassfish.jersey.jackson.JacksonFeature;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.server.ResourceConfig;
import org.kar.archidata.GlobalConfiguration;
import org.kar.archidata.UpdateJwtPublicKey;
import org.kar.archidata.api.DataResource;
import org.kar.archidata.catcher.ExceptionCatcher;
import org.kar.archidata.catcher.FailExceptionCatcher;
import org.kar.archidata.catcher.InputExceptionCatcher;
import org.kar.archidata.catcher.SystemExceptionCatcher;
import org.kar.archidata.db.DBConfig;
import org.kar.archidata.filter.CORSFilter;
import org.kar.archidata.filter.OptionFilter;
import org.kar.archidata.migration.MigrationEngine;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.kar.karideo.api.Front;
import org.kar.karideo.api.HealthCheck;
import org.kar.karideo.api.MediaResource;
import org.kar.karideo.api.SeasonResource;
import org.kar.karideo.api.SeriesResource;
import org.kar.karideo.api.TypeResource;
import org.kar.karideo.api.UserMediaAdvancementResource;
import org.kar.karideo.api.UserResource;
import org.kar.karideo.filter.KarideoAuthenticationFilter;
import org.kar.karideo.migration.Initialization;
import org.kar.karideo.migration.Migration20230810;
import org.kar.karideo.migration.Migration20231015;
import org.kar.karideo.migration.Migration20231126;
import org.kar.karideo.migration.Migration20240226;
import org.glassfish.jersey.server.validation.ValidationFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;

import jakarta.ws.rs.core.UriBuilder;

public class WebLauncher {
final static Logger LOGGER = LoggerFactory.getLogger(WebLauncher.class);
public static DBConfig dbConfig;
protected UpdateJwtPublicKey keyUpdater = null;
protected HttpServer server = null;

public WebLauncher() {
TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
ConfigBaseVariable.bdDatabase = "karideo";
}

@@ -62,12 +66,19 @@ public class WebLauncher {
migrationEngine.add(new Migration20231015());
migrationEngine.add(new Migration20231126());
migrationEngine.add(new Migration20240226());
migrationEngine.add(new Migration20240611());
migrationEngine.add(new Migration20250214());
migrationEngine.add(new Migration20250414());
WebLauncher.LOGGER.info("Migrate the DB [START]");
migrationEngine.migrateWaitAdmin(GlobalConfiguration.dbConfig);
migrationEngine.migrateWaitAdmin(new DbConfig());
WebLauncher.LOGGER.info("Migrate the DB [STOP]");
}

public static void main(final String[] args) throws Exception {
// Loop-back of logger JDK logging API to SLF4J
LogManager.getLogManager().reset();
SLF4JBridgeHandler.install();

WebLauncher.LOGGER.info("[START] application wake UP");
final WebLauncher launcher = new WebLauncher();
launcher.migrateDB();
@@ -95,10 +106,7 @@ public class WebLauncher {
// global authentication system
rc.register(KarideoAuthenticationFilter.class);
// register exception catcher
rc.register(InputExceptionCatcher.class);
rc.register(SystemExceptionCatcher.class);
rc.register(FailExceptionCatcher.class);
rc.register(ExceptionCatcher.class);
GenericCatcher.addAll(rc);
// add default resource:
rc.register(UserResource.class);
rc.register(SeriesResource.class);
@@ -110,14 +118,20 @@ public class WebLauncher {

rc.register(HealthCheck.class);
rc.register(Front.class);

ContextGenericTools.addJsr310(rc);
// add jackson to be discover when we are ins stand-alone server
rc.register(JacksonFeature.class);
// enable jersey specific validations (@Valid)
rc.register(ValidationFeature.class);
// enable this to show low level request
//rc.property(LoggingFeature.LOGGING_FEATURE_LOGGER_LEVEL_SERVER, Level.WARNING.getName());
// rc.property(LoggingFeature.LOGGING_FEATURE_LOGGER_LEVEL_SERVER, Level.WARNING.getName());

this.server = GrizzlyHttpServerFactory.createHttpServer(getBaseURI(), rc);
final HttpServer serverLink = this.server;
// for (final NetworkListener listener : serverLink.getListeners()) {
// listener.getKeepAlive().setIdleTimeoutInSeconds(30); // Set idle timeout
// listener.getKeepAlive().setMaxRequestsCount(80); // Set request timeout
// }
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
@@ -1,40 +1,25 @@

package org.kar.karideo;
package org.atriasoft.karideo;

import java.util.List;
import java.util.logging.LogManager;

import org.kar.archidata.api.DataResource;
import org.kar.archidata.dataAccess.DataFactoryTsApi;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.kar.karideo.api.Front;
import org.kar.karideo.api.HealthCheck;
import org.kar.karideo.api.MediaResource;
import org.kar.karideo.api.SeasonResource;
import org.kar.karideo.api.SeriesResource;
import org.kar.karideo.api.TypeResource;
import org.kar.karideo.api.UserMediaAdvancementResource;
import org.kar.karideo.api.UserResource;
import org.kar.karideo.migration.Initialization;
import org.atriasoft.archidata.tools.ConfigBaseVariable;
import org.atriasoft.karideo.migration.Initialization;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;

public class WebLauncherLocal extends WebLauncher {
private final static Logger LOGGER = LoggerFactory.getLogger(WebLauncherLocal.class);
private static final Logger LOGGER = LoggerFactory.getLogger(WebLauncherLocal.class);

private WebLauncherLocal() {}

public static void main(final String[] args) throws Exception {
DataFactoryTsApi.generatePackage(List.of(
Front.class,
HealthCheck.class,
SeasonResource.class,
SeriesResource.class,
TypeResource.class,
UserMediaAdvancementResource.class,
UserResource.class,
MediaResource.class,
DataResource.class),
Initialization.CLASSES_BASE, "../front/src/app/back-api/");
// Loop-back of logger JDK logging API to SLF4J
LogManager.getLogManager().reset();
SLF4JBridgeHandler.install();
// Generate the APIs in type-script
Initialization.generateObjects();
final WebLauncherLocal launcher = new WebLauncherLocal();
launcher.process();
LOGGER.info("end-configure the server & wait finish process:");
@@ -48,6 +33,7 @@ public class WebLauncherLocal extends WebLauncher {
// for local test:
ConfigBaseVariable.apiAdress = "http://0.0.0.0:18080/karideo/api/";
ConfigBaseVariable.dbPort = "3906";
ConfigBaseVariable.testMode = "true";
}
try {
super.migrateDB();
back/src/org/atriasoft/karideo/api/DataResource.java (new file): 531 lines
@@ -0,0 +1,531 @@
package org.atriasoft.karideo.api;

import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Date;
import java.util.UUID;

import javax.imageio.ImageIO;

import org.atriasoft.archidata.annotation.apiGenerator.ApiInputOptional;
import org.atriasoft.archidata.annotation.security.PermitTokenInURI;
import org.atriasoft.archidata.api.MediaStreamer;
import org.atriasoft.archidata.dataAccess.DataAccess;
import org.atriasoft.archidata.dataAccess.QueryCondition;
import org.atriasoft.archidata.dataAccess.options.Condition;
import org.atriasoft.archidata.exception.FailException;
import org.atriasoft.archidata.filter.GenericContext;
import org.atriasoft.archidata.model.Data;
import org.atriasoft.archidata.tools.ConfigBaseVariable;
import org.bson.types.ObjectId;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.swagger.v3.oas.annotations.Operation;
import jakarta.annotation.security.RolesAllowed;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.HeaderParam;
import jakarta.ws.rs.InternalServerErrorException;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.QueryParam;
import jakarta.ws.rs.core.CacheControl;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.HttpHeaders;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.SecurityContext;
import jakarta.ws.rs.core.StreamingOutput;

// https://stackoverflow.com/questions/35367113/jersey-webservice-scalable-approach-to-download-file-and-reply-to-client
// https://gist.github.com/aitoroses/4f7a2b197b732a6a691d

@Path("/data")
@Produces(MediaType.APPLICATION_JSON)
public class DataResource {
private static final Logger LOGGER = LoggerFactory.getLogger(DataResource.class);
private final static int CHUNK_SIZE = 1024 * 1024; // 1MB chunks
private final static int CHUNK_SIZE_IN = 50 * 1024 * 1024; // 1MB chunks
/** Upload some datas */
private static long tmpFolderId = 1;

private static void createFolder(final String path) throws IOException {
if (!Files.exists(java.nio.file.Path.of(path))) {
// Log.print("Create folder: " + path);
Files.createDirectories(java.nio.file.Path.of(path));
}
}

public static long getTmpDataId() {
return tmpFolderId++;
}

public static String getTmpFileInData(final long tmpFolderId) {
final String filePath = ConfigBaseVariable.getTmpDataFolder() + File.separator + tmpFolderId;
try {
createFolder(ConfigBaseVariable.getTmpDataFolder() + File.separator);
} catch (final IOException e) {
e.printStackTrace();
}
return filePath;
}

public static String getFileDataOld(final UUID uuid) {
final String stringUUID = uuid.toString();
final String part1 = stringUUID.substring(0, 2);
final String part2 = stringUUID.substring(2, 4);
final String part3 = stringUUID.substring(4);
final String finalPath = part1 + File.separator + part2;
String filePath = ConfigBaseVariable.getMediaDataFolder() + "_uuid" + File.separator + finalPath
+ File.separator;
try {
createFolder(filePath);
} catch (final IOException e) {
e.printStackTrace();
}
filePath += part3;
return filePath;
}

public static String getFileData(final ObjectId oid) {
final String stringOid = oid.toHexString();
String dir1 = stringOid.substring(0, 2);
String dir2 = stringOid.substring(2, 4);
String dir3 = stringOid.substring(4, 6);
try {
final MessageDigest digest = MessageDigest.getInstance("SHA-256");
final byte[] hashBytes = digest.digest(oid.toByteArray());
dir1 = String.format("%02x", hashBytes[0]);
dir2 = String.format("%02x", hashBytes[1]);
dir3 = String.format("%02x", hashBytes[2]);
} catch (final NoSuchAlgorithmException ex) {
LOGGER.error("Fail to generate the hash of the objectId ==> ise direct value ... {}", ex.getMessage());
}
final String finalPath = dir1 + File.separator + dir2 + File.separator + dir3;
String filePath = ConfigBaseVariable.getMediaDataFolder() + "_oid" + File.separator + finalPath
+ File.separator;
try {
createFolder(filePath);
} catch (final IOException e) {
e.printStackTrace();
}
filePath += stringOid;
return filePath;
}

public static String getFileMetaData(final ObjectId oid) {
return getFileData(oid) + ".json";
}

public Data getWithSha512(final String sha512) {
LOGGER.info("find sha512 = {}", sha512);
try {
return DataAccess.getWhere(Data.class, new Condition(new QueryCondition("sha512", "=", sha512)));
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}

public Data getWithId(final long id) {
LOGGER.info("find id = {}", id);
try {
return DataAccess.get(Data.class, id);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}

protected String getMimeType(final String extension) throws IOException {
return switch (extension.toLowerCase()) {
case "jpg", "jpeg" -> "image/jpeg";
case "png" -> "image/png";
case "webp" -> "image/webp";
case "mka" -> "audio/x-matroska";
case "mkv" -> "video/x-matroska";
case "webm" -> "video/webm";
default -> throw new IOException("Can not find the mime type of data input: '" + extension + "'");
};
}

public Data createNewData(final long tmpUID, final String originalFileName, final String sha512)
throws IOException {
// determine mime type:
Data injectedData = new Data();
String mimeType = "";
final String extension = originalFileName.substring(originalFileName.lastIndexOf('.') + 1);
mimeType = getMimeType(extension);
injectedData.mimeType = mimeType;
injectedData.sha512 = sha512;
final String tmpPath = getTmpFileInData(tmpUID);
injectedData.size = Files.size(Paths.get(tmpPath));

try {
injectedData = DataAccess.insert(injectedData);
} catch (final Exception e) {
e.printStackTrace();
return null;
}
final String mediaPath = getFileData(injectedData.oid);
LOGGER.info("src = {}", tmpPath);
LOGGER.info("dst = {}", mediaPath);
Files.move(Paths.get(tmpPath), Paths.get(mediaPath), StandardCopyOption.ATOMIC_MOVE);
LOGGER.info("Move done");
return injectedData;
}

public static void modeFileOldModelToNewModel(final UUID uuid, final ObjectId oid) throws IOException {
String mediaCurentPath = getFileDataOld(uuid);
String mediaDestPath = getFileData(oid);
LOGGER.info("src = {}", mediaCurentPath);
LOGGER.info("dst = {}", mediaDestPath);
if (Files.exists(Paths.get(mediaCurentPath))) {
LOGGER.info("move: {} ==> {}", mediaCurentPath, mediaDestPath);
Files.move(Paths.get(mediaCurentPath), Paths.get(mediaDestPath), StandardCopyOption.ATOMIC_MOVE);
}
// Move old meta-data...
mediaCurentPath = mediaCurentPath.substring(mediaCurentPath.length() - 4) + "meta.json";
mediaDestPath = mediaCurentPath.substring(mediaDestPath.length() - 4) + "meta.json";
if (Files.exists(Paths.get(mediaCurentPath))) {
LOGGER.info("moveM: {} ==> {}", mediaCurentPath, mediaDestPath);
Files.move(Paths.get(mediaCurentPath), Paths.get(mediaDestPath), StandardCopyOption.ATOMIC_MOVE);
}
LOGGER.info("Move done");
}

public static String saveTemporaryFile(final InputStream uploadedInputStream, final long idData)
throws FailException {
return saveFile(uploadedInputStream, DataResource.getTmpFileInData(idData));
}

public static void removeTemporaryFile(final long idData) {
final String filepath = DataResource.getTmpFileInData(idData);
if (Files.exists(Paths.get(filepath))) {
try {
Files.delete(Paths.get(filepath));
} catch (final IOException e) {
LOGGER.info("can not delete temporary file : {}", Paths.get(filepath));
e.printStackTrace();
}
}
}

// save uploaded file to a defined location on the server
static String saveFile(final InputStream uploadedInputStream, final String serverLocation) throws FailException {
String out = "";
MessageDigest md = null;
try (OutputStream outpuStream = new FileOutputStream(new File(serverLocation))) {
md = MessageDigest.getInstance("SHA-512");
outpuStream.flush();
} catch (final IOException ex) {
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Can not write in temporary file", ex);
} catch (final NoSuchAlgorithmException ex) {
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Can not find sha512 algorithms", ex);
}
if (md != null) {
try (OutputStream outpuStream = new FileOutputStream(new File(serverLocation))) {
int read = 0;
final byte[] bytes = new byte[CHUNK_SIZE_IN];
while ((read = uploadedInputStream.read(bytes)) != -1) {
// logger.info("write {}", read);
md.update(bytes, 0, read);
outpuStream.write(bytes, 0, read);
}
LOGGER.info("Flush input stream ... {}", serverLocation);
outpuStream.flush();
// create the end of sha512
final byte[] sha512Digest = md.digest();
// convert in hexadecimal
out = bytesToHex(sha512Digest);
uploadedInputStream.close();
} catch (final IOException ex) {
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Can not write in temporary file", ex);
}
}
return out;
}

public static String bytesToHex(final byte[] bytes) {
final StringBuilder sb = new StringBuilder();
for (final byte b : bytes) {
sb.append(String.format("%02x", b));
}
return sb.toString();
}
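`saveFile()` streams the upload to disk while computing its SHA-512, and `bytesToHex()` renders the digest as lower-case hexadecimal, so the stored value can be cross-checked with standard tools. An illustrative check (the on-disk path is a placeholder derived from `getFileData()`, which shards files under `<media folder>_oid/<h1>/<h2>/<h3>/<oid>`):

```bash
# Compare the hex digest stored in the database with the one computed from the file on disk.
# The path below is a placeholder, not a real file from this diff.
sha512sum /application/data/media_oid/ab/cd/ef/<oid>
```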
||||
|
||||
public Data getSmall(final ObjectId oid) {
|
||||
try {
|
||||
return DataAccess.get(Data.class, oid);
|
||||
} catch (final Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/upload/")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Insert a new data in the data environment", tags = "SYSTEM")
|
||||
public void uploadFile(
|
||||
@Context final SecurityContext sc,
|
||||
@FormDataParam("file") final InputStream fileInputStream,
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData) throws FailException {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
LOGGER.info("===================================================");
|
||||
LOGGER.info("== DATA uploadFile {}", (gc == null ? "null" : gc.userByToken));
|
||||
LOGGER.info("===================================================");
|
||||
// public NodeSmall uploadFile(final FormDataMultiPart form) {
|
||||
LOGGER.info("Upload file: ");
|
||||
final String filePath = ConfigBaseVariable.getTmpDataFolder() + File.separator + tmpFolderId++;
|
||||
try {
|
||||
createFolder(ConfigBaseVariable.getTmpDataFolder() + File.separator);
|
||||
} catch (final IOException ex) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR,
|
||||
"Impossible to create the folder in the server", ex);
|
||||
}
|
||||
saveFile(fileInputStream, filePath);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{oid}")
|
||||
@PermitTokenInURI
|
||||
@RolesAllowed("USER")
|
||||
@Produces(MediaType.APPLICATION_OCTET_STREAM)
|
||||
@Operation(description = "Get back some data from the data environment", tags = "SYSTEM")
|
||||
public Response retrieveDataId(
|
||||
@Context final SecurityContext sc,
|
||||
@QueryParam(HttpHeaders.AUTHORIZATION) final String token,
|
||||
@HeaderParam("Range") final String range,
|
||||
@PathParam("oid") final ObjectId oid) throws FailException {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
// logger.info("===================================================");
|
||||
LOGGER.info("== DATA retrieveDataId ? oid={} user={}", oid, (gc == null ? "null" : gc.userByToken));
|
||||
// logger.info("===================================================");
|
||||
final Data value = getSmall(oid);
|
||||
if (value == null) {
|
||||
return Response.status(404).entity("media NOT FOUND: " + oid).type("text/plain").build();
|
||||
}
|
||||
try {
|
||||
return buildStream(getFileData(oid), range,
|
||||
value.mimeType == null ? "application/octet-stream" : value.mimeType);
|
||||
} catch (final Exception ex) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Fail to build output stream", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("thumbnail/{oid}")
|
||||
@RolesAllowed("USER")
|
||||
@PermitTokenInURI
|
||||
@Produces(MediaType.APPLICATION_OCTET_STREAM)
|
||||
@Operation(description = "Get a thumbnail of from the data environment (if resize is possible)", tags = "SYSTEM")
|
||||
// @CacheMaxAge(time = 10, unit = TimeUnit.DAYS)
|
||||
public Response retrieveDataThumbnailId(
|
||||
@Context final SecurityContext sc,
|
||||
@QueryParam(HttpHeaders.AUTHORIZATION) final String token,
|
||||
@HeaderParam("Range") final String range,
|
||||
@PathParam("oid") final ObjectId oid) throws FailException {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
LOGGER.info("===================================================");
|
||||
LOGGER.info("== DATA retrieveDataThumbnailId ? {}", (gc == null ? "null" : gc.userByToken));
|
||||
LOGGER.info("===================================================");
|
||||
final Data value = getSmall(oid);
|
||||
if (value == null) {
|
||||
return Response.status(404).entity("media NOT FOUND: " + oid).type("text/plain").build();
|
||||
}
|
||||
final String filePathName = getFileData(oid);
|
||||
final File inputFile = new File(filePathName);
|
||||
if (!inputFile.exists()) {
|
||||
return Response.status(404).entity("{\"error\":\"media Does not exist: " + oid + "\"}")
|
||||
.type("application/json").build();
|
||||
}
|
||||
if (value.mimeType.contentEquals("image/jpeg") || value.mimeType.contentEquals("image/png")
|
||||
// || value.mimeType.contentEquals("image/webp")
|
||||
) {
|
||||
// reads input image
|
||||
BufferedImage inputImage;
|
||||
try {
|
||||
inputImage = ImageIO.read(inputFile);
|
||||
} catch (final IOException ex) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Fail to READ the image", ex);
|
||||
}
|
||||
LOGGER.info("input size image: {}x{} type={}", inputImage.getWidth(), inputImage.getHeight(),
|
||||
inputImage.getType());
|
||||
final int scaledWidth = ConfigBaseVariable.getThumbnailWidth();
|
||||
final int scaledHeight = (int) ((float) inputImage.getHeight() / (float) inputImage.getWidth()
|
||||
* scaledWidth);
|
||||
// creates output image
|
||||
final BufferedImage outputImage = new BufferedImage(scaledWidth, scaledHeight, inputImage.getType());
|
||||
|
||||
// scales the input image to the output image
|
||||
final Graphics2D g2d = outputImage.createGraphics();
|
||||
LOGGER.info("output size image: {}x{}", scaledWidth, scaledHeight);
|
||||
g2d.drawImage(inputImage, 0, 0, scaledWidth, scaledHeight, null);
|
||||
g2d.dispose();
|
||||
// create the output stream:
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
try {
|
||||
ImageIO.write(outputImage, ConfigBaseVariable.getThumbnailFormat(), baos);
|
||||
} catch (final IOException e) {
|
||||
e.printStackTrace();
|
||||
return Response.status(500).entity("Internal Error: resize fail: " + e.getMessage()).type("text/plain")
|
||||
.build();
|
||||
}
|
||||
final byte[] imageData = baos.toByteArray();
|
||||
LOGGER.info("output length {}", imageData.length);
|
||||
if (imageData.length == 0) {
|
||||
LOGGER.error("Fail to convert image... Availlable format:");
|
||||
for (final String data : ImageIO.getWriterFormatNames()) {
|
||||
LOGGER.error(" - {}", data);
|
||||
}
|
||||
}
|
||||
final Response.ResponseBuilder out = Response.ok(imageData).header(HttpHeaders.CONTENT_LENGTH,
|
||||
imageData.length);
|
||||
try {
|
||||
out.type(getMimeType(ConfigBaseVariable.getThumbnailFormat()));
|
||||
} catch (final IOException ex) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR,
|
||||
"Fail to convert mime type of " + ConfigBaseVariable.getThumbnailFormat(), ex);
|
||||
}
|
||||
// TODO: move this in a decorator !!!
|
||||
final CacheControl cc = new CacheControl();
|
||||
cc.setMaxAge(3600);
|
||||
cc.setNoCache(false);
|
||||
out.cacheControl(cc);
|
||||
return out.build();
|
||||
}
|
||||
try {
|
||||
return buildStream(filePathName, range, value.mimeType);
|
||||
} catch (final Exception ex) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Fail to build output stream", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("{oid}/{name}")
|
||||
@PermitTokenInURI
|
||||
@RolesAllowed("USER")
|
||||
@Produces(MediaType.APPLICATION_OCTET_STREAM)
|
||||
@Operation(description = "Get back some data from the data environment (with a beautiful name (permit download with basic name)", tags = "SYSTEM")
|
||||
public Response retrieveDataFull(
|
||||
@Context final SecurityContext sc,
|
||||
@QueryParam(HttpHeaders.AUTHORIZATION) final String token,
|
||||
@ApiInputOptional @HeaderParam("Range") final String range,
|
||||
@PathParam("oid") final ObjectId oid,
|
||||
@PathParam("name") final String name) throws Exception {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
// logger.info("===================================================");
|
||||
LOGGER.info("== DATA retrieveDataFull ? id={} user={}", oid, (gc == null ? "null" : gc.userByToken));
|
||||
// logger.info("===================================================");
|
||||
final Data value = getSmall(oid);
|
||||
if (value == null) {
|
||||
return Response.status(404).entity("media NOT FOUND: " + oid).type("text/plain").build();
|
||||
}
|
||||
return buildStream(getFileData(oid), range,
|
||||
value.mimeType == null ? "application/octet-stream" : value.mimeType);
|
||||
}
|
||||
|
||||
/** Adapted from http://stackoverflow.com/questions/12768812/video-streaming-to-ipad-does-not-work-with-tapestry5/12829541#12829541
|
||||
*
|
||||
* @param range range header
|
||||
* @return Streaming output
|
||||
* @throws FailException if the file cannot be found or if an I/O error occurs while streaming. */
|
||||
private Response buildStream(final String filename, final String range, final String mimeType)
|
||||
throws FailException {
|
||||
final File file = new File(filename);
|
||||
// logger.info("request range : {}", range);
|
||||
// range not requested : Firefox does not send range headers
|
||||
if (range == null) {
|
||||
final StreamingOutput output = new StreamingOutput() {
|
||||
@Override
|
||||
public void write(final OutputStream out) {
|
||||
try (FileInputStream in = new FileInputStream(file)) {
|
||||
final byte[] buf = new byte[1024 * 1024];
|
||||
int len;
|
||||
while ((len = in.read(buf)) != -1) {
|
||||
try {
|
||||
out.write(buf, 0, len);
|
||||
out.flush();
|
||||
// logger.info("---- wrote {} bytes file ----", len);
|
||||
} catch (final IOException ex) {
|
||||
LOGGER.info("remote close connection");
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (final IOException ex) {
|
||||
throw new InternalServerErrorException(ex);
|
||||
}
|
||||
}
|
||||
};
|
||||
final Response.ResponseBuilder out = Response.ok(output).header(HttpHeaders.CONTENT_LENGTH, file.length());
|
||||
if (mimeType != null) {
|
||||
out.type(mimeType);
|
||||
}
|
||||
return out.build();
|
||||
|
||||
}
|
||||
|
||||
final String[] ranges = range.split("=")[1].split("-");
|
||||
final long from = Long.parseLong(ranges[0]);
|
||||
|
||||
// logger.info("request range : {}", ranges.length);
|
||||
// Chunk media if the range upper bound is unspecified. Chrome, Opera sends "bytes=0-"
|
||||
long to = CHUNK_SIZE + from;
|
||||
if (ranges.length == 1) {
|
||||
to = file.length() - 1;
|
||||
} else if (to >= file.length()) {
|
||||
to = file.length() - 1;
|
||||
}
|
||||
final String responseRange = String.format("bytes %d-%d/%d", from, to, file.length());
|
||||
// LOGGER.info("responseRange: {}", responseRange);
|
||||
try {
|
||||
final RandomAccessFile raf = new RandomAccessFile(file, "r");
|
||||
raf.seek(from);
|
||||
|
||||
final long len = to - from + 1;
|
||||
final MediaStreamer streamer = new MediaStreamer(len, raf);
|
||||
final Response.ResponseBuilder out = Response.ok(streamer).status(Response.Status.PARTIAL_CONTENT)
|
||||
.header("Accept-Ranges", "bytes").header("Content-Range", responseRange)
|
||||
.header(HttpHeaders.CONTENT_LENGTH, streamer.getLenth())
|
||||
.header(HttpHeaders.LAST_MODIFIED, new Date(file.lastModified()));
|
||||
if (mimeType != null) {
|
||||
out.type(mimeType);
|
||||
}
|
||||
return out.build();
|
||||
} catch (final FileNotFoundException ex) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Fail to find the required file.", ex);
|
||||
} catch (final IOException ex) {
|
||||
throw new FailException(Response.Status.INTERNAL_SERVER_ERROR, "Fail to access to the required file.", ex);
|
||||
}
|
||||
}
|
||||
|
||||
public void undelete(final Long id) throws Exception {
|
||||
DataAccess.unsetDelete(Data.class, id);
|
||||
}
|
||||
|
||||
}
|
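The range handling in `buildStream` above is easy to get wrong, so the following standalone sketch isolates the same byte-range arithmetic. The `CHUNK_SIZE` value is an assumed placeholder (the real constant is declared elsewhere in this resource class); everything else mirrors the code above.

```java
// Standalone sketch of the Range-header arithmetic used by buildStream() above.
// Assumption: CHUNK_SIZE mirrors the constant referenced in the resource class (value is a placeholder).
public final class RangeMath {
	private static final long CHUNK_SIZE = 4L * 1024 * 1024;

	/** Resolves a "bytes=from-to" header against a file of fileLength bytes; returns {from, to}. */
	public static long[] resolve(final String rangeHeader, final long fileLength) {
		final String[] ranges = rangeHeader.split("=")[1].split("-");
		final long from = Long.parseLong(ranges[0]);
		long to = CHUNK_SIZE + from;
		if (ranges.length == 1 || to >= fileLength) {
			// "bytes=0-" (no upper bound) or a chunk past EOF: clamp to the last byte.
			// Note: like the code above, the client-requested upper bound is ignored.
			to = fileLength - 1;
		}
		return new long[] { from, to };
	}

	public static void main(final String[] args) {
		final long fileLength = 10_000_000L;
		final long[] r = resolve("bytes=1000-2000", fileLength);
		// Prints: Content-Range: bytes 1000-4195304/10000000
		System.out.printf("Content-Range: bytes %d-%d/%d%n", r[0], r[1], fileLength);
	}
}
```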
@ -1,16 +1,15 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
|
||||
import org.kar.archidata.api.FrontGeneric;
|
||||
import org.atriasoft.archidata.api.FrontGeneric;
|
||||
|
||||
import jakarta.ws.rs.Path;
|
||||
|
||||
import org.kar.karideo.util.ConfigVariable;
|
||||
import org.atriasoft.karideo.util.ConfigVariable;
|
||||
|
||||
@Path("/front")
|
||||
public class Front extends FrontGeneric {
|
||||
public Front() {
|
||||
this.baseFrontFolder = ConfigVariable.getFrontFolder();
|
||||
|
||||
|
||||
}
|
||||
}
|
@ -1,8 +1,8 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
import org.kar.archidata.exception.FailException;
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.archidata.tools.JWTWrapper;
|
||||
import org.atriasoft.archidata.exception.FailException;
|
||||
import org.atriasoft.archidata.tools.ConfigBaseVariable;
|
||||
import org.atriasoft.archidata.tools.JWTWrapper;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -1,36 +1,38 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiAsyncType;
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiTypeScriptProgress;
|
||||
import org.atriasoft.archidata.api.DataResource;
|
||||
import org.atriasoft.archidata.dataAccess.DBAccess;
|
||||
import org.atriasoft.archidata.dataAccess.DataAccess;
|
||||
import org.atriasoft.archidata.dataAccess.addOnSQL.AddOnDataJson;
|
||||
import org.atriasoft.archidata.exception.FailException;
|
||||
import org.atriasoft.archidata.exception.InputException;
|
||||
import org.atriasoft.archidata.model.Data;
|
||||
import org.atriasoft.archidata.tools.DataTools;
|
||||
import org.atriasoft.karideo.model.Media;
|
||||
import org.atriasoft.karideo.model.Season;
|
||||
import org.atriasoft.karideo.model.Series;
|
||||
import org.atriasoft.karideo.model.Type;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.api.DataResource;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.exception.FailException;
|
||||
import org.kar.archidata.exception.InputException;
|
||||
import org.kar.archidata.model.Data;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Media;
|
||||
import org.kar.karideo.model.Season;
|
||||
import org.kar.karideo.model.Series;
|
||||
import org.kar.karideo.model.Type;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.PUT;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
@ -56,14 +58,15 @@ public class MediaResource {
|
||||
return DataAccess.get(Media.class, id);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@PUT
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific Media", tags = "GLOBAL")
|
||||
public Media patch(@PathParam("id") final Long id, @AsyncType(Media.class) final String jsonRequest) throws Exception {
|
||||
LOGGER.info("update video {} ==> '{}'", id, jsonRequest);
|
||||
DataAccess.updateWithJson(Media.class, id, jsonRequest);
|
||||
public Media patch(@PathParam("id") final Long id, @Valid final Media media) throws Exception {
|
||||
LOGGER.info("update video {} ==> '{}'", id, media);
|
||||
media.id = id;
|
||||
DataAccess.update(media, id);
|
||||
return DataAccess.get(Media.class, id);
|
||||
}
|
||||
|
||||
@ -77,6 +80,9 @@ public class MediaResource {
|
||||
if (data.contentEquals("null")) {
|
||||
return null;
|
||||
}
|
||||
if (data.contentEquals("undefined")) {
|
||||
return null;
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
@ -84,35 +90,44 @@ public class MediaResource {
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@Operation(description = "Create a new Media", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Media uploadFile( //
|
||||
@FormDataParam("fileName") String fileName, //
|
||||
@FormDataParam("universe") String universe, //
|
||||
@FormDataParam("series") String series, //
|
||||
//@FormDataParam("seriesId") String seriesId, // Not used ...
|
||||
@FormDataParam("season") String season, //
|
||||
@FormDataParam("episode") String episode, //
|
||||
@FormDataParam("title") String title, //
|
||||
@ApiTypeScriptProgress
|
||||
public Media uploadMedia( //
|
||||
// @AsyncType(Long.class) @FormDataParam("universeId") String universeId, //
|
||||
// @AsyncType(Long.class) @FormDataParam("typeId") String typeId, //
|
||||
// @AsyncType(Long.class) @FormDataParam("seriesId") String seriesId, //
|
||||
// @AsyncType(Long.class) @FormDataParam("season") String season, // value of the season ==> local add if needed
|
||||
// @AsyncType(Long.class) @FormDataParam("episode") String episode, // value of the season ==> local add if needed
|
||||
@FormDataParam("universeId") String universeId, //
|
||||
@FormDataParam("typeId") String typeId, //
|
||||
@FormDataParam("seriesId") String seriesId, //
|
||||
@FormDataParam("season") String season, // value of the season ==> local add if needed
|
||||
@FormDataParam("episode") String episode, // value of the season ==> local add if needed
|
||||
@FormDataParam("title") String title, //
|
||||
@FormDataParam("file") final InputStream fileInputStream, //
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData //
|
||||
) throws FailException {
|
||||
try {
|
||||
) throws FailException {
|
||||
try (DBAccess db = DBAccess.createInterface()) {
|
||||
// correct input string stream :
|
||||
fileName = multipartCorrection(fileName);
|
||||
universe = multipartCorrection(universe);
|
||||
series = multipartCorrection(series);
|
||||
final String fileName = multipartCorrection(fileMetaData.getFileName());
|
||||
universeId = multipartCorrection(universeId);
|
||||
final Long universeIdLong = universeId != null ? Long.parseLong(universeId) : null;
|
||||
typeId = multipartCorrection(typeId);
|
||||
final Long typeIdLong = typeId != null ? Long.parseLong(typeId) : null;
|
||||
seriesId = multipartCorrection(seriesId);
|
||||
final Long seriesIdLong = seriesId != null ? Long.parseLong(seriesId) : null;
|
||||
season = multipartCorrection(season);
|
||||
final Long seasonLong = season != null ? Long.parseLong(season) : null;
|
||||
episode = multipartCorrection(episode);
|
||||
title = multipartCorrection(title);
|
||||
typeId = multipartCorrection(typeId);
|
||||
|
||||
//public NodeSmall uploadFile(final FormDataMultiPart form) {
|
||||
// TODO: check that all remote IDs exist ...
|
||||
|
||||
// public NodeSmall uploadFile(final FormDataMultiPart form) {
|
||||
LOGGER.info("Upload media file: {}", fileMetaData);
|
||||
LOGGER.info(" - fileName: {}", fileName);
|
||||
LOGGER.info(" - universe: {}", universe);
|
||||
LOGGER.info(" - series: {}", series);
|
||||
LOGGER.info(" - season: {}", season);
|
||||
LOGGER.info(" - universe: {}", universeIdLong);
|
||||
LOGGER.info(" - series: {}", seriesIdLong);
|
||||
LOGGER.info(" - season: {}", seasonLong);
|
||||
LOGGER.info(" - episode: {}", episode);
|
||||
LOGGER.info(" - title: {}", title);
|
||||
LOGGER.info(" - type: {}", typeId);
|
||||
@ -125,21 +140,21 @@ public class MediaResource {
|
||||
|
||||
final long tmpUID = DataResource.getTmpDataId();
|
||||
final String sha512 = DataResource.saveTemporaryFile(fileInputStream, tmpUID);
|
||||
Data data = DataResource.getWithSha512(sha512);
|
||||
Data data = DataTools.getWithSha512(db, sha512);
|
||||
if (data == null) {
|
||||
LOGGER.info("Need to add the data in the BDD ... ");
|
||||
System.out.flush();
|
||||
try {
|
||||
data = DataResource.createNewData(tmpUID, fileName, sha512);
|
||||
data = DataTools.createNewData(db, tmpUID, fileName, sha512);
|
||||
} catch (final IOException ex) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
ex.printStackTrace();
|
||||
throw new FailException("can not create input media (the data model has an internal error");
|
||||
}
|
||||
} else if (data!= null && data.deleted != null && data.deleted) {
|
||||
} else if (data != null && data.deleted != null && data.deleted) {
|
||||
LOGGER.info("Data already exist but deleted");
|
||||
System.out.flush();
|
||||
DataTools.undelete(data.uuid);
|
||||
DataTools.undelete(db, data.oid);
|
||||
data.deleted = false;
|
||||
} else {
|
||||
LOGGER.info("Data already exist ... all good");
|
||||
@ -148,26 +163,32 @@ public class MediaResource {
|
||||
// Fist step: retieve all the Id of each parents:...
|
||||
LOGGER.info("Find typeNode");
|
||||
// check if id of type exist:
|
||||
final Type typeNode = TypeResource.getId(Long.parseLong(typeId));
|
||||
final Type typeNode = TypeResource.getId(typeIdLong);
|
||||
if (typeNode == null) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
throw new InputException("typeId", "TypeId does not exist ...");
|
||||
}
|
||||
LOGGER.info(" ==> {}", typeNode);
|
||||
LOGGER.info("Find seriesNode");
|
||||
// get uid of group:
|
||||
// check if id of type exist:
|
||||
Series seriesNode = null;
|
||||
if (series != null) {
|
||||
seriesNode = SeriesResource.getOrCreate(series, typeNode.id);
|
||||
if (seriesIdLong != null) {
|
||||
seriesNode = SeriesResource.getId(seriesIdLong);
|
||||
if (seriesNode == null) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
throw new InputException("seriesId", "seriesId does not exist ...");
|
||||
}
|
||||
if (!seriesNode.parentId.equals(typeNode.id)) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
throw new InputException("seriesId", "seriesId object have not the correct parent...");
|
||||
}
|
||||
}
|
||||
|
||||
LOGGER.info(" ==> {}", seriesNode);
|
||||
LOGGER.info("Find seasonNode");
|
||||
// get uid of season:
|
||||
Season seasonNode = null;
|
||||
if (seriesNode == null && season != null) {
|
||||
if (seriesNode == null && seasonLong != null) {
|
||||
DataResource.removeTemporaryFile(tmpUID);
|
||||
throw new InputException("season", "Season is set but no seraies is set !!");
|
||||
throw new InputException("season", "Season is set but no series is set !!");
|
||||
}
|
||||
if (season != null) {
|
||||
seasonNode = SeasonResource.getOrCreate(season, seriesNode.id);
|
||||
@ -179,7 +200,7 @@ public class MediaResource {
|
||||
try {
|
||||
final Media media = new Media();
|
||||
media.name = title;
|
||||
media.dataId = data.uuid;
|
||||
media.dataId = data.oid;
|
||||
media.typeId = typeNode.id;
|
||||
media.seriesId = null;
|
||||
if (seriesNode != null) {
|
||||
@ -193,7 +214,7 @@ public class MediaResource {
|
||||
if (episode != null && !episode.contentEquals("")) {
|
||||
media.episode = Integer.parseInt(episode);
|
||||
}
|
||||
final Media out = DataAccess.insert(media);
|
||||
final Media out = db.insert(media);
|
||||
LOGGER.info("Generate new media {}", out);
|
||||
return out;
|
||||
} catch (final SQLException ex) {
|
||||
@ -214,17 +235,18 @@ public class MediaResource {
|
||||
@Path("{id}/cover")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@AsyncType(Media.class)
|
||||
@ApiAsyncType(Media.class)
|
||||
@Operation(description = "Upload a new season cover media", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
@ApiTypeScriptProgress
|
||||
public Media uploadCover( //
|
||||
@PathParam("id") final Long id, //
|
||||
@FormDataParam("fileName") final String fileName, //
|
||||
@FormDataParam("file") final InputStream fileInputStream, //
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData//
|
||||
) throws Exception {
|
||||
DataTools.uploadCover(Media.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Media.class, id);
|
||||
) throws Exception {
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
DataTools.uploadCover(dbIo, Media.class, id, fileInputStream, fileMetaData);
|
||||
return dbIo.get(Media.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@ -233,10 +255,12 @@ public class MediaResource {
|
||||
@Operation(description = "Remove a specific cover of a media", tags = "GLOBAL")
|
||||
public Media removeCover( //
|
||||
@PathParam("id") final Long id, //
|
||||
@PathParam("coverId") final UUID coverId //
|
||||
) throws Exception {
|
||||
AddOnDataJson.removeLink(Media.class, id, "covers", coverId);
|
||||
return DataAccess.get(Media.class, id);
|
||||
@PathParam("coverId") final ObjectId coverId //
|
||||
) throws Exception {
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
AddOnDataJson.removeLink(dbIo, Media.class, "id", id, "covers", coverId);
|
||||
return dbIo.get(Media.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
@DELETE
|
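For reference, a hedged client-side sketch of calling the multipart upload endpoint above with Jersey 3. The base URL, resource path, bearer token and numeric IDs are placeholders (check the actual `@Path` annotations and deployment port); only the form-field names come from the `uploadMedia` signature above.

```java
// Illustrative upload client for the media endpoint above (Jersey 3 / jakarta namespace).
// Base URL, path, token and IDs are hypothetical placeholders, not values from this repository.
import java.io.File;

import org.glassfish.jersey.client.ClientConfig;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.FileDataBodyPart;

import jakarta.ws.rs.client.Client;
import jakarta.ws.rs.client.ClientBuilder;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;

public final class MediaUploadExample {
	public static void main(final String[] args) throws Exception {
		final Client client = ClientBuilder.newClient(new ClientConfig().register(MultiPartFeature.class));
		try (FormDataMultiPart form = new FormDataMultiPart()) {
			form.field("typeId", "1002") // assumed existing Type id
					.field("seriesId", "1010") // assumed existing Series id
					.field("season", "1")
					.field("episode", "3")
					.field("title", "Pilot");
			form.bodyPart(new FileDataBodyPart("file", new File("pilot.mkv"), MediaType.APPLICATION_OCTET_STREAM_TYPE));
			final Response response = client.target("http://localhost:18080/karideo/api") // assumed base URL
					.path("media/upload") // assumed path; check the resource @Path annotations
					.request()
					.header("Authorization", "Bearer <token>")
					.post(Entity.entity(form, form.getMediaType()));
			System.out.println("HTTP " + response.getStatus());
		} finally {
			client.close();
		}
	}
}
```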
@ -1,30 +1,31 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiTypeScriptProgress;
|
||||
import org.atriasoft.archidata.dataAccess.DBAccess;
|
||||
import org.atriasoft.archidata.dataAccess.DataAccess;
|
||||
import org.atriasoft.archidata.dataAccess.QueryAnd;
|
||||
import org.atriasoft.archidata.dataAccess.QueryCondition;
|
||||
import org.atriasoft.archidata.dataAccess.addOnSQL.AddOnDataJson;
|
||||
import org.atriasoft.archidata.dataAccess.options.Condition;
|
||||
import org.atriasoft.archidata.tools.DataTools;
|
||||
import org.atriasoft.karideo.model.Season;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryAnd;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Season;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.PUT;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
@ -35,7 +36,6 @@ import jakarta.ws.rs.core.MediaType;
|
||||
public class SeasonResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(SeasonResource.class);
|
||||
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get a specific Season with his ID", tags = "GLOBAL")
|
||||
@ -52,25 +52,24 @@ public class SeasonResource {
|
||||
return DataAccess.get(Season.class, id);
|
||||
}
|
||||
|
||||
/* =============================================================================
|
||||
* ADMIN SECTION:
|
||||
* ============================================================================= */
|
||||
/* ============================================================================= ADMIN SECTION: ============================================================================= */
|
||||
|
||||
@POST
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Create a new season", tags = "GLOBAL")
|
||||
public Season post(final Season jsonRequest) throws Exception {
|
||||
public Season post(@Valid final Season jsonRequest) throws Exception {
|
||||
return DataAccess.insert(jsonRequest);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@PUT
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific season", tags = "GLOBAL")
|
||||
public Season patch(@PathParam("id") final Long id, @AsyncType(Season.class) final String jsonRequest) throws Exception {
|
||||
DataAccess.updateWithJson(Season.class, id, jsonRequest);
|
||||
public Season patch(@PathParam("id") final Long id, @Valid final Season season) throws Exception {
|
||||
season.id = id;
|
||||
DataAccess.update(season, id);
|
||||
return DataAccess.get(Season.class, id);
|
||||
}
|
||||
|
||||
@ -87,20 +86,24 @@ public class SeasonResource {
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.MULTIPART_FORM_DATA)
|
||||
@Operation(description = "Upload a new season cover season", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Season uploadCover(@PathParam("id") final Long id, @FormDataParam("fileName") final String fileName, @FormDataParam("file") final InputStream fileInputStream,
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData) throws Exception {
|
||||
DataTools.uploadCover(Season.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Season.class, id);
|
||||
@ApiTypeScriptProgress
|
||||
public Season uploadCover(@PathParam("id") final Long id, @FormDataParam("file") final InputStream fileInputStream, @FormDataParam("file") final FormDataContentDisposition fileMetaData)
|
||||
throws Exception {
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
DataTools.uploadCover(dbIo, Season.class, id, fileInputStream, fileMetaData);
|
||||
return dbIo.get(Season.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}/cover/{coverId}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific cover of a season", tags = "GLOBAL")
|
||||
public Season removeCover(@PathParam("id") final Long id, @PathParam("coverId") final UUID coverId) throws Exception {
|
||||
AddOnDataJson.removeLink(Season.class, id, "covers", coverId);
|
||||
return DataAccess.get(Season.class, id);
|
||||
public Season removeCover(@PathParam("id") final Long id, @PathParam("coverId") final ObjectId coverId) throws Exception {
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
AddOnDataJson.removeLink(dbIo, Season.class, "id", id, "covers", coverId);
|
||||
return dbIo.get(Season.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
public static Season getOrCreate(final String name, final Long seriesId) {
|
@ -1,30 +1,31 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiTypeScriptProgress;
|
||||
import org.atriasoft.archidata.dataAccess.DBAccess;
|
||||
import org.atriasoft.archidata.dataAccess.DataAccess;
|
||||
import org.atriasoft.archidata.dataAccess.QueryAnd;
|
||||
import org.atriasoft.archidata.dataAccess.QueryCondition;
|
||||
import org.atriasoft.archidata.dataAccess.addOnSQL.AddOnDataJson;
|
||||
import org.atriasoft.archidata.dataAccess.options.Condition;
|
||||
import org.atriasoft.archidata.tools.DataTools;
|
||||
import org.atriasoft.karideo.model.Series;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryAnd;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Series;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.PUT;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
@ -51,25 +52,27 @@ public class SeriesResource {
|
||||
return DataAccess.get(Series.class, id);
|
||||
}
|
||||
|
||||
/* =============================================================================
|
||||
* ADMIN SECTION:
|
||||
* ============================================================================= */
|
||||
public static Series getId(final Long id) throws Exception {
|
||||
return DataAccess.get(Series.class, id);
|
||||
}
|
||||
/* ============================================================================= ADMIN SECTION: ============================================================================= */
|
||||
|
||||
@POST
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Create a new Series", tags = "GLOBAL")
|
||||
public Series post(final Series jsonRequest) throws Exception {
|
||||
public Series post(@Valid final Series jsonRequest) throws Exception {
|
||||
return DataAccess.insert(jsonRequest);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@PUT
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific Series", tags = "GLOBAL")
|
||||
public Series patch(@PathParam("id") final Long id, @AsyncType(Series.class) final String jsonRequest) throws Exception {
|
||||
DataAccess.updateWithJson(Series.class, id, jsonRequest);
|
||||
public Series patch(@PathParam("id") final Long id, @Valid final Series series) throws Exception {
|
||||
series.id = id;
|
||||
DataAccess.update(series, id);
|
||||
return DataAccess.get(Series.class, id);
|
||||
}
|
||||
|
||||
@ -86,20 +89,24 @@ public class SeriesResource {
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@Operation(description = "Upload a new season cover Series", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Series uploadCover(@PathParam("id") final Long id, @FormDataParam("fileName") final String fileName, @FormDataParam("file") final InputStream fileInputStream,
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData) throws Exception {
|
||||
DataTools.uploadCover(Series.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Series.class, id);
|
||||
@ApiTypeScriptProgress
|
||||
public Series uploadCover(@PathParam("id") final Long id, @FormDataParam("file") final InputStream fileInputStream, @FormDataParam("file") final FormDataContentDisposition fileMetaData)
|
||||
throws Exception {
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
DataTools.uploadCover(dbIo, Series.class, id, fileInputStream, fileMetaData);
|
||||
return dbIo.get(Series.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}/cover/{coverId}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific Series of a season", tags = "GLOBAL")
|
||||
public Series removeCover(@PathParam("id") final Long id, @PathParam("coverId") final UUID coverId) throws Exception {
|
||||
AddOnDataJson.removeLink(Series.class, id, "covers", coverId);
|
||||
return DataAccess.get(Series.class, id);
|
||||
public Series removeCover(@PathParam("id") final Long id, @PathParam("coverId") final ObjectId coverId) throws Exception {
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
AddOnDataJson.removeLink(dbIo, Series.class, "id", id, "covers", coverId);
|
||||
return dbIo.get(Series.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
public static Series getOrCreate(final String name, final Long typeId) {
|
@ -1,29 +1,30 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiTypeScriptProgress;
|
||||
import org.atriasoft.archidata.dataAccess.DBAccess;
|
||||
import org.atriasoft.archidata.dataAccess.DataAccess;
|
||||
import org.atriasoft.archidata.dataAccess.QueryCondition;
|
||||
import org.atriasoft.archidata.dataAccess.addOnSQL.AddOnDataJson;
|
||||
import org.atriasoft.archidata.dataAccess.options.Condition;
|
||||
import org.atriasoft.archidata.tools.DataTools;
|
||||
import org.atriasoft.karideo.model.Type;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
|
||||
import org.glassfish.jersey.media.multipart.FormDataParam;
|
||||
import org.kar.archidata.annotation.AsyncType;
|
||||
import org.kar.archidata.annotation.TypeScriptProgress;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.addOn.AddOnDataJson;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.tools.DataTools;
|
||||
import org.kar.karideo.model.Type;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.POST;
|
||||
import jakarta.ws.rs.PUT;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
@ -34,7 +35,6 @@ import jakarta.ws.rs.core.MediaType;
|
||||
public class TypeResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(TypeResource.class);
|
||||
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get all Type", tags = "GLOBAL")
|
||||
@ -55,25 +55,24 @@ public class TypeResource {
|
||||
return DataAccess.get(Type.class, id);
|
||||
}
|
||||
|
||||
/* =============================================================================
|
||||
* ADMIN SECTION:
|
||||
* ============================================================================= */
|
||||
/* ============================================================================= ADMIN SECTION: ============================================================================= */
|
||||
|
||||
@POST
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Create a new Type", tags = "GLOBAL")
|
||||
public Type post(final Type jsonRequest) throws Exception {
|
||||
public Type post(@Valid final Type jsonRequest) throws Exception {
|
||||
return DataAccess.insert(jsonRequest);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@PUT
|
||||
@Path("{id}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a specific Type", tags = "GLOBAL")
|
||||
public Type patch(@PathParam("id") final Long id, @AsyncType(Type.class) final String jsonRequest) throws Exception {
|
||||
DataAccess.updateWithJson(Type.class, id, jsonRequest);
|
||||
public Type patch(@PathParam("id") final Long id, @Valid final Type type) throws Exception {
|
||||
type.id = id;
|
||||
DataAccess.update(type, id);
|
||||
return DataAccess.get(Type.class, id);
|
||||
}
|
||||
|
||||
@ -90,20 +89,25 @@ public class TypeResource {
|
||||
@RolesAllowed("ADMIN")
|
||||
@Consumes({ MediaType.MULTIPART_FORM_DATA })
|
||||
@Operation(description = "Upload a new season cover Type", tags = "GLOBAL")
|
||||
@TypeScriptProgress
|
||||
public Type uploadCover(@PathParam("id") final Long id, @FormDataParam("fileName") final String fileName, @FormDataParam("file") final InputStream fileInputStream,
|
||||
@FormDataParam("file") final FormDataContentDisposition fileMetaData) throws Exception {
|
||||
DataTools.uploadCover(Type.class, id, fileName, fileInputStream, fileMetaData);
|
||||
return DataAccess.get(Type.class, id);
|
||||
@ApiTypeScriptProgress
|
||||
public Type uploadCover(@PathParam("id") final Long id, @FormDataParam("file") final InputStream fileInputStream, @FormDataParam("file") final FormDataContentDisposition fileMetaData)
|
||||
throws Exception {
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
DataTools.uploadCover(dbIo, Type.class, id, fileInputStream, fileMetaData);
|
||||
return dbIo.get(Type.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
@DELETE
|
||||
@Path("{id}/cover/{coverId}")
|
||||
@RolesAllowed("ADMIN")
|
||||
@Operation(description = "Remove a specific cover of a type", tags = "GLOBAL")
|
||||
public Type removeCover(@PathParam("id") final Long id, @PathParam("coverId") final UUID coverId) throws Exception {
|
||||
AddOnDataJson.removeLink(Type.class, id, "covers", coverId);
|
||||
return DataAccess.get(Type.class, id);
|
||||
public Type removeCover(@PathParam("id") final Long id, @PathParam("coverId") final ObjectId coverId) throws Exception {
|
||||
|
||||
try (DBAccess dbIo = DBAccess.createInterface()) {
|
||||
AddOnDataJson.removeLink(dbIo, Type.class, "id", id, "covers", coverId);
|
||||
return dbIo.get(Type.class, id);
|
||||
}
|
||||
}
|
||||
|
||||
public static Type getOrCreate(final String name) {
|
@ -1,22 +1,23 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.QueryAnd;
|
||||
import org.kar.archidata.dataAccess.QueryCondition;
|
||||
import org.kar.archidata.dataAccess.options.Condition;
|
||||
import org.kar.archidata.filter.GenericContext;
|
||||
import org.kar.karideo.model.UserMediaAdvancement;
|
||||
import org.atriasoft.archidata.dataAccess.DataAccess;
|
||||
import org.atriasoft.archidata.dataAccess.QueryAnd;
|
||||
import org.atriasoft.archidata.dataAccess.QueryCondition;
|
||||
import org.atriasoft.archidata.dataAccess.options.Condition;
|
||||
import org.atriasoft.archidata.filter.GenericContext;
|
||||
import org.atriasoft.karideo.model.UserMediaAdvancement;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import jakarta.annotation.security.RolesAllowed;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.ws.rs.Consumes;
|
||||
import jakarta.ws.rs.DELETE;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.PATCH;
|
||||
import jakarta.ws.rs.PUT;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
@ -28,38 +29,43 @@ import jakarta.ws.rs.core.SecurityContext;
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class UserMediaAdvancementResource {
|
||||
static final Logger LOGGER = LoggerFactory.getLogger(UserMediaAdvancementResource.class);
|
||||
|
||||
|
||||
@GET
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get a specific user advancement with his ID", tags = "GLOBAL")
|
||||
public UserMediaAdvancement get(@Context final SecurityContext sc, @PathParam("id") final Long id) throws Exception {
|
||||
public UserMediaAdvancement get(@Context final SecurityContext sc, @PathParam("id") final Long id)
|
||||
throws Exception {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
return DataAccess.getWhere(UserMediaAdvancement.class, new Condition(new QueryAnd(new QueryCondition("mediaId", "=", id), new QueryCondition("userId", "=", gc.userByToken.id))));
|
||||
return DataAccess.getWhere(UserMediaAdvancement.class,
|
||||
new Condition(new QueryAnd(new QueryCondition("mediaId", "=", id),
|
||||
new QueryCondition("userId", "=", gc.userByToken.id))));
|
||||
}
|
||||
|
||||
|
||||
@GET
|
||||
@RolesAllowed("USER")
|
||||
@Operation(description = "Get all user advancement", tags = "GLOBAL")
|
||||
public List<UserMediaAdvancement> gets(@Context final SecurityContext sc) throws Exception {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
return DataAccess.getsWhere(UserMediaAdvancement.class, new Condition(new QueryCondition("userId", "=", gc.userByToken.id)));
|
||||
return DataAccess.getsWhere(UserMediaAdvancement.class,
|
||||
new Condition(new QueryCondition("userId", "=", gc.userByToken.id)));
|
||||
}
|
||||
|
||||
/* =============================================================================
|
||||
* Modification SECTION:
|
||||
* ============================================================================= */
|
||||
|
||||
|
||||
/* ============================================================================= Modification SECTION: ============================================================================= */
|
||||
|
||||
public record MediaInformations(
|
||||
int time,
|
||||
float percent,
|
||||
int count) {}
|
||||
|
||||
//@POST
|
||||
//@Path("{id}")
|
||||
//@RolesAllowed("USER")
|
||||
//@Consumes(MediaType.APPLICATION_JSON)
|
||||
public UserMediaAdvancement post(@Context final SecurityContext sc, @PathParam("id") final Long id, final MediaInformations data) throws Exception {
|
||||
|
||||
// @POST
|
||||
// @Path("{id}")
|
||||
// @RolesAllowed("USER")
|
||||
// @Consumes(MediaType.APPLICATION_JSON)
|
||||
public UserMediaAdvancement post(
|
||||
@Context final SecurityContext sc,
|
||||
@PathParam("id") final Long id,
|
||||
final MediaInformations data) throws Exception {
|
||||
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
|
||||
final UserMediaAdvancement elem = new UserMediaAdvancement();
|
||||
elem.userId = gc.userByToken.id;
|
||||
@ -69,18 +75,21 @@ public class UserMediaAdvancementResource {
|
||||
elem.count = data.count;
|
||||
return DataAccess.insert(elem);
|
||||
}
|
||||
|
||||
|
||||
public record MediaInformationsDelta(
|
||||
int time,
|
||||
float percent,
|
||||
boolean addCount) {}
|
||||
|
||||
@PATCH
|
||||
|
||||
@PUT
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Operation(description = "Modify a user advancement", tags = "GLOBAL")
|
||||
public UserMediaAdvancement patch(@Context final SecurityContext sc, @PathParam("id") final Long id, final MediaInformationsDelta data) throws Exception {
|
||||
public UserMediaAdvancement patch(
|
||||
@Context final SecurityContext sc,
|
||||
@PathParam("id") final Long id,
|
||||
@Valid final MediaInformationsDelta data) throws Exception {
|
||||
final UserMediaAdvancement elem = get(sc, id);
|
||||
if (elem == null) {
|
||||
// insert element
|
||||
@ -96,10 +105,10 @@ public class UserMediaAdvancementResource {
|
||||
elem.count++;
|
||||
}
|
||||
LOGGER.info("{},{},{}", elem.time, elem.percent, elem.count);
|
||||
final int nbAfected = DataAccess.update(elem, elem.id, List.of("time", "percent", "count"));
|
||||
DataAccess.update(elem, elem.id, List.of("time", "percent", "count"));
|
||||
return DataAccess.get(UserMediaAdvancement.class, elem.id);
|
||||
}
|
||||
|
||||
|
||||
@DELETE
|
||||
@Path("{id}")
|
||||
@RolesAllowed("USER")
|
||||
@ -108,5 +117,5 @@ public class UserMediaAdvancementResource {
|
||||
final UserMediaAdvancement elem = get(sc, id);
|
||||
DataAccess.delete(UserMediaAdvancement.class, elem.id);
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -1,10 +1,10 @@
|
||||
package org.kar.karideo.api;
|
||||
package org.atriasoft.karideo.api;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.filter.GenericContext;
|
||||
import org.kar.karideo.model.UserKarideo;
|
||||
import org.atriasoft.archidata.dataAccess.DataAccess;
|
||||
import org.atriasoft.archidata.filter.GenericContext;
|
||||
import org.atriasoft.karideo.model.UserKarideo;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -1,6 +1,6 @@
|
||||
package org.kar.karideo.filter;
|
||||
package org.atriasoft.karideo.filter;
|
||||
|
||||
import org.kar.archidata.filter.AuthenticationFilter;
|
||||
import org.atriasoft.archidata.filter.AuthenticationFilter;
|
||||
|
||||
import jakarta.ws.rs.Priorities;
|
||||
import jakarta.ws.rs.ext.Provider;
|
||||
@ -15,9 +15,9 @@ import jakarta.annotation.Priority;
|
||||
@Priority(Priorities.AUTHENTICATION)
|
||||
public class KarideoAuthenticationFilter extends AuthenticationFilter {
|
||||
final Logger logger = LoggerFactory.getLogger(KarideoAuthenticationFilter.class);
|
||||
|
||||
public KarideoAuthenticationFilter() {
|
||||
|
||||
public KarideoAuthenticationFilter() {
|
||||
super("karideo");
|
||||
}
|
||||
|
||||
|
||||
}
|
60
back/src/org/atriasoft/karideo/internal/Log.java
Normal file
@ -0,0 +1,60 @@
|
||||
package org.atriasoft.karideo.internal;
|
||||
|
||||
//import io.scenarium.logger.LogLevel;
|
||||
//import io.scenarium.logger.Logger;
|
||||
|
||||
public class Log {
|
||||
// private static final String LIB_NAME = "logger";
|
||||
// private static final String LIB_NAME_DRAW = Logger.getDrawableName(LIB_NAME);
|
||||
// private static final boolean PRINT_CRITICAL = Logger.getNeedPrint(LIB_NAME, LogLevel.CRITICAL);
|
||||
// private static final boolean PRINT_ERROR = Logger.getNeedPrint(LIB_NAME, LogLevel.ERROR);
|
||||
// private static final boolean PRINT_WARNING = Logger.getNeedPrint(LIB_NAME, LogLevel.WARNING);
|
||||
// private static final boolean PRINT_INFO = Logger.getNeedPrint(LIB_NAME, LogLevel.INFO);
|
||||
// private static final boolean PRINT_DEBUG = Logger.getNeedPrint(LIB_NAME, LogLevel.DEBUG);
|
||||
// private static final boolean PRINT_VERBOSE = Logger.getNeedPrint(LIB_NAME, LogLevel.VERBOSE);
|
||||
// private static final boolean PRINT_TODO = Logger.getNeedPrint(LIB_NAME, LogLevel.TODO);
|
||||
// private static final boolean PRINT_PRINT = Logger.getNeedPrint(LIB_NAME, LogLevel.PRINT);
|
||||
//
|
||||
// private Log() {}
|
||||
//
|
||||
// public static void print(String data) {
|
||||
// if (PRINT_PRINT)
|
||||
// Logger.print(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void todo(String data) {
|
||||
// if (PRINT_TODO)
|
||||
// Logger.todo(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void critical(String data) {
|
||||
// if (PRINT_CRITICAL)
|
||||
// Logger.critical(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void error(String data) {
|
||||
// if (PRINT_ERROR)
|
||||
// Logger.error(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void warning(String data) {
|
||||
// if (PRINT_WARNING)
|
||||
// Logger.warning(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void info(String data) {
|
||||
// if (PRINT_INFO)
|
||||
// Logger.info(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void debug(String data) {
|
||||
// if (PRINT_DEBUG)
|
||||
// Logger.debug(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void verbose(String data) {
|
||||
// if (PRINT_VERBOSE)
|
||||
// Logger.verbose(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
|
||||
}
|
114
back/src/org/atriasoft/karideo/migration/Initialization.java
Normal file
@ -0,0 +1,114 @@
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.atriasoft.archidata.api.DataResource;
|
||||
import org.atriasoft.archidata.dataAccess.DBAccess;
|
||||
import org.atriasoft.archidata.externalRestApi.AnalyzeApi;
|
||||
import org.atriasoft.archidata.externalRestApi.TsGenerateApi;
|
||||
import org.atriasoft.archidata.filter.PartRight;
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
import org.atriasoft.archidata.model.Data;
|
||||
import org.atriasoft.archidata.model.User;
|
||||
import org.atriasoft.archidata.model.token.JwtToken;
|
||||
import org.atriasoft.karideo.api.Front;
|
||||
import org.atriasoft.karideo.api.HealthCheck;
|
||||
import org.atriasoft.karideo.api.MediaResource;
|
||||
import org.atriasoft.karideo.api.SeasonResource;
|
||||
import org.atriasoft.karideo.api.SeriesResource;
|
||||
import org.atriasoft.karideo.api.TypeResource;
|
||||
import org.atriasoft.karideo.api.UserMediaAdvancementResource;
|
||||
import org.atriasoft.karideo.api.UserResource;
|
||||
import org.atriasoft.karideo.model.Media;
|
||||
import org.atriasoft.karideo.model.Season;
|
||||
import org.atriasoft.karideo.model.Series;
|
||||
import org.atriasoft.karideo.model.Type;
|
||||
import org.atriasoft.karideo.model.UserMediaAdvancement;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Initialization extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Initialization.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
public static final List<Class<?>> CLASSES_BASE = List.of(Data.class, Media.class, Type.class, Series.class, Season.class, User.class, UserMediaAdvancement.class);
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "Initialization";
|
||||
}
|
||||
|
||||
public static void generateObjects() throws Exception {
|
||||
LOGGER.info("Generate APIs");
|
||||
final List<Class<?>> listOfResources = List.of(Front.class, HealthCheck.class, SeasonResource.class, SeriesResource.class, TypeResource.class, UserMediaAdvancementResource.class,
|
||||
UserResource.class, MediaResource.class, DataResource.class);
|
||||
final AnalyzeApi api = new AnalyzeApi();
|
||||
api.addAllApi(listOfResources);
|
||||
api.addModel(JwtToken.class);
|
||||
api.addModel(PartRight.class);
|
||||
TsGenerateApi.generateApi(api, "../front/src/back-api/");
|
||||
LOGGER.info("Generate APIs (DONE)");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
for (final Class<?> clazz : CLASSES_BASE) {
|
||||
addClass(clazz);
|
||||
}
|
||||
|
||||
addAction((final DBAccess da) -> {
|
||||
final List<Type> data = List.of(//
|
||||
new Type("Documentary", "Documentary (animals, space, earth...)"), //
|
||||
new Type("Movie", "Movie with real humans (film)"), //
|
||||
new Type("Animation", "Animation movies (film)"), //
|
||||
new Type("Short movie", "Small movies (less 2 minutes)"), //
|
||||
new Type("TV show", "TV show for old peoples"), //
|
||||
new Type("Animation TV show", "TV show for young peoples"), //
|
||||
new Type("Theater", "Theater play"), //
|
||||
new Type("One man show", "Recorded stand up"), //
|
||||
new Type("Concert", "Recorded concert"), //
|
||||
new Type("Opera", "Recorded opera") //
|
||||
);
|
||||
da.insertMultiple(data);
|
||||
});
|
||||
// set the starting auto-increment value so that new elements are added after the default ones
|
||||
addAction("""
|
||||
ALTER TABLE `media` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `type` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `series` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `season` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `userMediaAdvancement` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
}
|
||||
|
||||
public static void dropAll(final DBAccess da) {
|
||||
for (final Class<?> element : CLASSES_BASE) {
|
||||
try {
|
||||
da.drop(element);
|
||||
} catch (final Exception ex) {
|
||||
LOGGER.error("Fail to drop table !!!!!!");
|
||||
ex.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void cleanAll(final DBAccess da) {
|
||||
for (final Class<?> element : CLASSES_BASE) {
|
||||
try {
|
||||
da.cleanAll(element);
|
||||
} catch (final Exception ex) {
|
||||
LOGGER.error("Fail to clean table !!!!!!");
|
||||
ex.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
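As a usage note, `Initialization.generateObjects()` above is a static helper, so the TypeScript client can be regenerated without running the full migration. A minimal, hypothetical entry point (the class below does not exist in this repository):

```java
package org.atriasoft.karideo.migration;

// Hypothetical one-shot entry point: regenerates the TypeScript API client only.
// It relies solely on the static generateObjects() defined in Initialization above.
public final class GenerateTsClient {
	public static void main(final String[] args) throws Exception {
		// Output goes to ../front/src/back-api/ (path hard-coded in Initialization.generateObjects()).
		Initialization.generateObjects();
	}
}
```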
@ -1,28 +1,28 @@
|
||||
package org.kar.karideo.migration;
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.kar.karideo.model.UserMediaAdvancement;
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
import org.atriasoft.karideo.model.UserMediaAdvancement;
|
||||
|
||||
public class Migration20230810 extends MigrationSqlStep {
|
||||
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2023-08-10";
|
||||
}
|
||||
|
||||
|
||||
public Migration20230810() {
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
addClass(UserMediaAdvancement.class);
|
||||
|
||||
|
||||
addAction("""
|
||||
ALTER TABLE `userMediaAdvancement` AUTO_INCREMENT = 1000;
|
||||
""");
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -1,8 +1,8 @@
|
||||
package org.kar.karideo.migration;
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
|
||||
public class Migration20231015 extends MigrationSqlStep {
|
||||
|
@ -1,6 +1,6 @@
|
||||
package org.kar.karideo.migration;
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
|
||||
public class Migration20231126 extends MigrationSqlStep {
|
||||
|
||||
@ -17,7 +17,7 @@ public class Migration20231126 extends MigrationSqlStep {
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
|
||||
|
||||
// update migration update (last one)
|
||||
addAction("""
|
||||
ALTER TABLE `KAR_migration`
|
@ -0,0 +1,24 @@
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Migration20240226 extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Migration20240226.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2024-02-26: convert base with UUID";
|
||||
}
|
||||
|
||||
public Migration20240226() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {}
|
||||
|
||||
}
|
@ -0,0 +1,28 @@
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Migration20240611 extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Migration20240611.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2024-06-11: corect data uuid";
|
||||
}
|
||||
|
||||
public Migration20240611() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
addAction("""
|
||||
ALTER TABLE `data` CHANGE `id` `uuid` binary(16) DEFAULT (UUID_TO_BIN(UUID(), TRUE));
|
||||
""");
|
||||
}
|
||||
|
||||
}
|
139
back/src/org/atriasoft/karideo/migration/Migration20250214.java
Normal file
@ -0,0 +1,139 @@
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.atriasoft.archidata.api.DataResource;
|
||||
import org.atriasoft.archidata.dataAccess.DBAccess;
|
||||
import org.atriasoft.archidata.dataAccess.options.AccessDeletedItems;
|
||||
import org.atriasoft.archidata.dataAccess.options.OverrideTableName;
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
import org.atriasoft.archidata.tools.ConfigBaseVariable;
|
||||
import org.atriasoft.karideo.migration.model.CoverConversion;
|
||||
import org.atriasoft.karideo.migration.model.MediaConversion;
|
||||
import org.atriasoft.karideo.migration.model.OIDConversion;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Migration20250214 extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Migration20250214.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2025-01-04: convert base from UUID to OID";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
|
||||
addAction("""
|
||||
ALTER TABLE `data` ADD `_id` binary(12) AFTER `uuid`;
|
||||
""");
|
||||
addAction((final DBAccess da) -> {
|
||||
final List<OIDConversion> datas = da.gets(OIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
for (final OIDConversion elem : datas) {
|
||||
elem._id = new ObjectId();
|
||||
}
|
||||
for (final OIDConversion elem : datas) {
|
||||
da.update(elem, elem.uuid, List.of("_id"), new OverrideTableName("data"));
|
||||
}
|
||||
});
|
||||
final List<String> tableToTransform = List.of("media", "season", "series", "type", "user");
|
||||
for (final String tableName : tableToTransform) {
|
||||
addAction("ALTER TABLE `" + tableName + "` ADD `covers_oid` text NULL;");
|
||||
addAction((final DBAccess da) -> {
|
||||
final List<OIDConversion> datas = da.gets(OIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
final List<CoverConversion> tableCoverTransforms = da.gets(CoverConversion.class, new AccessDeletedItems(), new OverrideTableName(tableName));
|
||||
LOGGER.info("Get somes data: {} {}", datas.size(), tableCoverTransforms.size());
|
||||
for (final CoverConversion tableTransform : tableCoverTransforms) {
|
||||
final List<ObjectId> values = new ArrayList<>();
|
||||
if (tableTransform.covers == null) {
|
||||
continue;
|
||||
}
|
||||
for (final UUID link : tableTransform.covers) {
|
||||
for (final OIDConversion data : datas) {
|
||||
if (data.uuid.equals(link)) {
|
||||
values.add(data._id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (values.size() != 0) {
|
||||
tableTransform.covers_oid = values;
|
||||
LOGGER.info(" update: {}: {} => {}", tableTransform.id, tableTransform.covers, tableTransform.covers_oid);
|
||||
da.update(tableTransform, tableTransform.id, List.of("covers_oid"), new OverrideTableName(tableName));
|
||||
}
|
||||
}
|
||||
});
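// Swap the cover columns: drop the old UUID-based `covers` and rename `covers_oid` to `covers`.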
|
||||
addAction("ALTER TABLE `" + tableName + "` DROP `covers`;");
|
||||
addAction("ALTER TABLE `" + tableName + "` CHANGE `covers_oid` `covers` text NULL;");
|
||||
}
|
||||
addAction("""
|
||||
ALTER TABLE `media` ADD `dataOid` binary(12) AFTER dataId;
|
||||
""");
|
||||
addAction((final DBAccess da) -> {
|
||||
final List<OIDConversion> datas = da.gets(OIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
final List<MediaConversion> medias = da.gets(MediaConversion.class, new AccessDeletedItems(), new OverrideTableName("media"));
|
||||
for (final MediaConversion media : medias) {
|
||||
for (final OIDConversion data : datas) {
|
||||
if (data.uuid.equals(media.dataId)) {
|
||||
media.dataOid = data._id;
|
||||
da.update(media, media.id, List.of("dataOid"), new OverrideTableName("media"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
// addAction("""
|
||||
// ALTER TABLE `media` DROP `dataId`;
|
||||
// """);
|
||||
addAction("""
|
||||
ALTER TABLE `media` CHANGE `dataId` `dataIdOld` binary(16);
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `media` CHANGE `dataOid` `dataId` binary(12) NOT NULL;
|
||||
""");
|
||||
// Move the files...
|
||||
addAction((final DBAccess da) -> {
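// Move every stored file from its UUID-based path to its new ObjectId-based path,
// logging each "origin | destination" pair to Migration20250214.txt for traceability.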
|
||||
final String filePath = ConfigBaseVariable.getMediaDataFolder() + "/Migration20250214.txt";
|
||||
try (FileWriter writer = new FileWriter(filePath, true)) {
|
||||
final List<OIDConversion> datas = da.gets(OIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
for (final OIDConversion data : datas) {
|
||||
final String origin = DataResource.getFileDataOld(data.uuid);
|
||||
final String destination = DataResource.getFileData(data._id);
|
||||
LOGGER.info("move file = {}", origin);
|
||||
LOGGER.info(" ==> {}", destination);
|
||||
try {
|
||||
Files.move(Paths.get(origin), Paths.get(destination), StandardCopyOption.ATOMIC_MOVE);
|
||||
} catch (final NoSuchFileException ex) {
|
||||
LOGGER.warn("Fail to move file : {}", ex.getMessage());
|
||||
}
|
||||
writer.write(origin + " | " + destination + "\n");
|
||||
writer.flush();
|
||||
}
|
||||
} catch (final IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
});
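// Finally promote `_id` to the primary key of `data`; the old `uuid` column is kept
// for now (its DROP statement is intentionally left commented out).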
|
||||
addAction("""
|
||||
ALTER TABLE `data` DROP INDEX `PRIMARY`;
|
||||
""");
|
||||
// addAction("""
|
||||
// ALTER TABLE `data` DROP `uuid`;
|
||||
// """);
|
||||
addAction("""
|
||||
ALTER TABLE `data` ADD PRIMARY KEY `_id` (`_id`);
|
||||
""");
|
||||
}
|
||||
|
||||
}
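The heart of the conversion above is a one-time UUID-to-ObjectId mapping that every table then resolves its cover and data links against. A minimal standalone sketch of that idea, assuming only java.util and the org.bson ObjectId type already used above (UuidToOidSketch and its method names are illustrative only, not part of this repository):

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import org.bson.types.ObjectId;

public class UuidToOidSketch {
	// Give every known UUID a freshly generated ObjectId.
	public static Map<UUID, ObjectId> assignIds(final List<UUID> knownUuids) {
		final Map<UUID, ObjectId> mapping = new HashMap<>();
		for (final UUID uuid : knownUuids) {
			mapping.put(uuid, new ObjectId());
		}
		return mapping;
	}

	// Resolve a UUID link; returns null when the link points to a row that no longer
	// exists, mirroring how the migration simply skips unresolved covers.
	public static ObjectId resolve(final Map<UUID, ObjectId> mapping, final UUID link) {
		return mapping.get(link);
	}
}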
|
@ -0,0 +1,24 @@
|
||||
package org.atriasoft.karideo.migration;
|
||||
|
||||
import org.atriasoft.archidata.migration.MigrationSqlStep;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Migration20250414 extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Migration20250414.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2025-04-14: update to archidata 0.28.0";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
addAction("""
|
||||
ALTER TABLE `media` ADD `datePublication` timestamp(3) NULL;
|
||||
""");
|
||||
}
|
||||
|
||||
}
|
@ -1,9 +1,10 @@
|
||||
package org.kar.karideo.migration.model;
|
||||
package org.atriasoft.karideo.migration.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.atriasoft.archidata.annotation.DataJson;
|
||||
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
@ -12,4 +13,6 @@ public class CoverConversion {
|
||||
public Long id = null;
|
||||
@DataJson
|
||||
public List<UUID> covers = null;
|
||||
@DataJson
|
||||
public List<ObjectId> covers_oid = null;
|
||||
}
|
@ -0,0 +1,13 @@
|
||||
package org.atriasoft.karideo.migration.model;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
public class MediaConversion {
|
||||
@Id
|
||||
public Long id = null;
|
||||
public UUID dataId = null;
|
||||
public ObjectId dataOid = null;
|
||||
}
|
@ -0,0 +1,13 @@
|
||||
package org.atriasoft.karideo.migration.model;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
public class OIDConversion {
|
||||
@Id
|
||||
public UUID uuid = null;
|
||||
public ObjectId _id = null;
|
||||
}
|
78
back/src/org/atriasoft/karideo/model/Media.java
Normal file
@ -0,0 +1,78 @@
|
||||
package org.atriasoft.karideo.model;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.atriasoft.archidata.annotation.DataJson;
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiGenerationMode;
|
||||
import org.atriasoft.archidata.annotation.checker.CheckForeignKey;
|
||||
import org.atriasoft.archidata.annotation.checker.CollectionNotEmpty;
|
||||
import org.atriasoft.archidata.model.Data;
|
||||
import org.atriasoft.archidata.model.GenericDataSoftDelete;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.hibernate.validator.constraints.UniqueElements;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Entity;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
import jakarta.validation.constraints.PositiveOrZero;
|
||||
import jakarta.validation.constraints.Size;
|
||||
|
||||
@Entity
|
||||
@Table(name = "media")
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
@ApiGenerationMode(create = true, update = true)
|
||||
public class Media extends GenericDataSoftDelete {
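// Media now references its stored file through a BSON ObjectId (`dataId`) instead of
// the former UUID, matching the `data` table layout produced by Migration20250214.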
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
@Column(nullable = false, length = 0)
|
||||
@Size(min = 0, max = 256)
|
||||
public String name;
|
||||
@Schema(description = "Description of the media")
|
||||
@Column(length = 0)
|
||||
@Size(min = 0, max = 8192)
|
||||
public String description;
|
||||
@Schema(description = "Foreign Key Id of the data")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Data.class)
|
||||
@Column(nullable = false)
|
||||
@CheckForeignKey(target = Data.class)
|
||||
public ObjectId dataId;
|
||||
@Schema(description = "Type of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Type.class)
|
||||
@CheckForeignKey(target = Type.class)
|
||||
public Long typeId;
|
||||
@Schema(description = "Series reference of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Series.class)
|
||||
@CheckForeignKey(target = Series.class)
|
||||
public Long seriesId;
|
||||
@Schema(description = "Season reference of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Season.class)
|
||||
@CheckForeignKey(target = Season.class)
|
||||
public Long seasonId;
|
||||
@Schema(description = "Episode Id")
|
||||
@PositiveOrZero
|
||||
public Integer episode;
|
||||
@Schema(description = "Creation years of the media")
|
||||
public Date datePublication;
|
||||
@Schema(description = "Limitation Age of the media")
|
||||
@PositiveOrZero
|
||||
public Integer ageLimit;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
@Nullable
|
||||
@CollectionNotEmpty
|
||||
@UniqueElements
|
||||
public List<@CheckForeignKey(target = Data.class) ObjectId> covers = null;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Media [name=" + this.name + ", description=" + this.description + ", dataId=" + this.dataId + ", typeId=" + this.typeId + ", seriesId=" + this.seriesId + ", seasonId=" + this.seasonId
|
||||
+ ", episode=" + this.episode + ", date=" + this.datePublication + ", ageLimit=" + this.ageLimit + ", covers=" + this.covers + "]";
|
||||
}
|
||||
|
||||
}
|
50
back/src/org/atriasoft/karideo/model/Season.java
Normal file
@ -0,0 +1,50 @@
|
||||
package org.atriasoft.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.atriasoft.archidata.annotation.DataIfNotExists;
|
||||
import org.atriasoft.archidata.annotation.DataJson;
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiGenerationMode;
|
||||
import org.atriasoft.archidata.annotation.checker.CheckForeignKey;
|
||||
import org.atriasoft.archidata.annotation.checker.CollectionNotEmpty;
|
||||
import org.atriasoft.archidata.model.Data;
|
||||
import org.atriasoft.archidata.model.GenericDataSoftDelete;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.hibernate.validator.constraints.UniqueElements;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Size;
|
||||
|
||||
@Table(name = "season")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
@ApiGenerationMode(create = true, update = true)
|
||||
public class Season extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
@Size(min = 0, max = 256)
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
@Size(min = 0, max = 8192)
|
||||
public String description;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "series parent ID")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Series.class)
|
||||
@CheckForeignKey(target = Series.class)
|
||||
public Long parentId;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
@Nullable
|
||||
@CollectionNotEmpty
|
||||
@UniqueElements
|
||||
public List<@CheckForeignKey(target = Data.class) @NotNull ObjectId> covers = null;
|
||||
}
|
50
back/src/org/atriasoft/karideo/model/Series.java
Normal file
@ -0,0 +1,50 @@
|
||||
package org.atriasoft.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.atriasoft.archidata.annotation.DataIfNotExists;
|
||||
import org.atriasoft.archidata.annotation.DataJson;
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiGenerationMode;
|
||||
import org.atriasoft.archidata.annotation.checker.CheckForeignKey;
|
||||
import org.atriasoft.archidata.annotation.checker.CollectionNotEmpty;
|
||||
import org.atriasoft.archidata.model.Data;
|
||||
import org.atriasoft.archidata.model.GenericDataSoftDelete;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.hibernate.validator.constraints.UniqueElements;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Size;
|
||||
|
||||
@Table(name = "series")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
@ApiGenerationMode(create = true, update = true)
|
||||
public class Series extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Size(min = 0, max = 256)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
@Size(min = 0, max = 8192)
|
||||
public String description;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "series parent ID")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Type.class)
|
||||
@CheckForeignKey(target = Type.class)
|
||||
public Long parentId;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
@Nullable
|
||||
@CollectionNotEmpty
|
||||
@UniqueElements
|
||||
public List<@CheckForeignKey(target = Data.class) @NotNull ObjectId> covers = null;
|
||||
}
|
51
back/src/org/atriasoft/karideo/model/Type.java
Normal file
@ -0,0 +1,51 @@
|
||||
package org.atriasoft.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.atriasoft.archidata.annotation.DataIfNotExists;
|
||||
import org.atriasoft.archidata.annotation.DataJson;
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiGenerationMode;
|
||||
import org.atriasoft.archidata.annotation.checker.CheckForeignKey;
|
||||
import org.atriasoft.archidata.annotation.checker.CollectionNotEmpty;
|
||||
import org.atriasoft.archidata.model.Data;
|
||||
import org.atriasoft.archidata.model.GenericDataSoftDelete;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.hibernate.validator.constraints.UniqueElements;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Table;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Size;
|
||||
|
||||
@Table(name = "type")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
@ApiGenerationMode(create = true, update = true)
|
||||
public class Type extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Size(min = 0, max = 256)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
@Size(min = 0, max = 8192)
|
||||
public String description;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
@Nullable
|
||||
@CollectionNotEmpty
|
||||
@UniqueElements
|
||||
public List<@CheckForeignKey(target = Data.class) @NotNull ObjectId> covers = null;
|
||||
|
||||
public Type() {}
|
||||
|
||||
public Type(final String name, final String description) {
|
||||
this.name = name;
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
package org.kar.karideo.model;
|
||||
package org.atriasoft.karideo.model;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.model.User;
|
||||
import org.atriasoft.archidata.annotation.DataIfNotExists;
|
||||
import org.atriasoft.archidata.model.User;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
@ -1,37 +1,49 @@
|
||||
package org.kar.karideo.model;
|
||||
package org.atriasoft.karideo.model;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataNotRead;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
import org.atriasoft.archidata.annotation.DataIfNotExists;
|
||||
import org.atriasoft.archidata.annotation.DataNotRead;
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiAccessLimitation;
|
||||
import org.atriasoft.archidata.annotation.apiGenerator.ApiGenerationMode;
|
||||
import org.atriasoft.archidata.annotation.checker.CheckForeignKey;
|
||||
import org.atriasoft.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
import jakarta.validation.constraints.PositiveOrZero;
|
||||
|
||||
@Table(name = "userMediaAdvancement")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
@ApiGenerationMode(create = true, update = true)
|
||||
public class UserMediaAdvancement extends GenericDataSoftDelete {
|
||||
@DataNotRead
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Foreign Key Id of the user")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = UserKarideo.class)
|
||||
@Nullable
|
||||
@ApiAccessLimitation(updatable = false, creatable = false)
|
||||
public Long userId;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Id of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Media.class)
|
||||
@CheckForeignKey(target = Media.class)
|
||||
public Long mediaId;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Percent of advancement in the media")
|
||||
@PositiveOrZero
|
||||
public Float percent;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Number of second of advancement in the media")
|
||||
@PositiveOrZero
|
||||
public Integer time;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "Number of time this media has been read")
|
||||
@PositiveOrZero
|
||||
public Integer count;
|
||||
}
|
13
back/src/org/atriasoft/karideo/util/ConfigVariable.java
Normal file
@ -0,0 +1,13 @@
|
||||
package org.atriasoft.karideo.util;
|
||||
|
||||
public class ConfigVariable {
|
||||
public static final String BASE_NAME = "ORG_KARIDEO_";
|
||||
|
||||
public static String getFrontFolder() {
|
||||
String out = System.getenv(BASE_NAME + "FRONT_FOLDER");
|
||||
if (out == null) {
|
||||
return "/application/front";
|
||||
}
|
||||
return out;
|
||||
}
|
||||
}
|
@ -1,60 +0,0 @@
|
||||
package org.kar.karideo.internal;
|
||||
|
||||
//import io.scenarium.logger.LogLevel;
|
||||
//import io.scenarium.logger.Logger;
|
||||
|
||||
public class Log {
|
||||
// private static final String LIB_NAME = "logger";
|
||||
// private static final String LIB_NAME_DRAW = Logger.getDrawableName(LIB_NAME);
|
||||
// private static final boolean PRINT_CRITICAL = Logger.getNeedPrint(LIB_NAME, LogLevel.CRITICAL);
|
||||
// private static final boolean PRINT_ERROR = Logger.getNeedPrint(LIB_NAME, LogLevel.ERROR);
|
||||
// private static final boolean PRINT_WARNING = Logger.getNeedPrint(LIB_NAME, LogLevel.WARNING);
|
||||
// private static final boolean PRINT_INFO = Logger.getNeedPrint(LIB_NAME, LogLevel.INFO);
|
||||
// private static final boolean PRINT_DEBUG = Logger.getNeedPrint(LIB_NAME, LogLevel.DEBUG);
|
||||
// private static final boolean PRINT_VERBOSE = Logger.getNeedPrint(LIB_NAME, LogLevel.VERBOSE);
|
||||
// private static final boolean PRINT_TODO = Logger.getNeedPrint(LIB_NAME, LogLevel.TODO);
|
||||
// private static final boolean PRINT_PRINT = Logger.getNeedPrint(LIB_NAME, LogLevel.PRINT);
|
||||
//
|
||||
// private Log() {}
|
||||
//
|
||||
// public static void print(String data) {
|
||||
// if (PRINT_PRINT)
|
||||
// Logger.print(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void todo(String data) {
|
||||
// if (PRINT_TODO)
|
||||
// Logger.todo(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void critical(String data) {
|
||||
// if (PRINT_CRITICAL)
|
||||
// Logger.critical(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void error(String data) {
|
||||
// if (PRINT_ERROR)
|
||||
// Logger.error(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void warning(String data) {
|
||||
// if (PRINT_WARNING)
|
||||
// Logger.warning(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void info(String data) {
|
||||
// if (PRINT_INFO)
|
||||
// Logger.info(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void debug(String data) {
|
||||
// if (PRINT_DEBUG)
|
||||
// Logger.debug(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
//
|
||||
// public static void verbose(String data) {
|
||||
// if (PRINT_VERBOSE)
|
||||
// Logger.verbose(LIB_NAME_DRAW, data);
|
||||
// }
|
||||
|
||||
}
|
@ -1,65 +0,0 @@
|
||||
package org.kar.karideo.migration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.kar.archidata.model.Data;
|
||||
import org.kar.archidata.model.User;
|
||||
import org.kar.karideo.model.Media;
|
||||
import org.kar.karideo.model.Season;
|
||||
import org.kar.karideo.model.Series;
|
||||
import org.kar.karideo.model.Type;
|
||||
import org.kar.karideo.model.UserMediaAdvancement;
|
||||
|
||||
public class Initialization extends MigrationSqlStep {
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
public static final List<Class<?>> CLASSES_BASE = List.of(Data.class, Media.class, Type.class, Series.class, Season.class, User.class, UserMediaAdvancement.class);
|
||||
@Override
|
||||
public String getName() {
|
||||
return "Initialization";
|
||||
}
|
||||
|
||||
public Initialization() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
for(final Class<?> clazz : CLASSES_BASE) {
|
||||
addClass(clazz);
|
||||
}
|
||||
|
||||
addAction("""
|
||||
INSERT INTO `type` (`id`, `name`, `description`) VALUES
|
||||
(UUID_TO_BIN('15237fd7-d4ee-11ee-a8dd-02420a030203'), 'Documentary', 'Documentary (animals, space, earth...)'),
|
||||
(UUID_TO_BIN('553146c1-d4ee-11ee-a8dd-02420a030203'), 'Movie', 'Movie with real humans (film)'),
|
||||
(UUID_TO_BIN('59c430a3-d4ee-11ee-a8dd-02420a030203'), 'Animation', 'Animation movies (film)'),
|
||||
(UUID_TO_BIN('5cd619e3-d4ee-11ee-a8dd-02420a030203'), 'Short movie', 'Small movies (less 2 minutes)'),
|
||||
(UUID_TO_BIN('5fbbf085-d4ee-11ee-a8dd-02420a030203'), 'TV show', 'TV show for old peoples'),
|
||||
(UUID_TO_BIN('66dcb6ba-d4ee-11ee-a8dd-02420a030203'), 'Animation TV show', 'TV show for young peoples'),
|
||||
(UUID_TO_BIN('69ee5c15-d4ee-11ee-a8dd-02420a030203'), 'Theater', 'Theater play'),
|
||||
(UUID_TO_BIN('6ce72530-d4ee-11ee-a8dd-02420a030203'), 'One man show', 'Recorded stand up'),
|
||||
(UUID_TO_BIN('6ff1691a-d4ee-11ee-a8dd-02420a030203'), 'Concert', 'Recorded concert'),
|
||||
(UUID_TO_BIN('730815ef-d4ee-11ee-a8dd-02420a030203'), 'Opera', 'Recorded opera');
|
||||
""");
|
||||
// set start increment element to permit to add after default elements
|
||||
addAction("""
|
||||
ALTER TABLE `media` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `type` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `series` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `season` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
addAction("""
|
||||
ALTER TABLE `userMediaAdvancement` AUTO_INCREMENT = 1000;
|
||||
""", "mysql");
|
||||
}
|
||||
|
||||
}
|
@ -1,141 +0,0 @@
|
||||
package org.kar.karideo.migration;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.api.DataResource;
|
||||
import org.kar.archidata.dataAccess.DataAccess;
|
||||
import org.kar.archidata.dataAccess.addOn.model.LinkTableLongLong;
|
||||
import org.kar.archidata.dataAccess.options.AccessDeletedItems;
|
||||
import org.kar.archidata.dataAccess.options.OverrideTableName;
|
||||
import org.kar.archidata.migration.MigrationSqlStep;
|
||||
import org.kar.archidata.tools.UuidUtils;
|
||||
import org.kar.karideo.migration.model.CoverConversion;
|
||||
import org.kar.karideo.migration.model.MediaConversion;
|
||||
import org.kar.karideo.migration.model.UUIDConversion;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class Migration20240226 extends MigrationSqlStep {
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Migration20240226.class);
|
||||
|
||||
public static final int KARSO_INITIALISATION_ID = 1;
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "migration-2024-02-26: convert base with UUID";
|
||||
}
|
||||
|
||||
public Migration20240226() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateStep() throws Exception {
|
||||
addAction("""
|
||||
ALTER TABLE `data` ADD `uuid` binary(16) AFTER `id`;
|
||||
""");
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
for (final UUIDConversion elem: datas) {
|
||||
elem.uuid = UuidUtils.nextUUID();
|
||||
}
|
||||
for (final UUIDConversion elem: datas) {
|
||||
DataAccess.update(elem, elem.id, List.of("uuid"), new OverrideTableName("data"));
|
||||
}
|
||||
});
|
||||
addAction("""
|
||||
ALTER TABLE `data` CHANGE `uuid` `uuid` binary(16) DEFAULT (UUID_TO_BIN(UUID(), TRUE));
|
||||
""");
|
||||
final List<String> tableToTransform = List.of("media", "season", "series", "type", "user");
|
||||
for (final String tableName : tableToTransform ) {
|
||||
addAction("ALTER TABLE `" + tableName + "` ADD `covers` text NULL;");
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
final List<CoverConversion> medias = DataAccess.gets(CoverConversion.class, new AccessDeletedItems(), new OverrideTableName(tableName));
|
||||
final List<LinkTableLongLong> links = DataAccess.gets(LinkTableLongLong.class, new OverrideTableName(tableName + "_link_cover"));
|
||||
LOGGER.info("Get somes data: {} {} {}", datas.size(), medias.size(), links.size());
|
||||
for (final CoverConversion media: medias) {
|
||||
final List<UUID> values = new ArrayList<>();
|
||||
for (final LinkTableLongLong link: links) {
|
||||
if (link.object1Id.equals(media.id)) {
|
||||
for (final UUIDConversion data: datas) {
|
||||
if (data.id.equals(link.object2Id)) {
|
||||
values.add(data.uuid);
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (values.size() != 0) {
|
||||
media.covers = values;
|
||||
LOGGER.info(" update: {} => {}", media.id, media.covers);
|
||||
DataAccess.update(media, media.id, List.of("covers"), new OverrideTableName(tableName));
|
||||
}
|
||||
}
|
||||
});
|
||||
addAction("DROP TABLE `" + tableName + "_link_cover`;");
|
||||
}
|
||||
addAction("""
|
||||
ALTER TABLE `media` ADD `dataUUID` binary(16) AFTER dataId;
|
||||
""");
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
final List<MediaConversion> medias = DataAccess.gets(MediaConversion.class, new AccessDeletedItems(), new OverrideTableName("media"));
|
||||
for (final MediaConversion media: medias) {
|
||||
for (final UUIDConversion data: datas) {
|
||||
if (data.id.equals(media.dataId)) {
|
||||
media.dataUUID = data.uuid;
|
||||
DataAccess.update(media, media.id, List.of("dataUUID"), new OverrideTableName("media"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
addAction("""
|
||||
ALTER TABLE `media` DROP `dataId`;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `media` CHANGE `dataUUID` `dataId` binary(16) NOT NULL;
|
||||
""");
|
||||
// Move the files...
|
||||
addAction(() -> {
|
||||
final List<UUIDConversion> datas = DataAccess.gets(UUIDConversion.class, new AccessDeletedItems(), new OverrideTableName("data"));
|
||||
for (final UUIDConversion data: datas) {
|
||||
final String origin = DataResource.getFileDataOld(data.id);
|
||||
final String destination = DataResource.getFileData(data.uuid);
|
||||
LOGGER.info("move file = {}", origin);
|
||||
LOGGER.info(" ==> {}", destination);
|
||||
try {
|
||||
Files.move(Paths.get(origin), Paths.get(destination), StandardCopyOption.ATOMIC_MOVE);
|
||||
} catch (final NoSuchFileException ex) {
|
||||
LOGGER.error("MOVE_ERROR : {} -> {}", origin, destination);
|
||||
}
|
||||
}
|
||||
});
|
||||
/* I am not sure then I prefer keep the primary key for the moment
|
||||
addAction("""
|
||||
ALTER TABLE `data` DROP `id`;
|
||||
""");
|
||||
*/
|
||||
addAction("""
|
||||
ALTER TABLE `data` CHANGE `id` `idOld` bigint NOT NULL DEFAULT 0;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `data` DROP PRIMARY KEY;
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `data` CHANGE `uuid` `id` binary(16) DEFAULT (UUID_TO_BIN(UUID(), TRUE));
|
||||
""");
|
||||
addAction("""
|
||||
ALTER TABLE `data` ADD PRIMARY KEY `id` (`id`);
|
||||
""");
|
||||
}
|
||||
|
||||
}
|
@ -1,12 +0,0 @@
|
||||
package org.kar.karideo.migration.model;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
public class MediaConversion {
|
||||
@Id
|
||||
public Long id = null;
|
||||
public Long dataId = null;
|
||||
public UUID dataUUID = null;
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
package org.kar.karideo.migration.model;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import jakarta.persistence.Id;
|
||||
|
||||
public class UUIDConversion {
|
||||
@Id
|
||||
public Long id = null;
|
||||
public UUID uuid = null;
|
||||
}
|
@ -1,61 +0,0 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.Data;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Entity;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Entity
|
||||
@Table(name = "media")
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Media extends GenericDataSoftDelete {
|
||||
// Name of the media (this represent the title)
|
||||
@Column(nullable = false, length = 0)
|
||||
public String name;
|
||||
// Description of the media
|
||||
@Column(length = 0)
|
||||
public String description;
|
||||
// Foreign Key Id of the data
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Data.class)
|
||||
@Column(nullable = false)
|
||||
public UUID dataId;
|
||||
// Type of the media")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Type.class)
|
||||
public Long typeId;
|
||||
// Series reference of the media
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Series.class)
|
||||
public Long seriesId;
|
||||
// Season reference of the media
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Season.class)
|
||||
public Long seasonId;
|
||||
// Episode Id
|
||||
public Integer episode;
|
||||
// ")
|
||||
public Integer date;
|
||||
// Creation years of the media
|
||||
public Integer time;
|
||||
// Limitation Age of the media
|
||||
public Integer ageLimit;
|
||||
// List of Id of the specific covers
|
||||
@DataJson(targetEntity = Data.class)
|
||||
public List<UUID> covers = null;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Media [name=" + this.name + ", description=" + this.description + ", dataId=" + this.dataId + ", typeId=" + this.typeId
|
||||
+ ", seriesId=" + this.seriesId + ", seasonId=" + this.seasonId + ", episode=" + this.episode + ", date=" + this.date
|
||||
+ ", time=" + this.time + ", ageLimit=" + this.ageLimit + ", covers=" + this.covers + "]";
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -1,35 +0,0 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "season")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Season extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
public String description;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "series parent ID")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Series.class)
|
||||
public Long parentId;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
public List<UUID> covers = null;
|
||||
}
|
@ -1,35 +0,0 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "series")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Series extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
public String description;
|
||||
@Column(nullable = false)
|
||||
@Schema(description = "series parent ID")
|
||||
@ManyToOne(fetch = FetchType.LAZY, targetEntity = Type.class)
|
||||
public Long parentId;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
public List<UUID> covers = null;
|
||||
}
|
@ -1,29 +0,0 @@
|
||||
package org.kar.karideo.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.kar.archidata.annotation.DataIfNotExists;
|
||||
import org.kar.archidata.annotation.DataJson;
|
||||
import org.kar.archidata.model.GenericDataSoftDelete;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Table;
|
||||
|
||||
@Table(name = "type")
|
||||
@DataIfNotExists
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class Type extends GenericDataSoftDelete {
|
||||
@Column(nullable = false, length = 0)
|
||||
@Schema(description = "Name of the media (this represent the title)")
|
||||
public String name;
|
||||
@Column(length = 0)
|
||||
@Schema(description = "Description of the media")
|
||||
public String description;
|
||||
@Schema(description = "List of Id of the specific covers")
|
||||
@DataJson()
|
||||
public List<UUID> covers = null;
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
package org.kar.karideo.util;
|
||||
|
||||
public class ConfigVariable {
|
||||
public static final String BASE_NAME = "ORG_KARIDEO_";
|
||||
|
||||
public static String getFrontFolder() {
|
||||
String out = System.getenv(BASE_NAME + "FRONT_FOLDER");
|
||||
if (out == null) {
|
||||
return "/application/front";
|
||||
}
|
||||
return out;
|
||||
}
|
||||
}
|
@ -1,10 +1,11 @@
|
||||
package test.kar.karideo;
|
||||
package test.atriasoft.karideo;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.kar.archidata.tools.JWTWrapper;
|
||||
import org.atriasoft.archidata.filter.PartRight;
|
||||
import org.atriasoft.archidata.tools.JWTWrapper;
|
||||
|
||||
public class Common {
|
||||
static String USER_TOKEN = JWTWrapper.createJwtTestToken(16512, "test_user_login", "KarAuth", "karideo", Map.of("karideo", Map.of("USER", Boolean.TRUE)));
|
||||
static String ADMIN_TOKEN = JWTWrapper.createJwtTestToken(16512, "test_admin_login", "KarAuth", "karideo", Map.of("karideo", Map.of("USER", Boolean.TRUE, "ADMIN", Boolean.TRUE)));
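// Rights are now expressed with archidata's PartRight granularity (READ / READ_WRITE)
// instead of the previous plain Boolean flags.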
|
||||
static String USER_TOKEN = JWTWrapper.createJwtTestToken(16512, "test_user_login", "KarAuth", "karideo", Map.of("karideo", Map.of("USER", PartRight.READ)));
|
||||
static String ADMIN_TOKEN = JWTWrapper.createJwtTestToken(16512, "test_admin_login", "KarAuth", "karideo", Map.of("karideo", Map.of("USER", PartRight.READ_WRITE, "ADMIN", PartRight.READ_WRITE)));
|
||||
}
|
115
back/test/src/test/atriasoft/karideo/ConfigureDb.java
Normal file
@ -0,0 +1,115 @@
|
||||
package test.atriasoft.karideo;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.atriasoft.archidata.dataAccess.DBAccess;
|
||||
import org.atriasoft.archidata.db.DbConfig;
|
||||
import org.atriasoft.archidata.db.DbIoFactory;
|
||||
import org.atriasoft.archidata.exception.DataAccessException;
|
||||
import org.atriasoft.archidata.tools.ConfigBaseVariable;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import jakarta.ws.rs.InternalServerErrorException;
|
||||
|
||||
public class ConfigureDb {
|
||||
final static private Logger LOGGER = LoggerFactory.getLogger(ConfigureDb.class);
|
||||
final static private String modeTestForced = null;// "MONGO";
|
||||
public static DBAccess da = null;
|
||||
|
||||
public static void configure() throws IOException, InternalServerErrorException, DataAccessException {
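// Select the test database backend from TEST_E2E_MODE: unset/empty/"false" uses an
// in-memory SQLite, "true" uses MySQL, and any other value ("SQLITE", "MY-SQL",
// "MONGO", ...) is taken as-is; modeTestForced, when set, overrides the whole choice.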
|
||||
String modeTest = System.getenv("TEST_E2E_MODE");
|
||||
if (modeTest == null || modeTest.isEmpty() || "false".equalsIgnoreCase(modeTest)) {
|
||||
modeTest = "SQLITE-MEMORY";
|
||||
} else if ("true".equalsIgnoreCase(modeTest)) {
|
||||
modeTest = "MY-SQL";
|
||||
}
|
||||
// override the local test:
|
||||
if (modeTestForced != null) {
|
||||
modeTest = modeTestForced;
|
||||
}
|
||||
// for local test:
|
||||
ConfigBaseVariable.apiAdress = "http://127.0.0.1:12342/test/api/";
|
||||
// Enabling test mode gives access to the test token (never use it in production).
|
||||
ConfigBaseVariable.testMode = "true";
|
||||
if ("SQLITE-MEMORY".equalsIgnoreCase(modeTest)) {
|
||||
ConfigBaseVariable.dbType = "sqlite";
|
||||
ConfigBaseVariable.bdDatabase = null;
|
||||
ConfigBaseVariable.dbHost = "memory";
|
||||
// for tests we need to keep the DB connected at all times
|
||||
ConfigBaseVariable.dbKeepConnected = "true";
|
||||
} else if ("SQLITE".equalsIgnoreCase(modeTest)) {
|
||||
ConfigBaseVariable.dbType = "sqlite";
|
||||
ConfigBaseVariable.bdDatabase = null;
|
||||
ConfigBaseVariable.dbKeepConnected = "true";
|
||||
} else if ("MY-SQL".equalsIgnoreCase(modeTest)) {
|
||||
ConfigBaseVariable.dbType = "mysql";
|
||||
ConfigBaseVariable.bdDatabase = "test_Karideo_db";
|
||||
ConfigBaseVariable.dbPort = "3906";
|
||||
ConfigBaseVariable.dbUser = "root";
|
||||
} else if ("MONGO".equalsIgnoreCase(modeTest)) {
|
||||
ConfigBaseVariable.dbType = "mongo";
|
||||
ConfigBaseVariable.bdDatabase = "test_Karideo_db";
|
||||
} else {
|
||||
// User local modification ...
|
||||
ConfigBaseVariable.bdDatabase = "test_Karideo_db";
|
||||
ConfigBaseVariable.dbPort = "3906";
|
||||
ConfigBaseVariable.dbUser = "root";
|
||||
}
|
||||
removeDB();
|
||||
// Connect the dataBase...
|
||||
da = DBAccess.createInterface();
|
||||
}
|
||||
|
||||
public static void removeDB() {
|
||||
String modeTest = System.getenv("TEST_E2E_MODE");
|
||||
if (modeTest == null || modeTest.isEmpty() || "false".equalsIgnoreCase(modeTest)) {
|
||||
modeTest = "SQLITE-MEMORY";
|
||||
} else if ("true".equalsIgnoreCase(modeTest)) {
|
||||
modeTest = "MY-SQL";
|
||||
}
|
||||
// override the local test:
|
||||
if (modeTestForced != null) {
|
||||
modeTest = modeTestForced;
|
||||
}
|
||||
DbConfig config = null;
|
||||
try {
|
||||
config = new DbConfig();
|
||||
} catch (final DataAccessException e) {
|
||||
e.printStackTrace();
|
||||
LOGGER.error("Fail to clean the DB");
|
||||
return;
|
||||
}
|
||||
config.setDbName(null);
|
||||
LOGGER.info("Remove the DB and create a new one '{}'", config.getDbName());
|
||||
try (final DBAccess daRoot = DBAccess.createInterface(config)) {
|
||||
if ("SQLITE-MEMORY".equalsIgnoreCase(modeTest)) {
|
||||
// nothing to do ...
|
||||
} else if ("SQLITE".equalsIgnoreCase(modeTest)) {
|
||||
daRoot.deleteDB(ConfigBaseVariable.bdDatabase);
|
||||
} else if ("MY-SQL".equalsIgnoreCase(modeTest)) {
|
||||
daRoot.deleteDB(ConfigBaseVariable.bdDatabase);
|
||||
} else if ("MONGO".equalsIgnoreCase(modeTest)) {
|
||||
daRoot.deleteDB(ConfigBaseVariable.bdDatabase);
|
||||
}
|
||||
daRoot.createDB(ConfigBaseVariable.bdDatabase);
|
||||
} catch (final InternalServerErrorException e) {
|
||||
e.printStackTrace();
|
||||
LOGGER.error("Fail to clean the DB");
|
||||
return;
|
||||
} catch (final IOException e) {
|
||||
e.printStackTrace();
|
||||
LOGGER.error("Fail to clean the DB");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
public static void clear() throws IOException {
|
||||
LOGGER.info("Remove the test db");
|
||||
removeDB();
|
||||
// The connection is by default open ==> close it at the end of test:
|
||||
da.close();
|
||||
DbIoFactory.closeAllForceMode();
|
||||
ConfigBaseVariable.clearAllValue();
|
||||
}
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
package test.kar.karideo;
|
||||
package test.atriasoft.karideo;
|
||||
|
||||
import org.junit.jupiter.api.extension.ConditionEvaluationResult;
|
||||
import org.junit.jupiter.api.extension.ExecutionCondition;
|
48
back/test/src/test/atriasoft/karideo/TestBase.java
Normal file
@ -0,0 +1,48 @@
|
||||
package test.atriasoft.karideo;
|
||||
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.MethodOrderer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestMethodOrder;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.atriasoft.archidata.tools.ConfigBaseVariable;
|
||||
import org.atriasoft.archidata.tools.RESTApi;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@ExtendWith(StepwiseExtension.class)
|
||||
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||
public class TestBase {
|
||||
private final static Logger LOGGER = LoggerFactory.getLogger(TestBase.class);
|
||||
public final static String ENDPOINT_NAME = "species/";
|
||||
|
||||
static WebLauncherTest webInterface = null;
|
||||
static RESTApi api = null;
|
||||
|
||||
@BeforeAll
|
||||
public static void configureWebServer() throws Exception {
|
||||
ConfigureDb.configure();
|
||||
LOGGER.info("configure server ...");
|
||||
webInterface = new WebLauncherTest();
|
||||
LOGGER.info("Start REST (BEGIN)");
|
||||
webInterface.process();
|
||||
LOGGER.info("Start REST (DONE)");
|
||||
api = new RESTApi(ConfigBaseVariable.apiAdress);
|
||||
api.setToken(Common.ADMIN_TOKEN);
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public static void stopWebServer() throws Exception {
|
||||
LOGGER.info("Kill the web server");
|
||||
webInterface.stop();
|
||||
webInterface = null;
|
||||
ConfigureDb.clear();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEmpty() throws Exception {
|
||||
|
||||
}
|
||||
|
||||
}
|
@ -1,7 +1,9 @@
|
||||
package test.kar.karideo;
|
||||
|
||||
import java.io.IOException;
|
||||
package test.atriasoft.karideo;
|
||||
|
||||
import org.atriasoft.archidata.exception.RESTErrorResponseException;
|
||||
import org.atriasoft.archidata.tools.ConfigBaseVariable;
|
||||
import org.atriasoft.archidata.tools.RESTApi;
|
||||
import org.atriasoft.karideo.api.HealthCheck.HealthResult;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
@ -10,11 +12,6 @@ import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestMethodOrder;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.kar.archidata.db.DBEntry;
|
||||
import org.kar.archidata.exception.RESTErrorResponseExeption;
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.archidata.tools.RESTApi;
|
||||
import org.kar.karideo.api.HealthCheck.HealthResult;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@ -22,49 +19,43 @@ import org.slf4j.LoggerFactory;
|
||||
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||
public class TestHealthCheck {
|
||||
private final static Logger LOGGER = LoggerFactory.getLogger(TestHealthCheck.class);
|
||||
|
||||
|
||||
static WebLauncherTest webInterface = null;
|
||||
static RESTApi api = null;
|
||||
|
||||
@BeforeAll
|
||||
public static void configureWebServer() throws Exception {
|
||||
ConfigureDb.configure();
|
||||
LOGGER.info("configure server ...");
|
||||
webInterface = new WebLauncherTest();
|
||||
LOGGER.info("Create DB");
|
||||
try {
|
||||
webInterface.migrateDB();
|
||||
} catch (final Exception ex) {
|
||||
ex.printStackTrace();
|
||||
LOGGER.error("Detect an error: {}", ex.getMessage());
|
||||
}
|
||||
LOGGER.info("Start REST (BEGIN)");
|
||||
webInterface.process();
|
||||
LOGGER.info("Start REST (DONE)");
|
||||
api = new RESTApi(ConfigBaseVariable.apiAdress);
|
||||
api.setToken(Common.ADMIN_TOKEN);
|
||||
}
|
||||
|
||||
|
||||
@AfterAll
|
||||
public static void stopWebServer() throws InterruptedException, IOException {
|
||||
public static void stopWebServer() throws Exception {
|
||||
LOGGER.info("Kill the web server");
|
||||
webInterface.stop();
|
||||
webInterface = null;
|
||||
LOGGER.info("Remove the test db");
|
||||
DBEntry.closeAllForceMode();
|
||||
ConfigBaseVariable.clearAllValue();
|
||||
ConfigureDb.clear();
|
||||
}
|
||||
|
||||
|
||||
@Order(1)
|
||||
@Test
|
||||
//@RepeatedTest(10)
|
||||
// @RepeatedTest(10)
|
||||
public void checkHealthCheck() throws Exception {
|
||||
final HealthResult result = api.get(HealthResult.class, "health_check");
|
||||
final HealthResult result = api.request("health_check").get().fetch(HealthResult.class);
|
||||
Assertions.assertEquals(result.value(), "alive and kicking");
|
||||
}
|
||||
|
||||
|
||||
@Order(2)
|
||||
@Test
|
||||
public void checkHealthCheckWrongAPI() throws Exception {
|
||||
Assertions.assertThrows(RESTErrorResponseExeption.class, () -> api.get(HealthResult.class, "health_checks"));
|
||||
Assertions.assertThrows(RESTErrorResponseException.class,
|
||||
() -> api.request("health_check_kaboom").get().fetch());
|
||||
}
|
||||
|
||||
|
||||
}
|
12
back/test/src/test/atriasoft/karideo/WebLauncherTest.java
Executable file
@ -0,0 +1,12 @@
|
||||
|
||||
package test.atriasoft.karideo;
|
||||
|
||||
import org.atriasoft.karideo.WebLauncher;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class WebLauncherTest extends WebLauncher {
|
||||
final private static Logger LOGGER = LoggerFactory.getLogger(WebLauncherTest.class);
|
||||
|
||||
public WebLauncherTest() {}
|
||||
}
|
@ -1,28 +0,0 @@
|
||||
|
||||
package test.kar.karideo;
|
||||
|
||||
import org.kar.archidata.tools.ConfigBaseVariable;
|
||||
import org.kar.karideo.WebLauncher;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class WebLauncherTest extends WebLauncher {
|
||||
final private static Logger LOGGER = LoggerFactory.getLogger(WebLauncherTest.class);
|
||||
|
||||
public WebLauncherTest() {
|
||||
LOGGER.debug("Configure REST system");
|
||||
// for local test:
|
||||
ConfigBaseVariable.apiAdress = "http://127.0.0.1:12345/test/api/";
|
||||
ConfigBaseVariable.testMode = "true";
|
||||
//ConfigBaseVariable.dbPort = "3306";
|
||||
// for the test we a in memory sqlite..
|
||||
ConfigBaseVariable.dbType = "sqlite";
|
||||
ConfigBaseVariable.dbHost = "memory";
|
||||
// for test we need to connect all time the DB
|
||||
ConfigBaseVariable.dbKeepConnected = "true";
|
||||
|
||||
//ConfigBaseVariable.dbHost = "localhost";
|
||||
//ConfigBaseVariable.dbUser = "root";
|
||||
//ConfigBaseVariable.dbPassword = "ZERTYSDGFVHSDFGHJYZSDFGSQxfgsqdfgsqdrf4564654";
|
||||
}
|
||||
}
|
1
front/.env
Normal file
@ -0,0 +1 @@
|
||||
NODE_ENV=development
|
2
front/.env.production
Normal file
@ -0,0 +1,2 @@
|
||||
# URL for database connection
|
||||
VITE_API_BASE_URL=karideo/api/
|
27
front/.storybook/main.ts
Normal file
@ -0,0 +1,27 @@
|
||||
import type { StorybookConfig } from '@storybook/react-vite';
|
||||
|
||||
const config: StorybookConfig = {
|
||||
framework: {
|
||||
name: '@storybook/react-vite',
|
||||
options: {},
|
||||
},
|
||||
|
||||
core: {
|
||||
disableTelemetry: true,
|
||||
builder: '@storybook/builder-vite',
|
||||
},
|
||||
|
||||
stories: ['../src/**/*.@(mdx|stories.@(js|jsx|ts|tsx))'],
|
||||
|
||||
addons: ['@storybook/addon-links', '@storybook/addon-essentials'],
|
||||
|
||||
staticDirs: ['../public'],
|
||||
|
||||
typescript: {
|
||||
reactDocgen: false,
|
||||
},
|
||||
|
||||
docs: {},
|
||||
};
|
||||
|
||||
export default config;
|
16
front/.storybook/preview-head.html
Normal file
@ -0,0 +1,16 @@
|
||||
<style>
|
||||
html {
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
.docs-story > :first-child {
|
||||
padding: 0;
|
||||
}
|
||||
.docs-story > * {
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
#root #start-ui-storybook-wrapper {
|
||||
min-height: 100vh;
|
||||
}
|
||||
</style>
|
34
front/.storybook/preview.tsx
Normal file
@ -0,0 +1,34 @@
|
||||
import React from 'react';
|
||||
|
||||
import { Box } from '@chakra-ui/react';
|
||||
import { ChakraProvider } from '@chakra-ui/react';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
|
||||
import { ColorModeProvider } from '../src/components/ui/color-mode';
|
||||
import { Toaster } from '../src/components/ui/toaster';
|
||||
import { systemTheme } from '../src/theme/theme';
|
||||
|
||||
// Wrapper that gives every story consistent padding inside the Storybook canvas.
|
||||
const DocumentationWrapper = ({ children }) => {
|
||||
return (
|
||||
<Box id="start-ui-storybook-wrapper" p="4" pb="8" flex="1">
|
||||
{children}
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export const decorators = [
|
||||
(Story, context) => (
|
||||
<ColorModeProvider>
|
||||
<ChakraProvider value={systemTheme}>
|
||||
{/* Using MemoryRouter to avoid route clashing with Storybook */}
|
||||
<MemoryRouter>
|
||||
<DocumentationWrapper>
|
||||
<Story {...context} />
|
||||
</DocumentationWrapper>
|
||||
</MemoryRouter>
|
||||
<Toaster />
|
||||
</ChakraProvider>
|
||||
</ColorModeProvider>
|
||||
),
|
||||
];
|
2
front/LICENSE
Normal file
@ -0,0 +1,2 @@
|
||||
Proprietary
|
||||
@copyright Edouard Dupin 2024
|
10637
front/config sample.yaml
Normal file
File diff suppressed because it is too large
14
front/index.html
Normal file
@ -0,0 +1,14 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Karideo</title>
|
||||
<link rel="icon" href="/favicon.ico" />
|
||||
</head>
|
||||
<body style="width:100vw;height:100vh;min-width:100%;min-height:100%;">
|
||||
<div id="root" style="width:100%;height:100%;min-width:100%;min-height:100%;"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
9
front/knip.ts
Normal file
@ -0,0 +1,9 @@
|
||||
import type { KnipConfig } from 'knip';
|
||||
|
||||
const config: KnipConfig = {
|
||||
// Ignoring mostly shell binaries
|
||||
ignoreBinaries: ['export', 'sleep'],
|
||||
ignore: [],
|
||||
};
|
||||
|
||||
export default config;
|
@ -1,50 +1,94 @@
|
||||
{
|
||||
"name": "karideo",
|
||||
"version": "0.0.0",
|
||||
"license": "MPL-2",
|
||||
"scripts": {
|
||||
"all": "npm run build && npm run test",
|
||||
"ng": "ng",
|
||||
"dev": "ng serve --configuration=develop --watch --port 4202",
|
||||
"dev-hot-update": "ng serve --configuration=develop --watch --hmr --port 4202",
|
||||
"build": "ng build --prod",
|
||||
"test": "ng test",
|
||||
"lint": "ng lint",
|
||||
"style": "prettier --write .",
|
||||
"e2e": "ng e2e",
|
||||
"update_packages": "ncu --upgrade",
|
||||
"install_dependency": "pnpm install --force",
|
||||
"link_kar_cw": "pnpm link ../../kar-cw/dist/kar-cw/",
|
||||
"unlink_kar_cw": "pnpm unlink ../../kar-cw/dist/kar-cw/"
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@angular/animations": "^17.3.5",
|
||||
"@angular/cdk": "^17.3.5",
|
||||
"@angular/common": "^17.3.5",
|
||||
"@angular/compiler": "^17.3.5",
|
||||
"@angular/core": "^17.3.5",
|
||||
"@angular/forms": "^17.3.5",
|
||||
"@angular/material": "^17.3.5",
|
||||
"@angular/platform-browser": "^17.3.5",
|
||||
"@angular/platform-browser-dynamic": "^17.3.5",
|
||||
"@angular/router": "^17.3.5",
|
||||
"rxjs": "^7.8.1",
|
||||
"zone.js": "^0.14.4",
|
||||
"zod": "3.23.0",
|
||||
"@kangaroo-and-rabbit/kar-cw": "^0.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-devkit/build-angular": "^17.3.5",
|
||||
"@angular-eslint/builder": "17.3.0",
|
||||
"@angular-eslint/eslint-plugin": "17.3.0",
|
||||
"@angular-eslint/eslint-plugin-template": "17.3.0",
|
||||
"@angular-eslint/schematics": "17.3.0",
|
||||
"@angular-eslint/template-parser": "17.3.0",
|
||||
"@angular/cli": "^17.3.5",
|
||||
"@angular/compiler-cli": "^17.3.5",
|
||||
"@angular/language-service": "^17.3.5",
|
||||
"npm-check-updates": "^16.14.18",
|
||||
"tslib": "^2.6.2"
|
||||
}
|
||||
}
|
||||
"name": "karideo",
|
||||
"private": true,
|
||||
"version": "0.0.1",
|
||||
"description": "KAR web music application",
|
||||
"author": {
|
||||
"name": "Edouard DUPIN",
|
||||
"email": "yui.heero@gmail.farm"
|
||||
},
|
||||
"license": "PROPRIETARY",
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
},
|
||||
"scripts": {
|
||||
"update_packages": "ncu --target minor",
|
||||
"upgrade_packages": "ncu --upgrade ",
|
||||
"install_dependency": "pnpm install",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest watch",
|
||||
"build": "tsc && vite build",
|
||||
"static:build": "pnpm build",
|
||||
"dev": "vite",
|
||||
"pretty": "prettier -w .",
|
||||
"lint": "pnpm tsc --noEmit",
|
||||
"storybook": "storybook dev -p 3001",
|
||||
"storybook:build": "storybook build && mv ./storybook-static ./public/storybook"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.{ts,tsx,js,jsx,json}": "prettier --write"
|
||||
},
|
||||
"dependencies": {
|
||||
"react-speech-recognition": "4.0.0",
|
||||
"regenerator-runtime": "0.14.1",
|
||||
"@locator/babel-jsx": "0.4.4",
|
||||
"@trivago/prettier-plugin-sort-imports": "5.2.2",
|
||||
"@chakra-ui/cli": "3.16.0",
|
||||
"@chakra-ui/react": "3.16.0",
|
||||
"@emotion/react": "11.14.0",
|
||||
"allotment": "1.20.3",
|
||||
"css-mediaquery": "0.1.2",
|
||||
"dayjs": "1.11.13",
|
||||
"history": "5.3.0",
|
||||
"next-themes": "^0.4.6",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"react-error-boundary": "5.0.0",
|
||||
"react-icons": "5.5.0",
|
||||
"react-router-dom": "7.5.0",
|
||||
"react-select": "5.10.1",
|
||||
"react-use": "17.6.0",
|
||||
"zod": "3.24.2",
|
||||
"zustand": "5.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@chakra-ui/styled-system": "^2.12.0",
|
||||
"@playwright/test": "1.51.1",
|
||||
"@storybook/addon-actions": "8.6.12",
|
||||
"@storybook/addon-essentials": "8.6.12",
|
||||
"@storybook/addon-links": "8.6.12",
|
||||
"@storybook/addon-mdx-gfm": "8.6.12",
|
||||
"@storybook/react": "8.6.12",
|
||||
"@storybook/react-vite": "8.6.12",
|
||||
"@storybook/theming": "8.6.12",
|
||||
"@testing-library/jest-dom": "6.6.3",
|
||||
"@testing-library/react": "16.3.0",
|
||||
"@testing-library/user-event": "14.6.1",
|
||||
"@trivago/prettier-plugin-sort-imports": "5.2.2",
|
||||
"@types/jest": "29.5.14",
|
||||
"@types/node": "22.14.1",
|
||||
"@types/react": "19.1.2",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@typescript-eslint/eslint-plugin": "8.30.1",
|
||||
"@typescript-eslint/parser": "8.30.1",
|
||||
"@vitejs/plugin-react": "4.3.4",
|
||||
"eslint": "9.24.0",
|
||||
"eslint-plugin-import": "2.31.0",
|
||||
"eslint-plugin-react": "7.37.5",
|
||||
"eslint-plugin-react-hooks": "5.2.0",
|
||||
"eslint-plugin-storybook": "0.12.0",
|
||||
"jest": "29.7.0",
|
||||
"jest-environment-jsdom": "29.7.0",
|
||||
"knip": "5.50.3",
|
||||
"lint-staged": "15.5.1",
|
||||
"npm-check-updates": "^17.1.18",
|
||||
"prettier": "3.5.3",
|
||||
"puppeteer": "24.6.1",
|
||||
"react-is": "19.1.0",
|
||||
"storybook": "8.6.12",
|
||||
"ts-node": "10.9.2",
|
||||
"typescript": "5.8.3",
|
||||
"vite": "6.2.6",
|
||||
"vitest": "3.1.1"
|
||||
}
|
||||
}
|
||||
|
18779
front/pnpm-lock.yaml
generated
File diff suppressed because it is too large
16
front/prettier.config.js
Normal file
@ -0,0 +1,16 @@
// Using a JS file, allowing us to add comments
module.exports = {
  // This plugins line is mandatory for the plugin to work with pnpm.
  // https://github.com/trivago/prettier-plugin-sort-imports/blob/61d069711008c530f5a41ca4e254781abc5de358/README.md?plain=1#L89-L96
  plugins: ['@trivago/prettier-plugin-sort-imports'],
  endOfLine: 'lf',
  semi: true,
  singleQuote: true,
  tabWidth: 2,
  trailingComma: 'es5',
  arrowParens: 'always',
  importOrder: ['^react$', '^(?!^react$|^@/|^[./]).*', '^@/(.*)$', '^[./]'],
  importOrderSeparation: true,
  importOrderSortSpecifiers: true,
  importOrderParserPlugins: ['jsx', 'typescript'],
};
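The importOrder patterns sort imports into four separated groups: React itself, other external packages, '@/' alias paths, and finally relative paths, with importOrderSeparation inserting a blank line between groups. As an illustrative sketch (the imported names below are hypothetical), the plugin would rewrite a header like this:

// Before sorting (hypothetical example)
import { AppRoutes } from './scene/AppRoutes';
import { Box } from '@chakra-ui/react';
import React from 'react';

// After sorting: 'react' first, external packages next, relative imports last,
// each group separated by a blank line.
import React from 'react';

import { Box } from '@chakra-ui/react';

import { AppRoutes } from './scene/AppRoutes';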
Before Width: | Height: | Size: 12 KiB   After Width: | Height: | Size: 12 KiB
BIN
front/public/icons/icon-192x192.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 12 KiB
BIN
front/public/icons/icon-512x512.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 41 KiB
21
front/public/manifest.json
Normal file
@ -0,0 +1,21 @@
{
  "name": "Karusic",
  "short_name": "Karusic",
  "description": "(K)angaroo (A)nd (R)abbit m(usic) is a music streaming",
  "start_url": "/karusic/",
  "display": "standalone",
  "background_color": "#000000",
  "theme_color": "#FFFFFF",
  "icons": [
    {
      "src": "/karusic/icons/icon-192x192.png",
      "sizes": "192x192",
      "type": "image/png"
    },
    {
      "src": "/karusic/icons/icon-512x512.png",
      "sizes": "512x512",
      "type": "image/png"
    }
  ]
}
20
front/src/App.tsx
Normal file
@ -0,0 +1,20 @@
import { ErrorBoundary } from '@/errors/ErrorBoundary';

import { VideoPlayer } from './components';
import { EnvDevelopment } from './components/EnvDevelopment/EnvDevelopment';
import { AppRoutes } from './scene/AppRoutes';
import { ServiceContextProvider } from './service/ServiceContext';

export const App = () => {
  return (
    <ServiceContextProvider>
      <EnvDevelopment />
      <ErrorBoundary>
        <AppRoutes />
      </ErrorBoundary>
      <VideoPlayer />
    </ServiceContextProvider>
  );
};

export default App;
@ -1,18 +0,0 @@
/**
 * API of the server (auto-generated code)
 */
import {
  HTTPMimeType,
  HTTPRequestModel,
  ModelResponseHttp,
  RESTCallbacks,
  RESTConfig,
  RESTRequestJson,
  RESTRequestJsonArray,
  RESTRequestVoid
} from "./rest-tools"
import {
} from "./model"
export namespace Front {

}
@ -1,13 +0,0 @@
/**
 * Global import of the package
 */
export * from "./model";
export * from "./front";
export * from "./health-check";
export * from "./season-resource";
export * from "./series-resource";
export * from "./type-resource";
export * from "./user-media-advancement-resource";
export * from "./user-resource";
export * from "./media-resource";
export * from "./data-resource";
@ -1,435 +0,0 @@
|
||||
/**
|
||||
* Interface of the server (auto-generated code)
|
||||
*/
|
||||
import { z as zod } from "zod";
|
||||
|
||||
export const ZodUUID = zod.string().uuid();
|
||||
export type UUID = zod.infer<typeof ZodUUID>;
|
||||
export function isUUID(data: any): data is UUID {
|
||||
try {
|
||||
ZodUUID.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodLong = zod.number();
|
||||
export type Long = zod.infer<typeof ZodLong>;
|
||||
export function isLong(data: any): data is Long {
|
||||
try {
|
||||
ZodLong.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodInteger = zod.number().safe();
|
||||
export type Integer = zod.infer<typeof ZodInteger>;
|
||||
export function isInteger(data: any): data is Integer {
|
||||
try {
|
||||
ZodInteger.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodFloat = zod.number();
|
||||
export type Float = zod.infer<typeof ZodFloat>;
|
||||
export function isFloat(data: any): data is Float {
|
||||
try {
|
||||
ZodFloat.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodInstant = zod.string();
|
||||
export type Instant = zod.infer<typeof ZodInstant>;
|
||||
export function isInstant(data: any): data is Instant {
|
||||
try {
|
||||
ZodInstant.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodDate = zod.date();
|
||||
export type Date = zod.infer<typeof ZodDate>;
|
||||
export function isDate(data: any): data is Date {
|
||||
try {
|
||||
ZodDate.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodTimestamp = zod.date();
|
||||
export type Timestamp = zod.infer<typeof ZodTimestamp>;
|
||||
export function isTimestamp(data: any): data is Timestamp {
|
||||
try {
|
||||
ZodTimestamp.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodLocalDate = zod.date();
|
||||
export type LocalDate = zod.infer<typeof ZodLocalDate>;
|
||||
export function isLocalDate(data: any): data is LocalDate {
|
||||
try {
|
||||
ZodLocalDate.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodLocalTime = zod.date();
|
||||
export type LocalTime = zod.infer<typeof ZodLocalTime>;
|
||||
export function isLocalTime(data: any): data is LocalTime {
|
||||
try {
|
||||
ZodLocalTime.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodRestErrorResponse = zod.object({
|
||||
uuid: ZodUUID.optional(),
|
||||
name: zod.string().max(255).optional(),
|
||||
message: zod.string().max(255).optional(),
|
||||
time: zod.string().max(255).optional(),
|
||||
status: ZodInteger,
|
||||
statusMessage: zod.string().max(255).optional()
|
||||
});
|
||||
export type RestErrorResponse = zod.infer<typeof ZodRestErrorResponse>;
|
||||
export function isRestErrorResponse(data: any): data is RestErrorResponse {
|
||||
try {
|
||||
ZodRestErrorResponse.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodHealthResult = zod.object({
|
||||
});
|
||||
export type HealthResult = zod.infer<typeof ZodHealthResult>;
|
||||
export function isHealthResult(data: any): data is HealthResult {
|
||||
try {
|
||||
ZodHealthResult.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodGenericTiming = zod.object({
|
||||
// Create time of the object
|
||||
createdAt: ZodDate.readonly().optional(),
|
||||
// When update the object
|
||||
updatedAt: ZodDate.readonly().optional()
|
||||
});
|
||||
export type GenericTiming = zod.infer<typeof ZodGenericTiming>;
|
||||
export function isGenericTiming(data: any): data is GenericTiming {
|
||||
try {
|
||||
ZodGenericTiming.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodGenericData = ZodGenericTiming.extend({
|
||||
// Unique Id of the object
|
||||
id: ZodLong.readonly().optional()
|
||||
});
|
||||
export type GenericData = zod.infer<typeof ZodGenericData>;
|
||||
export function isGenericData(data: any): data is GenericData {
|
||||
try {
|
||||
ZodGenericData.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodGenericDataSoftDelete = ZodGenericData.extend({
|
||||
// Deleted state
|
||||
deleted: zod.boolean().readonly().optional()
|
||||
});
|
||||
export type GenericDataSoftDelete = zod.infer<typeof ZodGenericDataSoftDelete>;
|
||||
export function isGenericDataSoftDelete(data: any): data is GenericDataSoftDelete {
|
||||
try {
|
||||
ZodGenericDataSoftDelete.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodSeason = ZodGenericDataSoftDelete.extend({
|
||||
// Name of the media (this represent the title)
|
||||
name: zod.string().optional(),
|
||||
// Description of the media
|
||||
description: zod.string().optional(),
|
||||
// series parent ID
|
||||
parentId: ZodLong.optional(),
|
||||
// List of Id of the specific covers
|
||||
covers: zod.array(ZodUUID).optional()
|
||||
});
|
||||
export type Season = zod.infer<typeof ZodSeason>;
|
||||
export function isSeason(data: any): data is Season {
|
||||
try {
|
||||
ZodSeason.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodSeries = ZodGenericDataSoftDelete.extend({
|
||||
// Name of the media (this represent the title)
|
||||
name: zod.string().optional(),
|
||||
// Description of the media
|
||||
description: zod.string().optional(),
|
||||
// series parent ID
|
||||
parentId: ZodLong.optional(),
|
||||
// List of Id of the specific covers
|
||||
covers: zod.array(ZodUUID).optional()
|
||||
});
|
||||
export type Series = zod.infer<typeof ZodSeries>;
|
||||
export function isSeries(data: any): data is Series {
|
||||
try {
|
||||
ZodSeries.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodType = ZodGenericDataSoftDelete.extend({
|
||||
// Name of the media (this represent the title)
|
||||
name: zod.string().optional(),
|
||||
// Description of the media
|
||||
description: zod.string().optional(),
|
||||
// List of Id of the specific covers
|
||||
covers: zod.array(ZodUUID).optional()
|
||||
});
|
||||
export type Type = zod.infer<typeof ZodType>;
|
||||
export function isType(data: any): data is Type {
|
||||
try {
|
||||
ZodType.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodUserMediaAdvancement = ZodGenericDataSoftDelete.extend({
|
||||
// Foreign Key Id of the user
|
||||
userId: ZodLong.optional(),
|
||||
// Id of the media
|
||||
mediaId: ZodLong.optional(),
|
||||
// Percent of advancement in the media
|
||||
percent: ZodFloat.optional(),
|
||||
// Number of second of advancement in the media
|
||||
time: ZodInteger.optional(),
|
||||
// Number of time this media has been read
|
||||
count: ZodInteger.optional()
|
||||
});
|
||||
export type UserMediaAdvancement = zod.infer<typeof ZodUserMediaAdvancement>;
|
||||
export function isUserMediaAdvancement(data: any): data is UserMediaAdvancement {
|
||||
try {
|
||||
ZodUserMediaAdvancement.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodMediaInformationsDelta = zod.object({
|
||||
});
|
||||
export type MediaInformationsDelta = zod.infer<typeof ZodMediaInformationsDelta>;
|
||||
export function isMediaInformationsDelta(data: any): data is MediaInformationsDelta {
|
||||
try {
|
||||
ZodMediaInformationsDelta.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodUser = ZodGenericDataSoftDelete.extend({
|
||||
login: zod.string().max(128).optional(),
|
||||
lastConnection: ZodTimestamp.optional(),
|
||||
admin: zod.boolean(),
|
||||
blocked: zod.boolean(),
|
||||
removed: zod.boolean(),
|
||||
covers: zod.array(ZodLong).optional()
|
||||
});
|
||||
export type User = zod.infer<typeof ZodUser>;
|
||||
export function isUser(data: any): data is User {
|
||||
try {
|
||||
ZodUser.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodUserKarideo = ZodUser.extend({
|
||||
});
|
||||
export type UserKarideo = zod.infer<typeof ZodUserKarideo>;
|
||||
export function isUserKarideo(data: any): data is UserKarideo {
|
||||
try {
|
||||
ZodUserKarideo.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodUserOut = zod.object({
|
||||
id: ZodLong,
|
||||
login: zod.string().max(255).optional()
|
||||
});
|
||||
export type UserOut = zod.infer<typeof ZodUserOut>;
|
||||
export function isUserOut(data: any): data is UserOut {
|
||||
try {
|
||||
ZodUserOut.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodMedia = ZodGenericDataSoftDelete.extend({
|
||||
name: zod.string().optional(),
|
||||
description: zod.string().optional(),
|
||||
dataId: ZodUUID.optional(),
|
||||
typeId: ZodLong.optional(),
|
||||
seriesId: ZodLong.optional(),
|
||||
seasonId: ZodLong.optional(),
|
||||
episode: ZodInteger.optional(),
|
||||
date: ZodInteger.optional(),
|
||||
time: ZodInteger.optional(),
|
||||
ageLimit: ZodInteger.optional(),
|
||||
covers: zod.array(ZodUUID).optional()
|
||||
});
|
||||
export type Media = zod.infer<typeof ZodMedia>;
|
||||
export function isMedia(data: any): data is Media {
|
||||
try {
|
||||
ZodMedia.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodUUIDGenericData = ZodGenericTiming.extend({
|
||||
// Unique UUID of the object
|
||||
uuid: ZodUUID.readonly().optional()
|
||||
});
|
||||
export type UUIDGenericData = zod.infer<typeof ZodUUIDGenericData>;
|
||||
export function isUUIDGenericData(data: any): data is UUIDGenericData {
|
||||
try {
|
||||
ZodUUIDGenericData.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodUUIDGenericDataSoftDelete = ZodUUIDGenericData.extend({
|
||||
// Deleted state
|
||||
deleted: zod.boolean().readonly().optional()
|
||||
});
|
||||
export type UUIDGenericDataSoftDelete = zod.infer<typeof ZodUUIDGenericDataSoftDelete>;
|
||||
export function isUUIDGenericDataSoftDelete(data: any): data is UUIDGenericDataSoftDelete {
|
||||
try {
|
||||
ZodUUIDGenericDataSoftDelete.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export const ZodData = ZodUUIDGenericDataSoftDelete.extend({
|
||||
// Sha512 of the data
|
||||
sha512: zod.string().max(128).optional(),
|
||||
// Mime -type of the media
|
||||
mimeType: zod.string().max(128).optional(),
|
||||
// Size in Byte of the data
|
||||
size: ZodLong.optional()
|
||||
});
|
||||
export type Data = zod.infer<typeof ZodData>;
|
||||
export function isData(data: any): data is Data {
|
||||
try {
|
||||
ZodData.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data ${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,377 +0,0 @@
|
||||
/** @file
|
||||
* @author Edouard DUPIN
|
||||
* @copyright 2024, Edouard DUPIN, all right reserved
|
||||
* @license MPL-2
|
||||
*/
|
||||
|
||||
import { RestErrorResponse } from "./model"
|
||||
|
||||
export enum HTTPRequestModel {
|
||||
DELETE = 'DELETE',
|
||||
GET = 'GET',
|
||||
PATCH = 'PATCH',
|
||||
POST = 'POST',
|
||||
PUT = 'PUT',
|
||||
}
|
||||
export enum HTTPMimeType {
|
||||
ALL = '*/*',
|
||||
CSV = 'text/csv',
|
||||
IMAGE = 'image/*',
|
||||
IMAGE_JPEG = 'image/jpeg',
|
||||
IMAGE_PNG = 'image/png',
|
||||
JSON = 'application/json',
|
||||
MULTIPART = 'multipart/form-data',
|
||||
OCTET_STREAM = 'application/octet-stream',
|
||||
TEXT_PLAIN = 'text/plain',
|
||||
}
|
||||
|
||||
export interface RESTConfig {
|
||||
// base of the server: http(s)://my.server.org/plop/api/
|
||||
server: string;
|
||||
// Token to access of the data.
|
||||
token?: string;
|
||||
}
|
||||
|
||||
export interface RESTModel {
|
||||
// base of the local API request: "sheep/{id}".
|
||||
endPoint: string;
|
||||
// Type of the request.
|
||||
requestType?: HTTPRequestModel;
|
||||
// Input type requested.
|
||||
accept?: HTTPMimeType;
|
||||
// Content of the local data.
|
||||
contentType?: HTTPMimeType;
|
||||
// Mode of the TOKEN in URL or Header (?token:${tokenInUrl})
|
||||
tokenInUrl?: boolean;
|
||||
}
|
||||
|
||||
export interface ModelResponseHttp {
|
||||
status: number;
|
||||
data: any;
|
||||
}
|
||||
|
||||
export function isArrayOf<TYPE>(
|
||||
data: any,
|
||||
typeChecker: (subData: any) => subData is TYPE,
|
||||
length?: number
|
||||
): data is TYPE[] {
|
||||
if (!Array.isArray(data)) {
|
||||
return false;
|
||||
}
|
||||
if (!data.every(typeChecker)) {
|
||||
return false;
|
||||
}
|
||||
if (length !== undefined && data.length != length) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function isNullOrUndefined(data: any): data is undefined | null {
|
||||
return data === undefined || data === null;
|
||||
}
|
||||
|
||||
// generic progression callback
|
||||
export type ProgressCallback = (count: number, total: number) => void;
|
||||
|
||||
export interface RESTAbort {
|
||||
abort?: () => boolean
|
||||
}
|
||||
|
||||
|
||||
// Rest generic callback have a basic model to upload and download advancement.
|
||||
export interface RESTCallbacks {
|
||||
progressUpload?: ProgressCallback,
|
||||
progressDownload?: ProgressCallback,
|
||||
abortHandle?: RESTAbort,
|
||||
};
|
||||
|
||||
export interface RESTRequestType {
|
||||
restModel: RESTModel,
|
||||
restConfig: RESTConfig,
|
||||
data?: any,
|
||||
params?: object,
|
||||
queries?: object,
|
||||
callback?: RESTCallbacks,
|
||||
};
|
||||
|
||||
function replaceAll(input, searchValue, replaceValue) {
|
||||
return input.split(searchValue).join(replaceValue);
|
||||
}
|
||||
|
||||
function removeTrailingSlashes(input: string): string {
|
||||
if (isNullOrUndefined(input)) {
|
||||
return "undefined";
|
||||
}
|
||||
return input.replace(/\/+$/, '');
|
||||
}
|
||||
function removeLeadingSlashes(input: string): string {
|
||||
if (isNullOrUndefined(input)) {
|
||||
return "";
|
||||
}
|
||||
return input.replace(/^\/+/, '');
|
||||
}
|
||||
|
||||
export function RESTUrl({ restModel, restConfig, params, queries }: RESTRequestType): string {
|
||||
// Create the URL PATH:
|
||||
let generateUrl = `${removeTrailingSlashes(restConfig.server)}/${removeLeadingSlashes(restModel.endPoint)}`;
|
||||
if (params !== undefined) {
|
||||
for (let key of Object.keys(params)) {
|
||||
generateUrl = replaceAll(generateUrl, `{${key}}`, `${params[key]}`);
|
||||
}
|
||||
}
|
||||
if (queries === undefined && (restConfig.token === undefined || restModel.tokenInUrl !== true)) {
|
||||
return generateUrl;
|
||||
}
|
||||
const searchParams = new URLSearchParams();
|
||||
if (queries !== undefined) {
|
||||
for (let key of Object.keys(queries)) {
|
||||
const value = queries[key];
|
||||
if (Array.isArray(value)) {
|
||||
for (let iii = 0; iii < value.length; iii++) {
|
||||
searchParams.append(`${key}`, `${value[iii]}`);
|
||||
}
|
||||
} else {
|
||||
searchParams.append(`${key}`, `${value}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (restConfig.token !== undefined && restModel.tokenInUrl === true) {
|
||||
searchParams.append('Authorization', `Bearer ${restConfig.token}`);
|
||||
}
|
||||
return generateUrl + "?" + searchParams.toString();
|
||||
}
|
||||
|
||||
|
||||
export function fetchProgress(generateUrl: string, { method, headers, body }: {
|
||||
method: HTTPRequestModel,
|
||||
headers: any,
|
||||
body: any,
|
||||
}, { progressUpload, progressDownload, abortHandle }: RESTCallbacks): Promise<Response> {
|
||||
const xhr = {
|
||||
io: new XMLHttpRequest()
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
// Stream the upload progress
|
||||
if (progressUpload) {
|
||||
xhr.io.upload.addEventListener("progress", (dataEvent) => {
|
||||
if (dataEvent.lengthComputable) {
|
||||
//console.log(` ==> has a progress event: ${dataEvent.loaded} / ${dataEvent.total}`);
|
||||
progressUpload(dataEvent.loaded, dataEvent.total);
|
||||
}
|
||||
});
|
||||
}
|
||||
// Stream the download progress
|
||||
if (progressDownload) {
|
||||
xhr.io.addEventListener("progress", (dataEvent) => {
|
||||
if (dataEvent.lengthComputable) {
|
||||
//console.log(` ==> download progress:: ${dataEvent.loaded} / ${dataEvent.total}`);
|
||||
progressUpload(dataEvent.loaded, dataEvent.total);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (abortHandle) {
|
||||
abortHandle.abort = () => {
|
||||
if (xhr.io) {
|
||||
console.log(`Request abort on the XMLHttpRequest: ${generateUrl}`);
|
||||
xhr.io.abort();
|
||||
return true;
|
||||
}
|
||||
console.log(`Request abort (FAIL) on the XMLHttpRequest: ${generateUrl}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// Check if we have an internal Fail:
|
||||
xhr.io.addEventListener('error', () => {
|
||||
xhr.io = undefined;
|
||||
reject(new TypeError('Failed to fetch'))
|
||||
});
|
||||
|
||||
// Capture the end of the stream
|
||||
xhr.io.addEventListener("loadend", () => {
|
||||
if (xhr.io.readyState !== XMLHttpRequest.DONE) {
|
||||
//console.log(` ==> READY state`);
|
||||
return;
|
||||
}
|
||||
if (xhr.io.status === 0) {
|
||||
//the stream has been aborted
|
||||
reject(new TypeError('Fetch has been aborted'));
|
||||
return;
|
||||
}
|
||||
// Stream is ended, transform in a generic response:
|
||||
const response = new Response(xhr.io.response, {
|
||||
status: xhr.io.status,
|
||||
statusText: xhr.io.statusText
|
||||
});
|
||||
const headersArray = replaceAll(xhr.io.getAllResponseHeaders().trim(), "\r\n", "\n").split('\n');
|
||||
headersArray.forEach(function (header) {
|
||||
const firstColonIndex = header.indexOf(':');
|
||||
if (firstColonIndex !== -1) {
|
||||
var key = header.substring(0, firstColonIndex).trim();
|
||||
var value = header.substring(firstColonIndex + 1).trim();
|
||||
response.headers.set(key, value);
|
||||
} else {
|
||||
response.headers.set(header, "");
|
||||
}
|
||||
});
|
||||
xhr.io = undefined;
|
||||
resolve(response);
|
||||
});
|
||||
xhr.io.open(method, generateUrl, true);
|
||||
if (!isNullOrUndefined(headers)) {
|
||||
for (const [key, value] of Object.entries(headers)) {
|
||||
xhr.io.setRequestHeader(key, value as string);
|
||||
}
|
||||
}
|
||||
xhr.io.send(body);
|
||||
});
|
||||
}
|
||||
|
||||
export function RESTRequest({ restModel, restConfig, data, params, queries, callback }: RESTRequestType): Promise<ModelResponseHttp> {
|
||||
// Create the URL PATH:
|
||||
let generateUrl = RESTUrl({ restModel, restConfig, data, params, queries });
|
||||
let headers: any = {};
|
||||
if (restConfig.token !== undefined && restModel.tokenInUrl !== true) {
|
||||
headers['Authorization'] = `Bearer ${restConfig.token}`;
|
||||
}
|
||||
if (restModel.accept !== undefined) {
|
||||
headers['Accept'] = restModel.accept;
|
||||
}
|
||||
if (restModel.requestType !== HTTPRequestModel.GET) {
|
||||
// if Get we have not a content type, the body is empty
|
||||
if (restModel.contentType !== HTTPMimeType.MULTIPART) {
|
||||
// special case of multi-part ==> no content type otherwise the browser does not set the ";bundary=--****"
|
||||
headers['Content-Type'] = restModel.contentType;
|
||||
}
|
||||
}
|
||||
let body = data;
|
||||
if (restModel.contentType === HTTPMimeType.JSON) {
|
||||
body = JSON.stringify(data);
|
||||
} else if (restModel.contentType === HTTPMimeType.MULTIPART) {
|
||||
const formData = new FormData();
|
||||
for (const name in data) {
|
||||
formData.append(name, data[name]);
|
||||
}
|
||||
body = formData
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
let action: undefined | Promise<Response> = undefined;
|
||||
if (isNullOrUndefined(callback)
|
||||
|| (isNullOrUndefined(callback.progressDownload)
|
||||
&& isNullOrUndefined(callback.progressUpload)
|
||||
&& isNullOrUndefined(callback.abortHandle))) {
|
||||
// No information needed: call the generic fetch interface
|
||||
action = fetch(generateUrl, {
|
||||
method: restModel.requestType,
|
||||
headers,
|
||||
body,
|
||||
});
|
||||
} else {
|
||||
// need progression information: call old fetch model (XMLHttpRequest) that permit to keep % upload and % download for HTTP1.x
|
||||
action = fetchProgress(generateUrl, {
|
||||
method: restModel.requestType ?? HTTPRequestModel.GET,
|
||||
headers,
|
||||
body,
|
||||
}, callback);
|
||||
}
|
||||
action.then((response: Response) => {
|
||||
if (response.status >= 200 && response.status <= 299) {
|
||||
const contentType = response.headers.get('Content-Type');
|
||||
if (restModel.accept !== contentType) {
|
||||
reject({
|
||||
time: Date().toString(),
|
||||
status: 901,
|
||||
error: `REST check wrong type: ${restModel.accept} != ${contentType}`,
|
||||
statusMessage: "Fetch error",
|
||||
message: "rest-tools.ts Wrong type in the message return type"
|
||||
} as RestErrorResponse);
|
||||
} else if (contentType === HTTPMimeType.JSON) {
|
||||
response
|
||||
.json()
|
||||
.then((value: any) => {
|
||||
//console.log(`RECEIVE ==> ${response.status}=${ JSON.stringify(value, null, 2)}`);
|
||||
resolve({ status: response.status, data: value });
|
||||
})
|
||||
.catch((reason: any) => {
|
||||
reject({
|
||||
time: Date().toString(),
|
||||
status: 902,
|
||||
error: `REST parse json fail: ${reason}`,
|
||||
statusMessage: "Fetch parse error",
|
||||
message: "rest-tools.ts Wrong message model to parse"
|
||||
} as RestErrorResponse);
|
||||
});
|
||||
} else {
|
||||
resolve({ status: response.status, data: response.body });
|
||||
}
|
||||
} else {
|
||||
reject({
|
||||
time: Date().toString(),
|
||||
status: response.status,
|
||||
error: `${response.body}`,
|
||||
statusMessage: "Fetch code error",
|
||||
message: "rest-tools.ts Wrong return code"
|
||||
} as RestErrorResponse);
|
||||
}
|
||||
}).catch((error: any) => {
|
||||
reject({
|
||||
time: Date(),
|
||||
status: 999,
|
||||
error: error,
|
||||
statusMessage: "Fetch catch error",
|
||||
message: "rest-tools.ts detect an error in the fetch request"
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
export function RESTRequestJson<TYPE>(request: RESTRequestType, checker: (data: any) => data is TYPE): Promise<TYPE> {
|
||||
return new Promise((resolve, reject) => {
|
||||
RESTRequest(request).then((value: ModelResponseHttp) => {
|
||||
if (checker(value.data)) {
|
||||
resolve(value.data);
|
||||
} else {
|
||||
reject({
|
||||
time: Date().toString(),
|
||||
status: 950,
|
||||
error: "REST Fail to verify the data",
|
||||
statusMessage: "API cast ERROR",
|
||||
message: "api.ts Check type as fail"
|
||||
} as RestErrorResponse);
|
||||
}
|
||||
}).catch((reason: RestErrorResponse) => {
|
||||
reject(reason);
|
||||
});
|
||||
});
|
||||
}
|
||||
export function RESTRequestJsonArray<TYPE>(request: RESTRequestType, checker: (data: any) => data is TYPE): Promise<TYPE[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
RESTRequest(request).then((value: ModelResponseHttp) => {
|
||||
if (isArrayOf(value.data, checker)) {
|
||||
resolve(value.data);
|
||||
} else {
|
||||
reject({
|
||||
time: Date().toString(),
|
||||
status: 950,
|
||||
error: "REST Fail to verify the data",
|
||||
statusMessage: "API cast ERROR",
|
||||
message: "api.ts Check type as fail"
|
||||
} as RestErrorResponse);
|
||||
}
|
||||
}).catch((reason: RestErrorResponse) => {
|
||||
reject(reason);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function RESTRequestVoid(request: RESTRequestType): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
RESTRequest(request).then((value: ModelResponseHttp) => {
|
||||
resolve();
|
||||
}).catch((reason: RestErrorResponse) => {
|
||||
reject(reason);
|
||||
});
|
||||
});
|
||||
}
|
@ -2,25 +2,75 @@
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="256"
|
||||
height="256"
|
||||
viewBox="0 0 67.733333 67.733333"
|
||||
version="1.1"
|
||||
id="svg8"
|
||||
inkscape:version="0.92.4 5da689c313, 2019-01-14"
|
||||
sodipodi:docname="ikon.svg"
|
||||
inkscape:version="1.3.2 (091e20ef0f, 2023-11-25, custom)"
|
||||
sodipodi:docname="ikon_gray.svg"
|
||||
inkscape:export-filename="/home/heero/dev/perso/appl_pro/NoKomment/plugin/chrome/ikon.png"
|
||||
inkscape:export-xdpi="7.1250005"
|
||||
inkscape:export-ydpi="7.1250005">
|
||||
inkscape:export-ydpi="7.1250005"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||
<defs
|
||||
id="defs2" />
|
||||
id="defs2">
|
||||
<filter
|
||||
style="color-interpolation-filters:sRGB;"
|
||||
inkscape:label="Drop Shadow"
|
||||
id="filter5338"
|
||||
x="-0.12319682"
|
||||
y="-0.081815216"
|
||||
width="1.2463936"
|
||||
height="1.1636304">
|
||||
<feFlood
|
||||
flood-opacity="1"
|
||||
flood-color="rgb(255,255,255)"
|
||||
result="flood"
|
||||
id="feFlood5328" />
|
||||
<feComposite
|
||||
in="flood"
|
||||
in2="SourceGraphic"
|
||||
operator="in"
|
||||
result="composite1"
|
||||
id="feComposite5330" />
|
||||
<feGaussianBlur
|
||||
in="composite1"
|
||||
stdDeviation="2.1"
|
||||
result="blur"
|
||||
id="feGaussianBlur5332" />
|
||||
<feOffset
|
||||
dx="0"
|
||||
dy="0"
|
||||
result="offset"
|
||||
id="feOffset5334" />
|
||||
<feComposite
|
||||
in="SourceGraphic"
|
||||
in2="offset"
|
||||
operator="over"
|
||||
result="composite2"
|
||||
id="feComposite5336" />
|
||||
</filter>
|
||||
<filter
|
||||
inkscape:collect="always"
|
||||
style="color-interpolation-filters:sRGB"
|
||||
id="filter1159"
|
||||
x="-0.11802406"
|
||||
width="1.2360481"
|
||||
y="-0.078379973"
|
||||
height="1.1567599">
|
||||
<feGaussianBlur
|
||||
inkscape:collect="always"
|
||||
stdDeviation="2.0118255"
|
||||
id="feGaussianBlur1161" />
|
||||
</filter>
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
@ -28,22 +78,31 @@
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="1.979899"
|
||||
inkscape:cx="52.480467"
|
||||
inkscape:cy="138.73493"
|
||||
inkscape:zoom="7.9195959"
|
||||
inkscape:cx="100.06824"
|
||||
inkscape:cy="115.66247"
|
||||
inkscape:document-units="mm"
|
||||
inkscape:current-layer="layer1"
|
||||
showgrid="true"
|
||||
units="px"
|
||||
inkscape:snap-text-baseline="false"
|
||||
inkscape:window-width="1918"
|
||||
inkscape:window-height="1038"
|
||||
inkscape:window-width="3838"
|
||||
inkscape:window-height="2118"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="20"
|
||||
inkscape:window-maximized="1">
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1">
|
||||
<inkscape:grid
|
||||
type="xygrid"
|
||||
id="grid4504" />
|
||||
id="grid4504"
|
||||
originx="0"
|
||||
originy="0"
|
||||
spacingy="1"
|
||||
spacingx="1"
|
||||
units="px"
|
||||
visible="true" />
|
||||
</sodipodi:namedview>
|
||||
<metadata
|
||||
id="metadata5">
|
||||
@ -61,18 +120,24 @@
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(0,-229.26668)">
|
||||
transform="translate(0,-229.26668)"
|
||||
style="display:inline">
|
||||
<g
|
||||
aria-label="K"
|
||||
transform="scale(1.0347881,0.96638145)"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:84.55024719px;line-height:1.25;font-family:'DejaVu Sans Mono';-inkscape-font-specification:'DejaVu Sans Mono, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:2.11376619"
|
||||
id="text821">
|
||||
id="text821-7"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:84.55024719px;line-height:1.25;font-family:'DejaVu Sans Mono';-inkscape-font-specification:'DejaVu Sans Mono, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;opacity:1;fill:#2b3137;fill-opacity:1;stroke:none;stroke-width:2.11376619;stroke-opacity:1"
|
||||
transform="matrix(0.8407653,0,0,0.83753055,-37.28971,3.4402954)"
|
||||
aria-label="K">
|
||||
<path
|
||||
d="m 12.784421,241.62303 h 8.949095 v 27.37877 l 25.568842,-27.37877 6.39221,6.84469 -20.455074,21.90302 20.455074,27.37876 -6.39221,5.47576 -19.176632,-27.37877 -6.39221,6.84469 0,20.53408 h -8.949095 z"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:84.55024719px;font-family:'DejaVu Sans Mono';-inkscape-font-specification:'DejaVu Sans Mono, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;stroke-width:2.11376619;fill:#ff0000;fill-opacity:1"
|
||||
id="path823"
|
||||
sodipodi:nodetypes="ccccccccccccccccccccccccccccccccccccssccccssscccccccccccccccsscccccsssccccccccccccccssscsscsss"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="ccccccccccccc" />
|
||||
id="path823-5"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:84.5502px;font-family:'DejaVu Sans Mono';-inkscape-font-specification:'DejaVu Sans Mono, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;opacity:0.775;fill:#2b3137;fill-opacity:1;stroke-width:2.11377;filter:url(#filter5338)"
|
||||
d="m 65.200546,279.9533 h 8.949095 v 27.37877 l 25.568842,-27.37877 6.392207,6.84469 -20.455071,21.90302 20.455071,27.37876 -6.392207,5.47576 -19.176632,-27.37877 -6.39221,6.84469 v 20.53408 h -8.949095 z m 3.913007,39.48974 c -0.26846,-0.43226 -0.592093,-0.92734 -0.887692,-1.37494 l 0.02075,-0.022 c 0.456433,0.27687 0.977308,0.56258 1.422211,0.80755 l 0.407045,0.22999 -0.710959,0.75468 z m 0.591316,3.01367 0.778423,-0.82629 -0.642969,-1.02783 1.022328,-1.08519 1.052938,0.59264 0.80956,-0.85934 -4.631256,-2.27837 -0.913349,0.96952 z m 6.54394,-6.94635 0.762854,-0.80977 -1.289542,-1.22424 0.399591,-0.42416 1.938227,0.53566 0.856265,-0.90892 -2.195068,-0.54992 c 0.187965,-0.54159 0.09714,-1.11827 -0.429654,-1.61839 -0.850547,-0.80747 -1.705543,-0.42955 -2.421693,0.33064 l -1.198771,1.27249 z m -1.168715,-2.64352 -1.004195,-0.95334 0.373643,-0.39662 c 0.40478,-0.42967 0.738271,-0.54092 1.089465,-0.20751 0.351195,0.33341 0.31951,0.73118 -0.08527,1.16085 z m 7.416862,-3.98885 2.345648,-2.48989 -0.68044,-0.64598 -0.788801,0.83731 -2.216914,-2.10465 0.788802,-0.83731 -0.680439,-0.64598 -2.345647,2.48989 0.680439,0.64598 0.788802,-0.83731 2.216913,2.10465 -0.788801,0.83731 z m 6.206624,-6.58829 0.980813,-1.04113 c 0.939292,-0.99705 1.006613,-2.22712 -0.222565,-3.39405 -1.229181,-1.16694 -2.41593,-0.99961 -3.401932,0.047 l -0.934107,0.99155 z m 0.115038,-1.43521 -2.271787,-2.15674 0.124547,-0.13221 c 0.52932,-0.56189 1.150798,-0.69191 2.006834,0.12078 0.856035,0.81268 0.794297,1.47411 0.264954,2.03597 z m 6.226516,-5.29631 2.293753,-2.4348 -0.68044,-0.64599 -1.525707,1.61953 -0.823112,-0.78143 1.250665,-1.32757 -0.674951,-0.64077 -1.250666,1.32757 -0.71885,-0.68245 1.473813,-1.56444 -0.680438,-0.64598 -2.241858,2.37972 z m 7.372646,-7.6936 c 0.80437,-0.85384 0.71213,-2.05798 -0.51156,-3.2197 -1.19626,-1.13568 -2.393561,-1.15578 -3.197932,-0.30194 -0.80437,0.85383 -0.717618,2.05277 0.478638,3.18844 1.223694,1.16173 2.426484,1.18703 3.230854,0.3332 z m -0.6969,-0.66161 c -0.35289,0.37458 -0.97662,0.23116 -1.750342,-0.50339 -0.7408,-0.70328 -0.918234,-1.32045 -0.565349,-1.69503 0.352885,-0.37459 0.976611,-0.23116 1.717411,0.47213 0.77373,0.73454 0.95116,1.3517 0.59828,1.72629 z" />
|
||||
</g>
|
||||
<g
|
||||
id="text821"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:84.55024719px;line-height:1.25;font-family:'DejaVu Sans Mono';-inkscape-font-specification:'DejaVu Sans Mono, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#2b3137;fill-opacity:1;stroke:none;stroke-width:2.11376619;stroke-opacity:1;filter:url(#filter1159)"
|
||||
transform="matrix(1.0347881,0,0,0.96638144,-54.239583,-37.041665)"
|
||||
aria-label="K" />
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 3.2 KiB After Width: | Height: | Size: 7.5 KiB |
134
front/src/back-api/api/data-resource.ts
Normal file
134
front/src/back-api/api/data-resource.ts
Normal file
@ -0,0 +1,134 @@
|
||||
/**
|
||||
* Interface of the server (auto-generated code)
|
||||
*/
|
||||
import {
|
||||
HTTPMimeType,
|
||||
HTTPRequestModel,
|
||||
RESTConfig,
|
||||
RESTRequestJson,
|
||||
RESTRequestVoid,
|
||||
} from "../rest-tools";
|
||||
|
||||
import {
|
||||
ObjectId,
|
||||
} from "../model";
|
||||
|
||||
export namespace DataResource {
|
||||
|
||||
/**
|
||||
* Get back some data from the data environment (with a beautiful name (permit download with basic name)
|
||||
*/
|
||||
export function retrieveDataFull({
|
||||
restConfig,
|
||||
queries,
|
||||
params,
|
||||
headers,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
queries: {
|
||||
Authorization?: string,
|
||||
},
|
||||
params: {
|
||||
name: string,
|
||||
oid: ObjectId,
|
||||
},
|
||||
headers?: {
|
||||
Range?: string,
|
||||
},
|
||||
}): Promise<object> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/data/{oid}/{name}",
|
||||
requestType: HTTPRequestModel.GET,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
queries,
|
||||
headers,
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Get back some data from the data environment
|
||||
*/
|
||||
export function retrieveDataId({
|
||||
restConfig,
|
||||
queries,
|
||||
params,
|
||||
headers,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
queries: {
|
||||
Authorization?: string,
|
||||
},
|
||||
params: {
|
||||
oid: ObjectId,
|
||||
},
|
||||
headers?: {
|
||||
Range: string,
|
||||
},
|
||||
}): Promise<object> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/data/{oid}",
|
||||
requestType: HTTPRequestModel.GET,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
queries,
|
||||
headers,
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Get a thumbnail of from the data environment (if resize is possible)
|
||||
*/
|
||||
export function retrieveDataThumbnailId({
|
||||
restConfig,
|
||||
queries,
|
||||
params,
|
||||
headers,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
queries: {
|
||||
Authorization?: string,
|
||||
},
|
||||
params: {
|
||||
oid: ObjectId,
|
||||
},
|
||||
headers?: {
|
||||
Range: string,
|
||||
},
|
||||
}): Promise<object> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/data/thumbnail/{oid}",
|
||||
requestType: HTTPRequestModel.GET,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
queries,
|
||||
headers,
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Insert a new data in the data environment
|
||||
*/
|
||||
export function uploadFile({
|
||||
restConfig,
|
||||
data,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
data: {
|
||||
file: File,
|
||||
},
|
||||
}): Promise<void> {
|
||||
return RESTRequestVoid({
|
||||
restModel: {
|
||||
endPoint: "/data//upload/",
|
||||
requestType: HTTPRequestModel.POST,
|
||||
contentType: HTTPMimeType.MULTIPART,
|
||||
},
|
||||
restConfig,
|
||||
data,
|
||||
});
|
||||
};
|
||||
}
|
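The generated DataResource namespace above wraps the raw data endpoints. A hypothetical usage sketch follows (not part of the diff; the import paths, the server URL, and the UUID value are placeholders, and ObjectId is assumed to be a UUID string in the new model):

// Hypothetical caller somewhere under front/src
import { DataResource } from './back-api/api';
import { RESTConfig } from './back-api/rest-tools';

const restConfig: RESTConfig = {
  // Placeholder base URL of the backend API.
  server: 'https://example.org/karideo/api',
};

// Fetch a thumbnail for a given data object; optional queries and headers are left empty.
DataResource.retrieveDataThumbnailId({
  restConfig,
  queries: {},
  params: { oid: '00000000-0000-0000-0000-000000000000' },
}).then((thumbnail) => console.log('thumbnail payload:', thumbnail));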
6
front/src/back-api/api/front.ts
Normal file
@ -0,0 +1,6 @@
/**
 * Interface of the server (auto-generated code)
 */
export namespace Front {

}
@ -1,20 +1,18 @@
/**
 * API of the server (auto-generated code)
 * Interface of the server (auto-generated code)
 */
import {
  HTTPMimeType,
  HTTPRequestModel,
  ModelResponseHttp,
  RESTCallbacks,
  RESTConfig,
  RESTRequestJson,
  RESTRequestJsonArray,
  RESTRequestVoid
} from "./rest-tools"
  HTTPMimeType,
  HTTPRequestModel,
  RESTConfig,
  RESTRequestJson,
} from "../rest-tools";

import {
  HealthResult,
  isHealthResult,
} from "./model"
  HealthResult,
  isHealthResult,
} from "../model";

export namespace HealthCheck {

  /**
@ -27,7 +25,7 @@ export namespace HealthCheck {
  }): Promise<HealthResult> {
    return RESTRequestJson({
      restModel: {
        endPoint: "/health_check",
        endPoint: "/health_check/",
        requestType: HTTPRequestModel.GET,
        accept: HTTPMimeType.JSON,
      },
12
front/src/back-api/api/index.ts
Normal file
@ -0,0 +1,12 @@
/**
 * Interface of the server (auto-generated code)
 */
export * from "./data-resource"
export * from "./front"
export * from "./health-check"
export * from "./media-resource"
export * from "./season-resource"
export * from "./series-resource"
export * from "./type-resource"
export * from "./user-media-advancement-resource"
export * from "./user-resource"
213
front/src/back-api/api/media-resource.ts
Normal file
213
front/src/back-api/api/media-resource.ts
Normal file
@ -0,0 +1,213 @@
|
||||
/**
|
||||
* Interface of the server (auto-generated code)
|
||||
*/
|
||||
import {
|
||||
HTTPMimeType,
|
||||
HTTPRequestModel,
|
||||
RESTCallbacks,
|
||||
RESTConfig,
|
||||
RESTRequestJson,
|
||||
RESTRequestVoid,
|
||||
} from "../rest-tools";
|
||||
|
||||
import { z as zod } from "zod"
|
||||
import {
|
||||
Long,
|
||||
Media,
|
||||
MediaUpdate,
|
||||
ObjectId,
|
||||
ZodMedia,
|
||||
isMedia,
|
||||
} from "../model";
|
||||
|
||||
export namespace MediaResource {
|
||||
|
||||
/**
|
||||
* Get a specific Media with his ID
|
||||
*/
|
||||
export function get({
|
||||
restConfig,
|
||||
params,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
params: {
|
||||
id: Long,
|
||||
},
|
||||
}): Promise<Media> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/media/{id}",
|
||||
requestType: HTTPRequestModel.GET,
|
||||
accept: HTTPMimeType.JSON,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
}, isMedia);
|
||||
};
|
||||
|
||||
export const ZodGetsTypeReturn = zod.array(ZodMedia);
|
||||
export type GetsTypeReturn = zod.infer<typeof ZodGetsTypeReturn>;
|
||||
|
||||
export function isGetsTypeReturn(data: any): data is GetsTypeReturn {
|
||||
try {
|
||||
ZodGetsTypeReturn.parse(data);
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
console.log(`Fail to parse data type='ZodGetsTypeReturn' error=${e}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all Media
|
||||
*/
|
||||
export function gets({
|
||||
restConfig,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
}): Promise<GetsTypeReturn> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/media/",
|
||||
requestType: HTTPRequestModel.GET,
|
||||
accept: HTTPMimeType.JSON,
|
||||
},
|
||||
restConfig,
|
||||
}, isGetsTypeReturn);
|
||||
};
|
||||
/**
|
||||
* Modify a specific Media
|
||||
*/
|
||||
export function patch({
|
||||
restConfig,
|
||||
params,
|
||||
data,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
params: {
|
||||
id: Long,
|
||||
},
|
||||
data: MediaUpdate,
|
||||
}): Promise<Media> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/media/{id}",
|
||||
requestType: HTTPRequestModel.PUT,
|
||||
contentType: HTTPMimeType.JSON,
|
||||
accept: HTTPMimeType.JSON,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
data,
|
||||
}, isMedia);
|
||||
};
|
||||
/**
|
||||
* Remove a specific Media
|
||||
*/
|
||||
export function remove({
|
||||
restConfig,
|
||||
params,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
params: {
|
||||
id: Long,
|
||||
},
|
||||
}): Promise<void> {
|
||||
return RESTRequestVoid({
|
||||
restModel: {
|
||||
endPoint: "/media/{id}",
|
||||
requestType: HTTPRequestModel.DELETE,
|
||||
contentType: HTTPMimeType.TEXT_PLAIN,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Remove a specific cover of a media
|
||||
*/
|
||||
export function removeCover({
|
||||
restConfig,
|
||||
params,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
params: {
|
||||
coverId: ObjectId,
|
||||
id: Long,
|
||||
},
|
||||
}): Promise<Media> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/media/{id}/cover/{coverId}",
|
||||
requestType: HTTPRequestModel.DELETE,
|
||||
contentType: HTTPMimeType.TEXT_PLAIN,
|
||||
accept: HTTPMimeType.JSON,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
}, isMedia);
|
||||
};
|
||||
/**
|
||||
* Upload a new season cover media
|
||||
*/
|
||||
export function uploadCover({
|
||||
restConfig,
|
||||
params,
|
||||
data,
|
||||
callbacks,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
params: {
|
||||
id: Long,
|
||||
},
|
||||
data: {
|
||||
file: File,
|
||||
},
|
||||
callbacks?: RESTCallbacks,
|
||||
}): Promise<Media> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/media/{id}/cover",
|
||||
requestType: HTTPRequestModel.POST,
|
||||
contentType: HTTPMimeType.MULTIPART,
|
||||
accept: HTTPMimeType.JSON,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
data,
|
||||
callbacks,
|
||||
}, isMedia);
|
||||
};
|
||||
/**
|
||||
* Create a new Media
|
||||
*/
|
||||
export function uploadMedia({
|
||||
restConfig,
|
||||
data,
|
||||
callbacks,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
data: {
|
||||
file: File,
|
||||
universeId: string,
|
||||
season: string,
|
||||
typeId: string,
|
||||
episode: string,
|
||||
title: string,
|
||||
seriesId: string,
|
||||
},
|
||||
callbacks?: RESTCallbacks,
|
||||
}): Promise<Media> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/media/",
|
||||
requestType: HTTPRequestModel.POST,
|
||||
contentType: HTTPMimeType.MULTIPART,
|
||||
accept: HTTPMimeType.JSON,
|
||||
},
|
||||
restConfig,
|
||||
data,
|
||||
callbacks,
|
||||
}, isMedia);
|
||||
};
|
||||
}
|
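As with the other generated resources, MediaResource can be driven directly from application code. A hypothetical sketch follows (not part of the diff; it assumes MediaUpdate accepts a partial object with a name field and that returned Media entries carry an id):

// Hypothetical caller somewhere under front/src
import { MediaResource } from './back-api/api';
import { RESTConfig } from './back-api/rest-tools';

async function renameFirstMedia(restConfig: RESTConfig, newName: string) {
  // List every media, then rename the first one through the PUT endpoint.
  const medias = await MediaResource.gets({ restConfig });
  if (medias.length === 0 || medias[0].id === undefined) {
    return;
  }
  return MediaResource.patch({
    restConfig,
    params: { id: medias[0].id },
    data: { name: newName },
  });
}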
204
front/src/back-api/api/season-resource.ts
Normal file
204
front/src/back-api/api/season-resource.ts
Normal file
@ -0,0 +1,204 @@
|
||||
/**
|
||||
* Interface of the server (auto-generated code)
|
||||
*/
|
||||
import {
|
||||
HTTPMimeType,
|
||||
HTTPRequestModel,
|
||||
RESTCallbacks,
|
||||
RESTConfig,
|
||||
RESTRequestJson,
|
||||
RESTRequestVoid,
|
||||
} from "../rest-tools";
|
||||
|
||||
import { z as zod } from "zod"
|
||||
import {
|
||||
Long,
|
||||
ObjectId,
|
||||
Season,
|
||||
SeasonCreate,
|
||||
SeasonUpdate,
|
||||
ZodSeason,
|
||||
isSeason,
|
||||
} from "../model";
|
||||
|
||||
export namespace SeasonResource {
|
||||
|
||||
/**
|
||||
* Get all season
|
||||
*/
|
||||
export function get({
|
||||
restConfig,
|
||||
params,
|
||||
}: {
|
||||
restConfig: RESTConfig,
|
||||
params: {
|
||||
id: Long,
|
||||
},
|
||||
}): Promise<Season> {
|
||||
return RESTRequestJson({
|
||||
restModel: {
|
||||
endPoint: "/season/{id}",
|
||||
requestType: HTTPRequestModel.GET,
|
||||
contentType: HTTPMimeType.JSON,
|
||||
accept: HTTPMimeType.JSON,
|
||||
},
|
||||
restConfig,
|
||||
params,
|
||||
}, isSeason);
|
||||
};
|
||||
|
||||
    export const ZodGetsTypeReturn = zod.array(ZodSeason);
    export type GetsTypeReturn = zod.infer<typeof ZodGetsTypeReturn>;

    export function isGetsTypeReturn(data: any): data is GetsTypeReturn {
        try {
            ZodGetsTypeReturn.parse(data);
            return true;
        } catch (e: any) {
            console.log(`Fail to parse data type='ZodGetsTypeReturn' error=${e}`);
            return false;
        }
    }

    /**
     * Get all Seasons
     */
    export function gets({
        restConfig,
    }: {
        restConfig: RESTConfig,
    }): Promise<GetsTypeReturn> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/season/",
                requestType: HTTPRequestModel.GET,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
        }, isGetsTypeReturn);
    };
    /**
     * Modify a specific season
     */
    export function patch({
        restConfig,
        params,
        data,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
        data: SeasonUpdate,
    }): Promise<Season> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/season/{id}",
                requestType: HTTPRequestModel.PUT,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
            data,
        }, isSeason);
    };
    /**
     * Create a new season
     */
    export function post({
        restConfig,
        data,
    }: {
        restConfig: RESTConfig,
        data: SeasonCreate,
    }): Promise<Season> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/season/",
                requestType: HTTPRequestModel.POST,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            data,
        }, isSeason);
    };
    /**
     * Remove a specific season
     */
    export function remove({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
    }): Promise<void> {
        return RESTRequestVoid({
            restModel: {
                endPoint: "/season/{id}",
                requestType: HTTPRequestModel.DELETE,
                contentType: HTTPMimeType.TEXT_PLAIN,
            },
            restConfig,
            params,
        });
    };
    /**
     * Remove a specific cover of a season
     */
    export function removeCover({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            coverId: ObjectId,
            id: Long,
        },
    }): Promise<Season> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/season/{id}/cover/{coverId}",
                requestType: HTTPRequestModel.DELETE,
                contentType: HTTPMimeType.TEXT_PLAIN,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
        }, isSeason);
    };
    /**
     * Upload a new cover for a season
     */
    export function uploadCover({
        restConfig,
        params,
        data,
        callbacks,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
        data: {
            file: File,
        },
        callbacks?: RESTCallbacks,
    }): Promise<Season> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/season/{id}/cover",
                requestType: HTTPRequestModel.POST,
                contentType: HTTPMimeType.MULTIPART,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
            data,
            callbacks,
        }, isSeason);
    };
}
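Each generated namespace above exposes one typed helper per REST route, paired with a zod type-guard that is handed to RESTRequestJson. A minimal usage sketch follows; it assumes the barrel export in front/src/back-api/index.ts re-exports the resource namespaces and that Season carries the numeric id declared in the generic-data model, and the helper name replaceFirstSeasonCover is purely hypothetical:

import { RESTConfig, Season, SeasonResource } from "./back-api";

// Hypothetical helper: list every season, then push a new cover for the first one.
async function replaceFirstSeasonCover(restConfig: RESTConfig, file: File): Promise<Season | undefined> {
    // gets() hands the isGetsTypeReturn guard (zod.array(ZodSeason)) to RESTRequestJson for runtime checking.
    const seasons = await SeasonResource.gets({ restConfig });
    if (seasons.length === 0) {
        return undefined;
    }
    // uploadCover() posts the file as multipart/form-data; the optional RESTCallbacks argument is omitted here.
    return SeasonResource.uploadCover({
        restConfig,
        params: { id: seasons[0].id },
        data: { file },
    });
}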
204
front/src/back-api/api/series-resource.ts
Normal file
@ -0,0 +1,204 @@
/**
 * Interface of the server (auto-generated code)
 */
import {
    HTTPMimeType,
    HTTPRequestModel,
    RESTCallbacks,
    RESTConfig,
    RESTRequestJson,
    RESTRequestVoid,
} from "../rest-tools";

import { z as zod } from "zod"
import {
    Long,
    ObjectId,
    Series,
    SeriesCreate,
    SeriesUpdate,
    ZodSeries,
    isSeries,
} from "../model";

export namespace SeriesResource {

    /**
     * Get a specific Series by its ID
     */
    export function get({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
    }): Promise<Series> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/series/{id}",
                requestType: HTTPRequestModel.GET,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
        }, isSeries);
    };

    export const ZodGetsTypeReturn = zod.array(ZodSeries);
    export type GetsTypeReturn = zod.infer<typeof ZodGetsTypeReturn>;

    export function isGetsTypeReturn(data: any): data is GetsTypeReturn {
        try {
            ZodGetsTypeReturn.parse(data);
            return true;
        } catch (e: any) {
            console.log(`Fail to parse data type='ZodGetsTypeReturn' error=${e}`);
            return false;
        }
    }

    /**
     * Get all Series
     */
    export function gets({
        restConfig,
    }: {
        restConfig: RESTConfig,
    }): Promise<GetsTypeReturn> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/series/",
                requestType: HTTPRequestModel.GET,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
        }, isGetsTypeReturn);
    };
    /**
     * Modify a specific Series
     */
    export function patch({
        restConfig,
        params,
        data,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
        data: SeriesUpdate,
    }): Promise<Series> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/series/{id}",
                requestType: HTTPRequestModel.PUT,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
            data,
        }, isSeries);
    };
    /**
     * Create a new Series
     */
    export function post({
        restConfig,
        data,
    }: {
        restConfig: RESTConfig,
        data: SeriesCreate,
    }): Promise<Series> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/series/",
                requestType: HTTPRequestModel.POST,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            data,
        }, isSeries);
    };
    /**
     * Remove a specific Series
     */
    export function remove({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
    }): Promise<void> {
        return RESTRequestVoid({
            restModel: {
                endPoint: "/series/{id}",
                requestType: HTTPRequestModel.DELETE,
                contentType: HTTPMimeType.TEXT_PLAIN,
            },
            restConfig,
            params,
        });
    };
    /**
     * Remove a specific cover of a Series
     */
    export function removeCover({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            coverId: ObjectId,
            id: Long,
        },
    }): Promise<Series> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/series/{id}/cover/{coverId}",
                requestType: HTTPRequestModel.DELETE,
                contentType: HTTPMimeType.TEXT_PLAIN,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
        }, isSeries);
    };
    /**
     * Upload a new cover for a Series
     */
    export function uploadCover({
        restConfig,
        params,
        data,
        callbacks,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
        data: {
            file: File,
        },
        callbacks?: RESTCallbacks,
    }): Promise<Series> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/series/{id}/cover",
                requestType: HTTPRequestModel.POST,
                contentType: HTTPMimeType.MULTIPART,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
            data,
            callbacks,
        }, isSeries);
    };
}
204
front/src/back-api/api/type-resource.ts
Normal file
@ -0,0 +1,204 @@
/**
 * Interface of the server (auto-generated code)
 */
import {
    HTTPMimeType,
    HTTPRequestModel,
    RESTCallbacks,
    RESTConfig,
    RESTRequestJson,
    RESTRequestVoid,
} from "../rest-tools";

import { z as zod } from "zod"
import {
    Long,
    ObjectId,
    Type,
    TypeCreate,
    TypeUpdate,
    ZodType,
    isType,
} from "../model";

export namespace TypeResource {

    /**
     * Get a specific Type by its ID
     */
    export function get({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
    }): Promise<Type> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/type/{id}",
                requestType: HTTPRequestModel.GET,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
        }, isType);
    };

    export const ZodGetsTypeReturn = zod.array(ZodType);
    export type GetsTypeReturn = zod.infer<typeof ZodGetsTypeReturn>;

    export function isGetsTypeReturn(data: any): data is GetsTypeReturn {
        try {
            ZodGetsTypeReturn.parse(data);
            return true;
        } catch (e: any) {
            console.log(`Fail to parse data type='ZodGetsTypeReturn' error=${e}`);
            return false;
        }
    }

    /**
     * Get all Types
     */
    export function gets({
        restConfig,
    }: {
        restConfig: RESTConfig,
    }): Promise<GetsTypeReturn> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/type/",
                requestType: HTTPRequestModel.GET,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
        }, isGetsTypeReturn);
    };
    /**
     * Modify a specific Type
     */
    export function patch({
        restConfig,
        params,
        data,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
        data: TypeUpdate,
    }): Promise<Type> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/type/{id}",
                requestType: HTTPRequestModel.PUT,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
            data,
        }, isType);
    };
    /**
     * Create a new Type
     */
    export function post({
        restConfig,
        data,
    }: {
        restConfig: RESTConfig,
        data: TypeCreate,
    }): Promise<Type> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/type/",
                requestType: HTTPRequestModel.POST,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            data,
        }, isType);
    };
    /**
     * Remove a specific Type
     */
    export function remove({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
    }): Promise<void> {
        return RESTRequestVoid({
            restModel: {
                endPoint: "/type/{id}",
                requestType: HTTPRequestModel.DELETE,
                contentType: HTTPMimeType.TEXT_PLAIN,
            },
            restConfig,
            params,
        });
    };
    /**
     * Remove a specific cover of a type
     */
    export function removeCover({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            coverId: ObjectId,
            id: Long,
        },
    }): Promise<Type> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/type/{id}/cover/{coverId}",
                requestType: HTTPRequestModel.DELETE,
                contentType: HTTPMimeType.TEXT_PLAIN,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
        }, isType);
    };
    /**
     * Upload a new cover for a Type
     */
    export function uploadCover({
        restConfig,
        params,
        data,
        callbacks,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
        data: {
            file: File,
        },
        callbacks?: RESTCallbacks,
    }): Promise<Type> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/type/{id}/cover",
                requestType: HTTPRequestModel.POST,
                contentType: HTTPMimeType.MULTIPART,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
            data,
            callbacks,
        }, isType);
    };
}
124
front/src/back-api/api/user-media-advancement-resource.ts
Normal file
@ -0,0 +1,124 @@
/**
 * Interface of the server (auto-generated code)
 */
import {
    HTTPMimeType,
    HTTPRequestModel,
    RESTConfig,
    RESTRequestJson,
    RESTRequestVoid,
} from "../rest-tools";

import { z as zod } from "zod"
import {
    Long,
    MediaInformationsDelta,
    UserMediaAdvancement,
    ZodUserMediaAdvancement,
    isUserMediaAdvancement,
} from "../model";

export namespace UserMediaAdvancementResource {

    /**
     * Get a specific user advancement by its ID
     */
    export function get({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
    }): Promise<UserMediaAdvancement> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/advancement/{id}",
                requestType: HTTPRequestModel.GET,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
        }, isUserMediaAdvancement);
    };

    export const ZodGetsTypeReturn = zod.array(ZodUserMediaAdvancement);
    export type GetsTypeReturn = zod.infer<typeof ZodGetsTypeReturn>;

    export function isGetsTypeReturn(data: any): data is GetsTypeReturn {
        try {
            ZodGetsTypeReturn.parse(data);
            return true;
        } catch (e: any) {
            console.log(`Fail to parse data type='ZodGetsTypeReturn' error=${e}`);
            return false;
        }
    }

    /**
     * Get all user advancements
     */
    export function gets({
        restConfig,
    }: {
        restConfig: RESTConfig,
    }): Promise<GetsTypeReturn> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/advancement/",
                requestType: HTTPRequestModel.GET,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
        }, isGetsTypeReturn);
    };
    /**
     * Modify a user advancement
     */
    export function patch({
        restConfig,
        params,
        data,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
        data: MediaInformationsDelta,
    }): Promise<UserMediaAdvancement> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/advancement/{id}",
                requestType: HTTPRequestModel.PUT,
                contentType: HTTPMimeType.JSON,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
            params,
            data,
        }, isUserMediaAdvancement);
    };
    /**
     * Remove a specific user advancement
     */
    export function remove({
        restConfig,
        params,
    }: {
        restConfig: RESTConfig,
        params: {
            id: Long,
        },
    }): Promise<void> {
        return RESTRequestVoid({
            restModel: {
                endPoint: "/advancement/{id}",
                requestType: HTTPRequestModel.DELETE,
                contentType: HTTPMimeType.TEXT_PLAIN,
            },
            restConfig,
            params,
        });
    };
}
@ -1,23 +1,23 @@
/**
 * API of the server (auto-generated code)
 * Interface of the server (auto-generated code)
 */
import {
    HTTPMimeType,
    HTTPRequestModel,
    ModelResponseHttp,
    RESTCallbacks,
    RESTConfig,
    RESTRequestJson,
    RESTRequestJsonArray,
    RESTRequestVoid
} from "./rest-tools"
    HTTPMimeType,
    HTTPRequestModel,
    RESTConfig,
    RESTRequestJson,
} from "../rest-tools";

import { z as zod } from "zod"
import {
    Long,
    UserKarideo,
    UserOut,
    isUserKarideo,
    isUserOut,
} from "./model"
    Long,
    UserKarideo,
    UserOut,
    ZodUserKarideo,
    isUserKarideo,
    isUserOut,
} from "../model";

export namespace UserResource {

    /**
@ -42,23 +42,6 @@ export namespace UserResource {
            params,
        }, isUserKarideo);
    };
    /**
     * Get all the users
     */
    export function gets({
        restConfig,
    }: {
        restConfig: RESTConfig,
    }): Promise<UserKarideo[]> {
        return RESTRequestJsonArray({
            restModel: {
                endPoint: "/users",
                requestType: HTTPRequestModel.GET,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
        }, isUserKarideo);
    };
    /**
     * Get the user personal data
     */
@ -76,4 +59,35 @@ export namespace UserResource {
            restConfig,
        }, isUserOut);
    };

    export const ZodGetsTypeReturn = zod.array(ZodUserKarideo);
    export type GetsTypeReturn = zod.infer<typeof ZodGetsTypeReturn>;

    export function isGetsTypeReturn(data: any): data is GetsTypeReturn {
        try {
            ZodGetsTypeReturn.parse(data);
            return true;
        } catch (e: any) {
            console.log(`Fail to parse data type='ZodGetsTypeReturn' error=${e}`);
            return false;
        }
    }

    /**
     * Get all the users
     */
    export function gets({
        restConfig,
    }: {
        restConfig: RESTConfig,
    }): Promise<GetsTypeReturn> {
        return RESTRequestJson({
            restModel: {
                endPoint: "/users/",
                requestType: HTTPRequestModel.GET,
                accept: HTTPMimeType.JSON,
            },
            restConfig,
        }, isGetsTypeReturn);
    };
}
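The user-resource change above shows the pattern applied across these files: the old RESTRequestJsonArray call with a per-item guard is replaced by a zod.array(...) schema plus a generated isGetsTypeReturn guard handed to RESTRequestJson. A minimal sketch of that guard pattern in isolation, built around a hypothetical ZodThing schema rather than any schema from this repository:

import { z as zod } from "zod";

// Hypothetical element schema standing in for ZodUserKarideo, ZodSeason, etc.
const ZodThing = zod.object({ id: zod.number(), name: zod.string() });
const ZodThingList = zod.array(ZodThing);
type ThingList = zod.infer<typeof ZodThingList>;

// Same shape as the generated isGetsTypeReturn guards: parse once, log failures, return a boolean.
function isThingList(data: any): data is ThingList {
    try {
        ZodThingList.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodThingList' error=${e}`);
        return false;
    }
}

// A caller can then validate an untyped payload before using it as ThingList.
const payload: any = JSON.parse('[{"id": 1, "name": "first"}]');
if (isThingList(payload)) {
    console.log(payload.map((element) => element.name));
}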
7
front/src/back-api/index.ts
Normal file
@ -0,0 +1,7 @@
/**
 * Interface of the server (auto-generated code)
 */
export * from "./model";
export * from "./api";
export * from "./rest-tools";

40
front/src/back-api/model/data.ts
Normal file
@ -0,0 +1,40 @@
/**
 * Interface of the server (auto-generated code)
 */
import { z as zod } from "zod";

import { ZodOIDGenericDataSoftDelete, ZodOIDGenericDataSoftDeleteUpdate, ZodOIDGenericDataSoftDeleteCreate } from "./oid-generic-data-soft-delete";
import { ZodObjectId } from "./object-id";
import { ZodLong } from "./long";

export const ZodData = ZodOIDGenericDataSoftDelete.extend({
    /**
     * Sha512 of the data
     */
    sha512: zod.string().max(512).readonly(),
    /**
     * Mime-type of the media
     */
    mimeType: zod.string().max(512).readonly(),
    /**
     * Size in bytes of the data
     */
    size: ZodLong.readonly(),
    /**
     * Unique ObjectID of the object
     */
    oid: ZodObjectId.readonly(),

});

export type Data = zod.infer<typeof ZodData>;

export function isData(data: any): data is Data {
    try {
        ZodData.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodData' error=${e}`);
        return false;
    }
}
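Because ZodData marks every field readonly and pulls its id/size/oid types from sibling model files, the isData guard is the natural entry point for untrusted JSON. A short sketch of how a caller might use it, assuming the caller sits next to the model file (the describeData helper is hypothetical):

import { Data, isData } from "./data";

// Hypothetical handler: narrow an unknown payload to Data before touching its fields.
function describeData(payload: any): string {
    if (!isData(payload)) {
        return "not a valid Data object";
    }
    // After the guard, TypeScript knows sha512, mimeType, size and oid exist.
    return `${payload.oid}: ${payload.mimeType} (${payload.size} bytes, sha512=${payload.sha512})`;
}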
8
front/src/back-api/model/float.ts
Normal file
@ -0,0 +1,8 @@
/**
 * Interface of the server (auto-generated code)
 */
import { z as zod } from "zod";


export const ZodFloat = zod.number();
export type Float = zod.infer<typeof ZodFloat>;
52
front/src/back-api/model/generic-data-soft-delete.ts
Normal file
@ -0,0 +1,52 @@
/**
 * Interface of the server (auto-generated code)
 */
import { z as zod } from "zod";

import { ZodGenericData, ZodGenericDataUpdate, ZodGenericDataCreate } from "./generic-data";

export const ZodGenericDataSoftDelete = ZodGenericData.extend({
    /**
     * Deleted state
     */
    deleted: zod.boolean().readonly().optional(),

});

export type GenericDataSoftDelete = zod.infer<typeof ZodGenericDataSoftDelete>;

export function isGenericDataSoftDelete(data: any): data is GenericDataSoftDelete {
    try {
        ZodGenericDataSoftDelete.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericDataSoftDelete' error=${e}`);
        return false;
    }
}
export const ZodGenericDataSoftDeleteUpdate = ZodGenericDataUpdate;

export type GenericDataSoftDeleteUpdate = zod.infer<typeof ZodGenericDataSoftDeleteUpdate>;

export function isGenericDataSoftDeleteUpdate(data: any): data is GenericDataSoftDeleteUpdate {
    try {
        ZodGenericDataSoftDeleteUpdate.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericDataSoftDeleteUpdate' error=${e}`);
        return false;
    }
}
export const ZodGenericDataSoftDeleteCreate = ZodGenericDataCreate;

export type GenericDataSoftDeleteCreate = zod.infer<typeof ZodGenericDataSoftDeleteCreate>;

export function isGenericDataSoftDeleteCreate(data: any): data is GenericDataSoftDeleteCreate {
    try {
        ZodGenericDataSoftDeleteCreate.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericDataSoftDeleteCreate' error=${e}`);
        return false;
    }
}
53
front/src/back-api/model/generic-data.ts
Normal file
@ -0,0 +1,53 @@
/**
 * Interface of the server (auto-generated code)
 */
import { z as zod } from "zod";

import { ZodGenericTiming, ZodGenericTimingUpdate, ZodGenericTimingCreate } from "./generic-timing";
import { ZodLong } from "./long";

export const ZodGenericData = ZodGenericTiming.extend({
    /**
     * Unique Id of the object
     */
    id: ZodLong.readonly(),

});

export type GenericData = zod.infer<typeof ZodGenericData>;

export function isGenericData(data: any): data is GenericData {
    try {
        ZodGenericData.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericData' error=${e}`);
        return false;
    }
}
export const ZodGenericDataUpdate = ZodGenericTimingUpdate;

export type GenericDataUpdate = zod.infer<typeof ZodGenericDataUpdate>;

export function isGenericDataUpdate(data: any): data is GenericDataUpdate {
    try {
        ZodGenericDataUpdate.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericDataUpdate' error=${e}`);
        return false;
    }
}
export const ZodGenericDataCreate = ZodGenericTimingCreate;

export type GenericDataCreate = zod.infer<typeof ZodGenericDataCreate>;

export function isGenericDataCreate(data: any): data is GenericDataCreate {
    try {
        ZodGenericDataCreate.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericDataCreate' error=${e}`);
        return false;
    }
}
60
front/src/back-api/model/generic-timing.ts
Normal file
@ -0,0 +1,60 @@
/**
 * Interface of the server (auto-generated code)
 */
import { z as zod } from "zod";

import { ZodIsoDate } from "./iso-date";

export const ZodGenericTiming = zod.object({
    /**
     * Creation time of the object
     */
    createdAt: ZodIsoDate.readonly().optional(),
    /**
     * Last update time of the object
     */
    updatedAt: ZodIsoDate.readonly().optional(),

});

export type GenericTiming = zod.infer<typeof ZodGenericTiming>;

export function isGenericTiming(data: any): data is GenericTiming {
    try {
        ZodGenericTiming.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericTiming' error=${e}`);
        return false;
    }
}
export const ZodGenericTimingUpdate = zod.object({

});

export type GenericTimingUpdate = zod.infer<typeof ZodGenericTimingUpdate>;

export function isGenericTimingUpdate(data: any): data is GenericTimingUpdate {
    try {
        ZodGenericTimingUpdate.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericTimingUpdate' error=${e}`);
        return false;
    }
}
export const ZodGenericTimingCreate = zod.object({

});

export type GenericTimingCreate = zod.infer<typeof ZodGenericTimingCreate>;

export function isGenericTimingCreate(data: any): data is GenericTimingCreate {
    try {
        ZodGenericTimingCreate.parse(data);
        return true;
    } catch (e: any) {
        console.log(`Fail to parse data type='ZodGenericTimingCreate' error=${e}`);
        return false;
    }
}
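These generic-* schemas are composed with zod's .extend(): GenericTiming contributes the timestamps, GenericData adds the numeric id, and the soft-delete layer adds the deleted flag, so concrete models such as ZodData only declare their own fields on top of one of these bases. A small sketch of that composition, using a hypothetical ZodExample schema rather than one from the repository:

import { z as zod } from "zod";
import { ZodGenericTiming } from "./generic-timing";

// Hypothetical model built the same way as ZodGenericData: extend the shared base schema.
const ZodExample = ZodGenericTiming.extend({
    name: zod.string().max(128),
});
type Example = zod.infer<typeof ZodExample>;

// createdAt/updatedAt stay optional, so a minimal object still parses.
const example: Example = ZodExample.parse({ name: "demo" });
console.log(example.name);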
Some files were not shown because too many files have changed in this diff.