Compare commits

...

88 Commits

SHA1 Message Date
62435cd97e [RELEASE] new version 0.6.0 2024-01-06 21:24:21 +01:00
d983210a10 [DE] update rteadme and .gitignore 2024-01-06 21:03:03 +01:00
a704008f7d [DEV] format pom.xml 2024-01-06 09:07:03 +01:00
04a2014799 Update LICENSE 2024-01-05 23:57:34 +01:00
b74b404e72 wrong position of dependabot 2024-01-05 23:56:15 +01:00
42245e026b Create dependabot.yml 2024-01-05 23:55:37 +01:00
284c00a88c [CI] update build path to generic path 2024-01-05 23:52:20 +01:00
0ae8f9ad16 [DEV] remove codacity 2024-01-05 15:28:41 +01:00
7b31c66ce5 [DEV] correct delete API for test 2024-01-05 15:27:55 +01:00
fe63c6b954 try to find a correct maven plugin for dependecy (Signed-off-by: Edouard DUPIN <yui.heero@gmail.com>) 2024-01-05 00:11:09 +01:00
b4b6e40250 [DEV] update generation of dependency 2024-01-04 23:51:48 +01:00
4d1f70a637 [DEV] add dependabot 2024-01-04 23:51:44 +01:00
86dc0351fe Update maven.yml 2024-01-04 23:37:49 +01:00
a049d3ca30 Create maven.yml 2024-01-04 23:22:50 +01:00
c90af0bdc9 Create codacy.yml 2024-01-04 23:21:50 +01:00
f3ea7983df [DEV] add clean all for test mode 2024-01-04 21:45:52 +01:00
ba314a97ed [DEV] reduce step time between migration 2024-01-03 00:18:43 +01:00
c99fb84ef4 [DEV] add list at insert 2024-01-03 00:18:20 +01:00
eff5513705 [DEV] Add drop table to improve testing 2024-01-02 12:05:44 +01:00
66796d9591 [DEV] try to fix the ManyToMany link table insertion and update to simplify API 2023-12-31 09:34:46 +01:00
554e2493aa [DEBUG] correct link table (remove the deleted field) 2023-12-31 09:34:10 +01:00
49654698a6 [DEV] remove the timeout control in test mode to permit @swagger to have generic token 2023-12-31 09:32:40 +01:00
61f3e70835 update REST API caller 2023-12-31 09:31:56 +01:00
6ac2f1dcfc [DEV] update comment of swagger 2023-12-31 09:31:34 +01:00
5b7fdce349 [DEV] add specific jackson catcher 2023-12-31 09:29:34 +01:00
058fb2e640 [DEV] fix size error in string 2023-12-31 09:28:42 +01:00
dfc7c77f47 [DEV] update with openAPI 2023-12-27 11:08:18 +01:00
1c82fb1a86 [DEV] upgrade library start to be really cool 2023-12-22 23:34:22 +01:00
3b0c73bd55 [DEV] continue normalisation API 2023-12-18 07:32:46 +01:00
d9fcacc812 [DEV] rework to finalize API 2023-12-16 11:09:53 +01:00
5fc45a23d3 [DEV] add a simple JPA checker to simplify client code 2023-12-10 23:58:38 +01:00
81b6438a7c [DEV] correct the test mode 2023-12-10 23:25:02 +01:00
26ba20d964 [DEV] continue integrations 2023-12-08 20:15:20 +01:00
659f9ca306 [FIX] test unit and style 2023-12-08 20:15:20 +01:00
9f337db62c [DEV] update dev tag version 2023-11-29 19:15:02 +01:00
c0fe8ca64d [RELEASE] new version 0.5.0 2023-11-29 19:14:17 +01:00
9a26fba91c [FIX] update some fail 2023-11-27 22:30:49 +01:00
3d526aaede [FIX-DOC] update migration and some docs 2023-11-26 22:54:40 +01:00
c3d2eff5be [DEV] update options and where condition, some normalisation 2023-11-25 14:13:58 +01:00
ed3bfa0604 [DEV] update migraation model and manage sqlite support update with milisecond 2023-11-24 00:07:52 +01:00
01de431f5a [DEV] correct migration engine and add many test 2023-11-23 21:19:22 +01:00
7fd6502e60 [DEV] missing exception 2023-11-13 21:38:07 +01:00
49480bc0aa [DEV] start add hooks 2023-11-13 21:38:07 +01:00
b48916be07 [DEV] update the basic exception model 2023-11-13 23:33:05 +01:00
d1a866277d [DEBUG] missing clean connections 2023-11-12 21:48:28 +01:00
c4fc49d91b [DEV] remove generic annotation and test json update 2023-11-11 10:47:03 +01:00
f69bc8097a [DEV] add sub-element with object and serialization as json 2023-11-03 22:45:28 +01:00
bc5c37e02a [DEV] continue works 2023-11-02 17:30:14 +01:00
8d271601be [DEV] refacto dataAccess and ManyToMany interface (get Long) 2023-11-02 15:14:55 +01:00
81cfe8a713 [DEV] update sone modality of test and normalization 2023-10-28 00:29:14 +02:00
bfb329b5be [DEV] work On test and api correction 2023-10-25 23:53:55 +02:00
88b945285b [DEV] update some models 2023-10-25 10:15:54 +02:00
9730b89b15 [DEV] continue review integration JPA 2023-10-23 00:36:04 +02:00
e64c70cd86 [DEV] continue refacto 2023-10-15 23:36:01 +02:00
d8c6de7bde [DEV] review many models and system 2023-10-14 12:18:36 +02:00
99cca8bebf continue refacto 2023-10-02 16:25:45 +02:00
293d80efd7 [DEV] some correction in migartion that does not work 2023-08-13 19:29:11 +02:00
144190fc4b [DOC] add some comment in migration 2023-06-27 23:53:41 +02:00
3a4217391a update some logs 2023-05-27 19:02:38 +02:00
f510537964 [DEV] correct insert on right 2023-05-25 23:13:08 +02:00
6cf67a1143 [DEV] migration manage first migration 2023-05-11 07:28:34 +02:00
1826f40874 [DEV] Migration base 1 OK 2023-05-08 23:30:44 +02:00
7c37b65842 [DEV] implement base of create and check db exist 2023-05-06 23:58:21 +02:00
1501b7a21e [DEV] implemet base of migration 2023-05-02 21:06:39 +02:00
37212ba70e [DEV] security, restWrapper, catcher exception, sqlite:memory 2023-04-30 22:44:32 +02:00
7fd93485e6 [DEV] add right checker... 2023-04-27 11:31:11 +02:00
dc8cae5150 [DEV] rework authentication 2023-04-22 00:32:29 +02:00
12231762d3 [DEV] set SSO optionnal. (no security mode) 2023-03-12 21:49:53 +01:00
a07c134d0f new java version 2023-02-17 19:01:53 +01:00
1d10ecb618 [DEBUG] remove deleted whan not needed 2023-02-17 19:00:32 +01:00
fe3fc54b7b [DEBUG] already corrected 2023-02-12 23:58:22 +01:00
84e1268e1c create version 0.3.2 2023-02-12 23:55:39 +01:00
956c120b47 [DEV] correct now in sqlite and wrint time in jwt token 2023-02-12 23:55:22 +01:00
124b24e059 [DEV] big upgrade of dependedncy 2023-02-02 00:03:06 +01:00
fab2b3b017 [DEV] add support of SQLITE 2023-02-01 20:21:11 +01:00
b85d5ec423 correct comment 2023-01-29 23:13:07 +01:00
50cb92703b base of new securisation model 2023-01-28 00:15:03 +01:00
949fc0b05c [DEV] think about the migration 2023-01-20 21:42:40 +01:00
d1dded0694 [DEV] correct json parining include 2023-01-19 00:01:12 +01:00
25fa3f8298 [DEV] add basic jenkinsfile 2023-01-08 17:49:20 +01:00
23e3739654 update version 0.2.4 2023-01-08 12:14:15 +01:00
edfe9e8016 [DEV] correct error management of interfaces 2023-01-08 12:13:33 +01:00
b50dacc374 [DEBUG] remove dead bdd entry 2023-01-06 00:25:18 +01:00
ff9907e341 [DEV] upgrade and remove logs 2023-01-05 00:11:07 +01:00
1a6135f2cf [DEV] add many tools to simplify usages 2023-01-03 23:52:25 +01:00
15c9ab83db [DEBUG] correct some missing unconnect and remove deprecated logs 2023-01-02 00:09:28 +01:00
e090943f49 [DEV] add gitigore 2022-12-30 09:03:56 +01:00
23d53fdfd7 [DEV] uopdate release 0.1.4 2022-12-30 09:01:57 +01:00
166 changed files with 10291 additions and 3229 deletions

7
.checkstyle Normal file

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<fileset-config file-format-version="1.2.0" simple-config="true" sync-formatter="false">
<fileset name="all" enabled="true" check-config-name="Google Checks" local="false">
<file-match-pattern match-pattern="." include-pattern="true"/>
</fileset>
</fileset-config>

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="out/maven/classes" path="src">
<classpathentry including="**/*.java" kind="src" output="out/maven/classes" path="src">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
@@ -8,17 +8,24 @@
</classpathentry>
<classpathentry kind="src" output="out/maven/test-classes" path="test/src">
<attributes>
<attribute name="test" value="true"/>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
<attribute name="test" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-17">
<classpathentry exported="true" kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<classpathentry excluding="**" kind="src" output="out/maven/test-classes" path="test/resources">
<attributes>
<attribute name="test" value="true"/>
<attribute name="maven.pomderived" value="true"/>
<attribute name="optional" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-21">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>

12
.github/dependabot.yml vendored Normal file

@@ -0,0 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"

37
.github/workflows/maven.yml vendored Normal file

@@ -0,0 +1,37 @@
# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-maven
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
name: Java CI with Maven
on:
push:
branches: [ "develop" ]
pull_request:
branches: [ "develop" ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: '21'
distribution: 'temurin'
cache: maven
- name: Build with Maven
run: mvn -B package --file pom.xml
- name: find the element
run: find . -name "dependency-graph.json"
# Optional: Uploads the full dependency graph to GitHub to improve the quality of Dependabot alerts this repository can receive
- name: Update dependency graph
uses: advanced-security/maven-dependency-submission-action@571e99aab1055c2e71a1e2309b9691de18d6b7d6

4
.gitignore vendored Normal file

@@ -0,0 +1,4 @@
out/
target/
*.class
.settings/

31
.jenkinsfile Normal file

@@ -0,0 +1,31 @@
pipeline {
agent any
environment {
REGISTRY_ADDRESS = "gitea.atria-soft.org"
REGISTRY_ADDRESS_FULL = "https://${REGISTRY_ADDRESS}"
REPOSITORY_BASE_NAME = 'kangaroo-and-rabbit/archidata'
TAG_DOCKER = getEnvName(env.BRANCH_NAME)
}
stages {
stage('save-evironment') {
steps {
sh 'ls -la'
sh 'pwd'
sh 'uname -a'
sh 'printenv | sort'
//sh 'cat version.txt'
sh 'git log -n 20'
sh 'javac --version'
}
}
}
}
def getEnvName(branchName) {
if("master".equals(branchName)) {
return "latest";
} else if("dev".equals(branchName)) {
return "dev";
}
return "other";
}

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>achi-data</name>
<name>archi-data</name>
<comment></comment>
<projects>
</projects>
@@ -20,4 +20,15 @@
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
<filteredResources>
<filter>
<id>1682721079856</id>
<name></name>
<type>30</type>
<matcher>
<id>org.eclipse.core.resources.regexFilterMatcher</id>
<arguments>node_modules|\.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__</arguments>
</matcher>
</filter>
</filteredResources>
</projectDescription>

375
LICENSE

@@ -1,6 +1,373 @@
PROPIETARY licence
==================
Mozilla Public License Version 2.0
==================================
Copyright at Edouard DUPIN
1. Definitions
--------------
you have no right
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

120
README.md

@@ -1,27 +1,131 @@
Generic backend for archidata in java
===================================
Archi-data
==========
Archi-data is a framework that simplify:
- Creating a REST server with:
- Right control
- Swagger display interface
- Normalize error generate by the server
- Access to the DB:
- introspect Object and insert in the TD (SQLITE & MY-SQL)
- Manage migration
- JPA checker for many generic request
- simplify the request of the Test-service
Develop in cmd-line:
--------------------
The first step is configuring your JAVA version (or select the JVM with the OS)
```bash
export PATH=$(ls -d --color=never /usr/lib/jvm/java-2*-openjdk)/bin:$PATH
```
Install the dependency:
```bash
mvn install
```
// create a single package jar
mvn clean compile assembly:single
Run the test
```bash
mvn test
```
Install it for external use
```bash
mvn install
```
Develop With Eclipse:
--------------------
Import the project:
- Open a (new) project on eclipse
- `File` -> `Import`
- `Maven` -> `Existing Maven project`
- Select the `pom.xml` file and click on import
Run the Test:
- Open the package `test.kar.archidata`
- Click right on it
- Select `Debug As` -> `JUnit Test`
Install in the local maven repository:
- Click right on the `pom.xml` file
- Select `Run As` -> `Maven install`
Somes tools:
============
generic interface for all KAR web application
Auto-update dependency:
-----------------------
Auto-update to the last version dependency:
```bash
mvn versions:use-latest-versions
```
Format the code
---------------
Simply run the cmd-line:
```bash
mvn formatter:format
```
Reformat XML file like the pom.xml
```bash
XMLLINT_INDENT=" " xmllint --format "back/pom.xml" -o "back/pom.xml"
```
Enable the pre-commit checker
-----------------------------
```bash
./tools/configure_precommit.bash
```
> **_Note_**: You can change the code in `.git/hooks/pre-commit` by replacing `formatter:verify` with `formatter:format` to auto format the code @ every commit
Add Gitea in the dependency for the registry:
=============================================
Read instruction for tocken in ~/.m2/setting.xml
release:
edit file: ```~/.m2/settings.xml```
export PATH=/usr/lib/jvm/java-18-openjdk/bin:$PATH
```xml
<settings>
<servers>
<server>
<id>gitea</id>
<configuration>
<httpHeaders>
<property>
<name>Authorization</name>
<value>token xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx</value>
</property>
</httpHeaders>
</configuration>
</server>
</servers>
</settings>
```
release:
========
```bash
export PATH=$(ls -d --color=never /usr/lib/jvm/java-2*-openjdk)/bin:$PATH
mvn install
mvn deploy
```

1
dependabot.yml Normal file

@@ -0,0 +1 @@

226
pom.xml

@@ -1,38 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>kangaroo-and-rabbit</groupId>
<artifactId>archidata</artifactId>
<version>0.1.3</version>
<modelVersion>4.0.0</modelVersion>
<groupId>kangaroo-and-rabbit</groupId>
<artifactId>archidata</artifactId>
<version>0.6.0</version>
<properties>
<jaxrs.version>2.1</jaxrs.version>
<jersey.version>2.32</jersey.version>
<jaxb.version>2.3.1</jaxb.version>
<istack.version>3.0.7</istack.version>
<maven.compiler.version>3.1</maven.compiler.version>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<maven.dependency.version>3.1.1</maven.dependency.version>
<jersey.version>3.1.5</jersey.version>
<jaxb.version>2.3.1</jaxb.version>
<istack.version>4.1.1</istack.version>
</properties>
<repositories>
<repository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</repository>
<repository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</repository>
<snapshotRepository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</snapshotRepository>
<repository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</repository>
<snapshotRepository>
<id>gitea</id>
<url>https://gitea.atria-soft.org/api/packages/kangaroo-and-rabbit/maven</url>
</snapshotRepository>
</distributionManagement>
<dependencyManagement>
<dependencies>
<dependency>
@@ -44,13 +40,28 @@
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.9</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>2.0.9</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jersey.media/jersey-media-multipart -->
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.inject</groupId>
<artifactId>jersey-hk2</artifactId>
@@ -65,9 +76,14 @@
<version>${jaxb.version}</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<version>2.1.1</version>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
<version>4.0.4</version>
</dependency>
<dependency>
<groupId>jakarta.ws.rs</groupId>
<artifactId>jakarta.ws.rs-api</artifactId>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>com.sun.xml.bind</groupId>
@@ -84,43 +100,75 @@
<artifactId>jersey-test-framework-provider-grizzly2</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.30</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.8.10</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.0.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.jetbrains</groupId>
<artifactId>annotations</artifactId>
<version>RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.nimbusds</groupId>
<artifactId>nimbus-jose-jwt</artifactId>
<version>9.22</version>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.16.0</version>
</dependency>
</dependencies>
<dependency>
<groupId>jakarta.servlet</groupId>
<artifactId>jakarta.servlet-api</artifactId>
<version>6.1.0-M1</version>
<scope>provided</scope>
</dependency>
<!-- Interface for My-sql & sqlite DB -->
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
<version>8.2.0</version>
</dependency>
<dependency>
<groupId>org.xerial</groupId>
<artifactId>sqlite-jdbc</artifactId>
<version>3.40.0.0</version>
</dependency>
<!-- Interface for JWT token -->
<dependency>
<groupId>com.nimbusds</groupId>
<artifactId>nimbus-jose-jwt</artifactId>
<version>9.37.1</version>
</dependency>
<dependency>
<groupId>jakarta.persistence</groupId>
<artifactId>jakarta.persistence-api</artifactId>
<version>3.2.0-M1</version>
</dependency>
<!-- Swagger dependencies -->
<dependency>
<groupId>io.swagger.core.v3</groupId>
<artifactId>swagger-jaxrs2-jakarta</artifactId>
<version>2.1.10</version>
</dependency>
<!--
************************************************************
** TEST dependency **
************************************************************
-->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.10.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.10.1</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src</sourceDirectory>
<testSourceDirectory>test/src</testSourceDirectory>
<directory>${project.basedir}/out/maven/</directory>
<testResources>
<testResource>
<directory>${basedir}/test/resources</directory>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -136,6 +184,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
<executions>
<execution>
<id>attach-sources</id>
@@ -145,13 +194,27 @@
</execution>
</executions>
</plugin>
<!-- For dependabot plugin -->
<plugin>
<groupId>org.apache.servicemix.tooling</groupId>
<artifactId>depends-maven-plugin</artifactId>
<version>1.5.0</version>
<executions>
<execution>
<id>generate-depends-file</id>
<goals>
<goal>generate-depends-file</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- junit results -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M5</version>
</plugin>
<plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<archive>
@@ -163,7 +226,7 @@
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
</plugin>
<!-- Java-doc generation for stand-alone site -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -174,6 +237,46 @@
<nohelp>true</nohelp>
</configuration>
</plugin>
<!-- Check the style of the code -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<configLocation>CheckStyle.xml</configLocation>
<consoleOutput>true</consoleOutput>
<failOnViolation>true</failOnViolation>
<failsOnError>true</failsOnError>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
</configuration>
</plugin>
<plugin>
<groupId>net.revelc.code.formatter</groupId>
<artifactId>formatter-maven-plugin</artifactId>
<version>2.12.2</version>
<configuration>
<encoding>UTF-8</encoding>
<lineEnding>LF</lineEnding>
<configFile>Formatter.xml</configFile>
<directories>
<directory>src/</directory>
<directory>test/src</directory>
</directories>
<includes>
<include>**/*.java</include>
</includes>
<excludes>
<exclude>module-info.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<goals>
<goal>validate</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<!-- Generate Java-docs As Part Of Project Reports -->
@@ -189,5 +292,4 @@
</plugin>
</plugins>
</reporting>
</project>
</project>

View File

@@ -1,16 +1,13 @@
package org.kar.archidata;
import org.kar.archidata.db.DBConfig;
import org.kar.archidata.util.ConfigBaseVariable;
import org.kar.archidata.tools.ConfigBaseVariable;
public class GlobalConfiguration {
public static DBConfig dbConfig = null;
static {
dbConfig = new DBConfig(ConfigBaseVariable.getDBHost(),
Integer.parseInt(ConfigBaseVariable.getDBPort()),
ConfigBaseVariable.getDBLogin(),
ConfigBaseVariable.getDBPassword(),
ConfigBaseVariable.getDBName());
}
static {
dbConfig = new DBConfig(ConfigBaseVariable.getDBType(), ConfigBaseVariable.getDBHost(), Integer.parseInt(ConfigBaseVariable.getDBPort()), ConfigBaseVariable.getDBLogin(),
ConfigBaseVariable.getDBPassword(), ConfigBaseVariable.getDBName(), ConfigBaseVariable.getDBKeepConnected());
}
}
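For context, the shared `dbConfig` built above is what `DBEntry.createInterface(...)` consumes elsewhere in this diff (see `UserDB` further down). A hedged usage sketch, assuming only the classes shown here (the SQL statement and class name are illustrative, not part of the diff):

```java
import java.sql.PreparedStatement;
import java.sql.ResultSet;

import org.kar.archidata.GlobalConfiguration;
import org.kar.archidata.db.DBEntry;

// Hedged sketch: open a raw connection from the shared configuration,
// run an illustrative query against the `user` table, then release it.
public class RawQueryExample {
	public static void main(final String[] args) throws Exception {
		final DBEntry entry = DBEntry.createInterface(GlobalConfiguration.dbConfig);
		try {
			final PreparedStatement ps = entry.connection.prepareStatement("SELECT COUNT(*) FROM `user`");
			final ResultSet rs = ps.executeQuery();
			if (rs.next()) {
				System.out.println("user count: " + rs.getLong(1));
			}
		} finally {
			entry.close();
		}
	}
}
```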

File diff suppressed because it is too large.

View File

@@ -1,28 +1,37 @@
package org.kar.archidata;
import org.kar.archidata.util.ConfigBaseVariable;
import org.kar.archidata.util.JWTWrapper;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.kar.archidata.tools.JWTWrapper;
public class UpdateJwtPublicKey extends Thread {
boolean kill = false;
@Override
public void run() {
while (this.kill == false) {
// need to uppgrade when server call us...
if (ConfigBaseVariable.getSSOAddress() == null) {
System.out.println("SSO INTERFACE is not provided ==> work alone.");
// No SO provided, kill the thread.
return;
}
while (!this.kill) {
// need to upgrade when server call us...
try {
JWTWrapper.initLocalTokenRemote(ConfigBaseVariable.getSSOAddress(), "archidata");
} catch (Exception e1) {
} catch (final Exception e1) {
e1.printStackTrace();
System.out.println("Can not retreive the basic tocken");
return;
}
try {
Thread.sleep(1000*60*5, 0);
} catch (InterruptedException e) {
// update every 5 minutes the master token
Thread.sleep(1000 * 60 * 5, 0);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}
}
}
public void kill() {
this.kill = true;
}
}
}
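A hedged usage sketch for the reworked thread above (the bootstrap class name is invented; only `start()` and the new `kill()` method from this diff are used):

```java
import org.kar.archidata.UpdateJwtPublicKey;

// Hedged sketch: start the JWT public-key refresh thread at server boot
// and stop it cleanly on shutdown. The thread exits on its own when no
// SSO address is configured, as shown in run() above.
public class ServerBootstrap {
	public static void main(final String[] args) {
		final UpdateJwtPublicKey keyUpdater = new UpdateJwtPublicKey();
		keyUpdater.start();
		Runtime.getRuntime().addShutdownHook(new Thread(keyUpdater::kill));
	}
}
```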

View File

@@ -1,74 +1,42 @@
package org.kar.archidata;
import org.kar.archidata.db.DBEntry;
import org.kar.archidata.model.User;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.db.DBEntry;
import org.kar.archidata.model.User;
public class UserDB {
public UserDB() {
}
public UserDB() {}
public static User getUsers(long userId) throws Exception {
return SqlWrapper.get(User.class, userId);
}
public static User getUsers(final long userId) throws Exception {
return DataAccess.get(User.class, userId);
}
public static User getUserOrCreate(long userId, String userLogin) throws Exception {
User user = getUsers(userId);
if (user != null) {
return user;
}
createUsersInfoFromOAuth(userId, userLogin);
return getUsers(userId);
}
private static void createUsersInfoFromOAuth(long userId, String login) {
DBEntry entry = new DBEntry(GlobalConfiguration.dbConfig);
String query = "INSERT INTO `user` (`id`, `login`, `lastConnection`, `admin`, `blocked`, `removed`) VALUE (?,?,now(3),'0','0','0')";
try {
PreparedStatement ps = entry.connection.prepareStatement(query);
ps.setLong(1, userId);
ps.setString(2, login);
ps.executeUpdate();
} catch (SQLException throwables) {
throwables.printStackTrace();
}
entry.disconnect();
}
public static User getUserOrCreate(final long userId, final String userLogin) throws Exception {
final User user = getUsers(userId);
if (user != null) {
return user;
}
createUsersInfoFromOAuth(userId, userLogin);
return getUsers(userId);
}
private static void createUsersInfoFromOAuth(final long userId, final String login) throws IOException {
final DBEntry entry = DBEntry.createInterface(GlobalConfiguration.dbConfig);
final String query = "INSERT INTO `user` (`id`, `login`, `lastConnection`, `admin`, `blocked`, `removed`) VALUE (?,?,now(3),'0','0','0')";
try {
final PreparedStatement ps = entry.connection.prepareStatement(query);
ps.setLong(1, userId);
ps.setString(2, login);
ps.executeUpdate();
} catch (final SQLException throwables) {
throwables.printStackTrace();
} finally {
entry.close();
}
}
}
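A hedged usage sketch for the migrated `UserDB` helper (the id and login values are illustrative only):

```java
import org.kar.archidata.UserDB;
import org.kar.archidata.model.User;

// Hedged sketch: resolve the local user row for an OAuth id,
// creating it on first login via createUsersInfoFromOAuth().
public class UserLookupExample {
	public static void main(final String[] args) throws Exception {
		final User user = UserDB.getUserOrCreate(42L, "example-login");
		System.out.println(user);
	}
}
```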

View File

@@ -0,0 +1,367 @@
package org.kar.archidata.annotation;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import org.kar.archidata.dataAccess.QueryOptions;
import org.kar.archidata.dataAccess.options.OverrideTableName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.Column;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import jakarta.validation.constraints.Size;
public class AnnotationTools {
static final Logger LOGGER = LoggerFactory.getLogger(AnnotationTools.class);
public static String getTableName(final Class<?> clazz, final QueryOptions options) throws Exception {
if (options != null) {
final OverrideTableName data = options.get(OverrideTableName.class);
if (data != null) {
return data.getName();
}
}
return AnnotationTools.getTableName(clazz);
}
public static String getTableName(final Class<?> element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Table.class);
if (annotation.length == 0) {
// when no annotation is detected, then the table name is the class name
return element.getSimpleName();
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Table on " + element.getClass().getCanonicalName());
}
final String tmp = ((Table) annotation[0]).name();
if (tmp == null) {
return element.getSimpleName();
}
return tmp;
}
public static String getSchemedescription(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Schema.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Schema on " + element.getClass().getCanonicalName());
}
return ((Schema) annotation[0]).description();
}
public static String getComment(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(DataComment.class);
if (annotation.length == 0) {
return getSchemedescription(element);
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @DataComment on " + element.getClass().getCanonicalName());
}
return ((DataComment) annotation[0]).value();
}
public static String getDefault(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(DataDefault.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @DataDefault on " + element.getClass().getCanonicalName());
}
return ((DataDefault) annotation[0]).value();
}
public static ManyToOne getManyToOne(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(ManyToOne.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @ManyToOne on " + element.getClass().getCanonicalName());
}
return (ManyToOne) annotation[0];
}
public static DataJson getDataJson(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(DataJson.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @ManyToOne on " + element.getClass().getCanonicalName());
}
return (DataJson) annotation[0];
}
public static Long getConstraintsMax(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Max.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Size on " + element.getClass().getCanonicalName());
}
return ((Max) annotation[0]).value();
}
public static Long getConstraintsMin(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Min.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Size on " + element.getClass().getCanonicalName());
}
return ((Min) annotation[0]).value();
}
public static int getLimitSize(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Column.class);
if (annotation.length == 0) {
return 255;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Column on " + element.getClass().getCanonicalName());
}
final int length = ((Column) annotation[0]).length();
return length <= 0 ? 0 : length;
}
public static Size getConstraintsSize(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Size.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Size on " + element.getClass().getCanonicalName());
}
return (Size) annotation[0];
}
public static String getConstraintsPattern(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Pattern.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Pattern on " + element.getClass().getCanonicalName());
}
return ((Pattern) annotation[0]).regexp();
}
public static boolean isAnnotationGroup(final Field field, final Class<?> annotationType) {
try {
final Annotation[] anns = field.getAnnotations();
for (final Annotation ann : anns) {
if (ann.annotationType() == annotationType) {
return true;
}
}
for (final Annotation ann : anns) {
final Annotation[] anns2 = ann.annotationType().getDeclaredAnnotations();
for (final Annotation ann2 : anns2) {
if (ann2.annotationType() == annotationType) {
return true;
}
}
}
} catch (final Exception ex) {
LOGGER.error("Catch exception when try to get annotation...{}", ex.getLocalizedMessage());
return false;
}
return false;
}
public static String getFieldName(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Column.class);
if (annotation.length == 0) {
return element.getName();
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Column on " + element.getClass().getCanonicalName());
}
final String name = ((Column) annotation[0]).name();
if (name.isBlank()) {
return element.getName();
}
return name;
}
public static boolean getColumnNotNull(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Column.class);
if (annotation.length == 0) {
return false;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Column on " + element.getClass().getCanonicalName());
}
return !((Column) annotation[0]).nullable();
}
public static boolean getConstraintsNotNull(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(NotNull.class);
if (annotation.length == 0) {
return false;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @NotNull on " + element.getClass().getCanonicalName());
}
return true;
}
public static boolean isPrimaryKey(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Id.class);
if (annotation.length == 0) {
return false;
}
return true;
}
public static boolean isUnique(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(Column.class);
if (annotation.length == 0) {
return false;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Column on " + element.getClass().getCanonicalName());
}
return ((Column) annotation[0]).unique();
}
public static GenerationType getStrategy(final Field element) throws Exception {
final Annotation[] annotation = element.getDeclaredAnnotationsByType(GeneratedValue.class);
if (annotation.length == 0) {
return null;
}
if (annotation.length > 1) {
throw new Exception("Must not have more than 1 element @Column on " + element.getClass().getCanonicalName());
}
return ((GeneratedValue) annotation[0]).strategy();
}
public static boolean isDeletedField(final Field element) throws Exception {
return element.getDeclaredAnnotationsByType(DataDeleted.class).length != 0;
}
public static boolean isCreatedAtField(final Field element) throws Exception {
return element.getDeclaredAnnotationsByType(CreationTimestamp.class).length != 0;
}
public static boolean isUpdateAtField(final Field element) throws Exception {
return element.getDeclaredAnnotationsByType(UpdateTimestamp.class).length != 0;
}
public static boolean isdefaultNotRead(final Field element) throws Exception {
return element.getDeclaredAnnotationsByType(DataNotRead.class).length != 0;
}
public static boolean isIdField(final Field element) throws Exception {
return element.getDeclaredAnnotationsByType(Id.class).length != 0;
}
public static String getDeletedFieldName(final Class<?> clazz) throws Exception {
try {
for (final Field elem : clazz.getFields()) {
// static field is only for internal global declaration ==> remove it ..
if (java.lang.reflect.Modifier.isStatic(elem.getModifiers())) {
continue;
}
if (AnnotationTools.isDeletedField(elem)) {
return AnnotationTools.getFieldName(elem);
}
}
} catch (final Exception ex) {
ex.printStackTrace();
}
return null;
}
public static String getUpdatedFieldName(final Class<?> clazz) throws Exception {
try {
for (final Field elem : clazz.getFields()) {
// static field is only for internal global declaration ==> remove it ..
if (java.lang.reflect.Modifier.isStatic(elem.getModifiers())) {
continue;
}
if (AnnotationTools.isUpdateAtField(elem)) {
return AnnotationTools.getFieldName(elem);
}
}
} catch (final Exception ex) {
ex.printStackTrace();
}
return null;
}
public static Field getIdField(final Class<?> clazz) {
try {
for (final Field field : clazz.getFields()) {
// static field is only for internal global declaration ==> remove it ..
if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) {
continue;
}
if (AnnotationTools.isIdField(field)) {
return field;
}
}
} catch (final Exception ex) {
ex.printStackTrace();
}
return null;
}
public static List<String> getFieldsNames(final Class<?> clazz) throws Exception {
return getFieldsNamesFilter(clazz, false);
}
public static List<String> getAllFieldsNames(final Class<?> clazz) throws Exception {
return getFieldsNamesFilter(clazz, true);
}
private static List<String> getFieldsNamesFilter(final Class<?> clazz, final boolean full) throws Exception {
final List<String> out = new ArrayList<>();
for (final Field field : clazz.getFields()) {
// static field is only for internal global declaration ==> remove it ..
if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) {
continue;
}
if (!full && AnnotationTools.isGenericField(field)) {
continue;
}
out.add(AnnotationTools.getFieldName(field));
}
return out;
}
public static boolean isGenericField(final Field elem) throws Exception {
return AnnotationTools.isPrimaryKey(elem) || AnnotationTools.isCreatedAtField(elem) || AnnotationTools.isUpdateAtField(elem) || AnnotationTools.isDeletedField(elem);
}
public static Field getFieldOfId(final Class<?> clazz) throws Exception {
for (final Field field : clazz.getFields()) {
// static field is only for internal global declaration ==> remove it ..
if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) {
continue;
}
if (AnnotationTools.isIdField(field)) {
return field;
}
}
return null;
}
}
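To make the introspection helpers above concrete, here is a hedged sketch with a hypothetical `Animal` entity (class and field names invented); it relies only on methods shown in this file:

```java
import java.util.List;

import org.kar.archidata.annotation.AnnotationTools;

import jakarta.persistence.Column;
import jakarta.persistence.Id;
import jakarta.persistence.Table;

// Hedged sketch: "Animal" is a hypothetical entity used only to illustrate
// how the jakarta.persistence annotations are resolved by AnnotationTools.
@Table(name = "animal")
class Animal {
	@Id
	public Long id;
	@Column(name = "name", length = 128, nullable = false)
	public String name;
}

public class IntrospectionExample {
	public static void main(final String[] args) throws Exception {
		// "animal": resolved from @Table, falling back to the simple class name
		System.out.println(AnnotationTools.getTableName(Animal.class));
		// [name]: @Id counts as a "generic" field and is filtered out here
		final List<String> fields = AnnotationTools.getFieldsNames(Animal.class);
		System.out.println(fields);
		// the unfiltered variant also keeps generic columns such as id
		System.out.println(AnnotationTools.getAllFieldsNames(Animal.class));
	}
}
```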

View File

@@ -7,6 +7,6 @@ import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLAutoIncrement {
public @interface CreationTimestamp {
}

View File

@@ -7,8 +7,7 @@ import java.lang.annotation.Target;
@Target({ ElementType.TYPE, ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLComment {
@Deprecated(since = "0.5.2")
public @interface DataComment {
String value();
}

View File

@@ -7,7 +7,7 @@ import java.lang.annotation.Target;
@Target({ ElementType.TYPE, ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLDefault {
public @interface DataDefault {
String value();

View File

@@ -7,6 +7,6 @@ import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLNotNull {
public @interface DataDeleted {
}

View File

@@ -7,6 +7,6 @@ import java.lang.annotation.Target;
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLIfNotExists {
public @interface DataIfNotExists {
}

View File

@@ -0,0 +1,15 @@
package org.kar.archidata.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.kar.archidata.dataAccess.options.CheckFunctionInterface;
import org.kar.archidata.dataAccess.options.CheckFunctionVoid;
@Target({ ElementType.TYPE, ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
public @interface DataJson {
Class<? extends CheckFunctionInterface> checker() default CheckFunctionVoid.class;
}
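
The DataJson annotation introduced here can presumably be placed on a model field whose value should be stored as serialized JSON rather than in dedicated columns; a hedged sketch, where the UserSettings class and its favoriteTags field are invented:

import java.util.List;

import org.kar.archidata.annotation.DataJson;

public class UserSettings {
	public Long id;
	// Assumption: the whole list is persisted in a single JSON (or text) column instead of a link table.
	@DataJson
	public List<String> favoriteTags;
}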

View File

@@ -7,6 +7,6 @@ import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLNotRead {
public @interface DataNotRead {
}

View File

@@ -1,12 +0,0 @@
package org.kar.archidata.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLForeignKey {
String value();
}

View File

@@ -1,20 +0,0 @@
package org.kar.archidata.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLTableLinkGeneric {
public enum ModelLink {
NONE,
INTERNAL,
EXTERNAL
};
ModelLink value() default ModelLink.EXTERNAL;
}

View File

@@ -1,14 +0,0 @@
package org.kar.archidata.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLTableName {
String value();
}

View File

@@ -1,12 +0,0 @@
package org.kar.archidata.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLUpdateTime {
}

View File

@@ -5,8 +5,8 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Target({ ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLLimitSize {
int value();
public @interface SQLWhere {
String clause();
}
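
Given its TYPE target and single clause() member, SQLWhere looks like a way to pin a fixed WHERE clause onto a model; the following is only a guess at its use, with an invented model and clause, and the package in the import is assumed to match the other annotations in this change.

import org.kar.archidata.annotation.SQLWhere; // assumed package

// Assumption: the clause is appended to the queries generated for this model.
@SQLWhere(clause = "`deleted` = 0")
public class ActiveUser {
	public Long id;
	public String name;
}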

View File

@@ -7,6 +7,6 @@ import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLPrimaryKey {
public @interface UpdateTimestamp {
}

View File

@@ -1,4 +1,4 @@
package org.kar.archidata.annotation;
package org.kar.archidata.annotation.addOn;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@@ -7,6 +7,6 @@ import java.lang.annotation.Target;
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SQLCreateTime {
public @interface SQLTableExternalForeinKeyAsList {
}

View File

@@ -1,14 +0,0 @@
package org.kar.archidata.annotation.security;
import javax.ws.rs.NameBinding;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
@NameBinding
@Retention(RUNTIME)
@Target({METHOD})
public @interface DenyAll {
}

View File

@@ -1,14 +0,0 @@
package org.kar.archidata.annotation.security;
import javax.ws.rs.NameBinding;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
@NameBinding
@Retention(RUNTIME)
@Target({METHOD})
public @interface PermitAll {
}

View File

@@ -1,14 +1,14 @@
package org.kar.archidata.annotation.security;
import javax.ws.rs.NameBinding;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import jakarta.ws.rs.NameBinding;
@NameBinding
@Retention(RUNTIME)
@Target({METHOD})
public @interface PermitTokenInURI {
}
@Target({ METHOD })
public @interface PermitTokenInURI {}

View File

@@ -1,15 +0,0 @@
package org.kar.archidata.annotation.security;
import javax.ws.rs.NameBinding;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
@NameBinding
@Retention(RUNTIME)
@Target({METHOD})
public @interface RolesAllowed {
String[] value();
}

View File

@@ -1,26 +1,15 @@
package org.kar.archidata.api;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.kar.archidata.filter.GenericContext;
import org.kar.archidata.model.Data;
import org.kar.archidata.SqlWrapper;
import org.kar.archidata.annotation.security.PermitTokenInURI;
import org.kar.archidata.annotation.security.RolesAllowed;
import org.kar.archidata.util.ConfigBaseVariable;
import javax.imageio.ImageIO;
import javax.ws.rs.*;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.StreamingOutput;
import java.awt.*;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.*;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
@@ -28,402 +17,394 @@ import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Date;
import javax.imageio.ImageIO;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.kar.archidata.annotation.security.PermitTokenInURI;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.dataAccess.QueryCondition;
import org.kar.archidata.dataAccess.options.Condition;
import org.kar.archidata.filter.GenericContext;
import org.kar.archidata.model.Data;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.swagger.v3.oas.annotations.Operation;
import jakarta.annotation.security.RolesAllowed;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.HeaderParam;
import jakarta.ws.rs.InternalServerErrorException;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.QueryParam;
import jakarta.ws.rs.core.CacheControl;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.HttpHeaders;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.SecurityContext;
import jakarta.ws.rs.core.StreamingOutput;
// https://stackoverflow.com/questions/35367113/jersey-webservice-scalable-approach-to-download-file-and-reply-to-client
// https://gist.github.com/aitoroses/4f7a2b197b732a6a691d
@Path("/data")
@Produces({MediaType.APPLICATION_JSON})
@Produces(MediaType.APPLICATION_JSON)
public class DataResource {
private final static int CHUNK_SIZE = 1024 * 1024; // 1MB chunks
private final static int CHUNK_SIZE_IN = 50 * 1024 * 1024; // 50MB chunks
/**
* Upload some datas
*/
private static long tmpFolderId = 1;
private static final Logger LOGGER = LoggerFactory.getLogger(DataResource.class);
private final static int CHUNK_SIZE = 1024 * 1024; // 1MB chunks
private final static int CHUNK_SIZE_IN = 50 * 1024 * 1024; // 50MB chunks
/** Upload some data */
private static long tmpFolderId = 1;
private static void createFolder(String path) throws IOException {
if (!Files.exists(java.nio.file.Path.of(path))) {
//Log.print("Create folder: " + path);
Files.createDirectories(java.nio.file.Path.of(path));
}
}
private static void createFolder(final String path) throws IOException {
if (!Files.exists(java.nio.file.Path.of(path))) {
// Log.print("Create folder: " + path);
Files.createDirectories(java.nio.file.Path.of(path));
}
}
public static long getTmpDataId() {
return tmpFolderId++;
}
public static long getTmpDataId() {
return tmpFolderId++;
}
public static String getTmpFileInData(long tmpFolderId) {
String filePath = ConfigBaseVariable.getTmpDataFolder() + File.separator + tmpFolderId;
try {
createFolder(ConfigBaseVariable.getTmpDataFolder() + File.separator);
} catch (IOException e) {
e.printStackTrace();
}
return filePath;
}
public static String getTmpFileInData(final long tmpFolderId) {
final String filePath = ConfigBaseVariable.getTmpDataFolder() + File.separator + tmpFolderId;
try {
createFolder(ConfigBaseVariable.getTmpDataFolder() + File.separator);
} catch (final IOException e) {
e.printStackTrace();
}
return filePath;
}
public static String getFileData(long tmpFolderId) {
String filePath = ConfigBaseVariable.getMediaDataFolder() + File.separator + tmpFolderId + File.separator + "data";
try {
createFolder(ConfigBaseVariable.getMediaDataFolder() + File.separator + tmpFolderId + File.separator);
} catch (IOException e) {
e.printStackTrace();
}
return filePath;
}
public static String getFileData(final long tmpFolderId) {
final String filePath = ConfigBaseVariable.getMediaDataFolder() + File.separator + tmpFolderId + File.separator + "data";
try {
createFolder(ConfigBaseVariable.getMediaDataFolder() + File.separator + tmpFolderId + File.separator);
} catch (final IOException e) {
e.printStackTrace();
}
return filePath;
}
public static Data getWithSha512(String sha512) {
System.out.println("find sha512 = " + sha512);
try {
return SqlWrapper.getWhere(Data.class, "sha512", "=", sha512);
} catch (Exception e) {
public static Data getWithSha512(final String sha512) {
LOGGER.info("find sha512 = {}", sha512);
try {
return DataAccess.getWhere(Data.class, new Condition(new QueryCondition("sha512", "=", sha512)));
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
return null;
}
public static Data getWithId(long id) {
System.out.println("find id = " + id);
try {
return SqlWrapper.get(Data.class, id);
} catch (Exception e) {
public static Data getWithId(final long id) {
LOGGER.info("find id = {}", id);
try {
return DataAccess.get(Data.class, id);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
return null;
}
public static Data createNewData(long tmpUID, String originalFileName, String sha512) throws IOException {
// determine mime type:
Data injectedData = new Data();
String mimeType = "";
String extension = originalFileName.substring(originalFileName.lastIndexOf('.') + 1);
switch (extension.toLowerCase()) {
case "jpg":
case "jpeg":
mimeType = "image/jpeg";
break;
case "png":
mimeType = "image/png";
break;
case "webp":
mimeType = "image/webp";
break;
case "mka":
mimeType = "audio/x-matroska";
break;
case "mkv":
mimeType = "video/x-matroska";
break;
case "webm":
mimeType = "video/webm";
break;
default:
throw new IOException("Can not find the mime type of data input: '" + extension + "'");
}
injectedData.mimeType = mimeType;
injectedData.sha512 = sha512;
String tmpPath = getTmpFileInData(tmpUID);
injectedData.size = Files.size(Paths.get(tmpPath));
public static Data createNewData(final long tmpUID, final String originalFileName, final String sha512) throws IOException {
// determine mime type:
Data injectedData = new Data();
String mimeType = "";
final String extension = originalFileName.substring(originalFileName.lastIndexOf('.') + 1);
mimeType = switch (extension.toLowerCase()) {
case "jpg", "jpeg" -> "image/jpeg";
case "png" -> "image/png";
case "webp" -> "image/webp";
case "mka" -> "audio/x-matroska";
case "mkv" -> "video/x-matroska";
case "webm" -> "video/webm";
default -> throw new IOException("Can not find the mime type of data input: '" + extension + "'");
};
injectedData.mimeType = mimeType;
injectedData.sha512 = sha512;
final String tmpPath = getTmpFileInData(tmpUID);
injectedData.size = Files.size(Paths.get(tmpPath));
try {
injectedData = SqlWrapper.insert(injectedData);
} catch (Exception e) {
try {
injectedData = DataAccess.insert(injectedData);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return null;
}
String mediaPath = getFileData(injectedData.id);
System.out.println("src = " + tmpPath);
System.out.println("dst = " + mediaPath);
Files.move(Paths.get(tmpPath), Paths.get(mediaPath), StandardCopyOption.ATOMIC_MOVE);
System.out.println("Move done");
return injectedData;
}
final String mediaPath = getFileData(injectedData.id);
LOGGER.info("src = {}", tmpPath);
LOGGER.info("dst = {}", mediaPath);
Files.move(Paths.get(tmpPath), Paths.get(mediaPath), StandardCopyOption.ATOMIC_MOVE);
LOGGER.info("Move done");
return injectedData;
}
public static String saveTemporaryFile(InputStream uploadedInputStream, long idData) {
return saveFile(uploadedInputStream, DataResource.getTmpFileInData(idData));
}
public static String saveTemporaryFile(final InputStream uploadedInputStream, final long idData) {
return saveFile(uploadedInputStream, DataResource.getTmpFileInData(idData));
}
public static void removeTemporaryFile(long idData) {
String filepath = DataResource.getTmpFileInData(idData);
if (Files.exists(Paths.get(filepath))) {
try {
Files.delete(Paths.get(filepath));
} catch (IOException e) {
System.out.println("can not delete temporary file : " + Paths.get(filepath));
e.printStackTrace();
}
}
}
public static void removeTemporaryFile(final long idData) {
final String filepath = DataResource.getTmpFileInData(idData);
if (Files.exists(Paths.get(filepath))) {
try {
Files.delete(Paths.get(filepath));
} catch (final IOException e) {
LOGGER.info("can not delete temporary file : {}", Paths.get(filepath));
e.printStackTrace();
}
}
}
// save uploaded file to a defined location on the server
static String saveFile(InputStream uploadedInputStream, String serverLocation) {
String out = "";
try {
OutputStream outpuStream = new FileOutputStream(new File(
serverLocation));
int read = 0;
byte[] bytes = new byte[CHUNK_SIZE_IN];
MessageDigest md = MessageDigest.getInstance("SHA-512");
// save uploaded file to a defined location on the server
static String saveFile(final InputStream uploadedInputStream, final String serverLocation) {
String out = "";
try {
OutputStream outpuStream = new FileOutputStream(new File(serverLocation));
int read = 0;
final byte[] bytes = new byte[CHUNK_SIZE_IN];
final MessageDigest md = MessageDigest.getInstance("SHA-512");
outpuStream = new FileOutputStream(new File(serverLocation));
while ((read = uploadedInputStream.read(bytes)) != -1) {
//System.out.println("write " + read);
md.update(bytes, 0, read);
outpuStream.write(bytes, 0, read);
}
System.out.println("Flush input stream ... " + serverLocation);
System.out.flush();
outpuStream.flush();
outpuStream.close();
// create the end of sha512
byte[] sha512Digest = md.digest();
// convert in hexadecimal
out = bytesToHex(sha512Digest);
uploadedInputStream.close();
} catch (IOException ex) {
System.out.println("Can not write in temporary file ... ");
ex.printStackTrace();
} catch (NoSuchAlgorithmException ex) {
System.out.println("Can not find sha512 algorithms");
ex.printStackTrace();
}
return out;
}
outpuStream = new FileOutputStream(new File(serverLocation));
while ((read = uploadedInputStream.read(bytes)) != -1) {
// logger.info("write {}", read);
md.update(bytes, 0, read);
outpuStream.write(bytes, 0, read);
}
LOGGER.info("Flush input stream ... {}", serverLocation);
System.out.flush();
outpuStream.flush();
outpuStream.close();
// create the end of sha512
final byte[] sha512Digest = md.digest();
// convert in hexadecimal
out = bytesToHex(sha512Digest);
uploadedInputStream.close();
} catch (final IOException ex) {
LOGGER.info("Can not write in temporary file ... ");
ex.printStackTrace();
} catch (final NoSuchAlgorithmException ex) {
LOGGER.info("Can not find sha512 algorithms");
ex.printStackTrace();
}
return out;
}
public static String bytesToHex(byte[] bytes) {
StringBuilder sb = new StringBuilder();
for (byte b : bytes) {
sb.append(String.format("%02x", b));
}
return sb.toString();
}
public static String bytesToHex(final byte[] bytes) {
final StringBuilder sb = new StringBuilder();
for (final byte b : bytes) {
sb.append(String.format("%02x", b));
}
return sb.toString();
}
public Data getSmall(Long id) {
try {
return SqlWrapper.get(Data.class, id);
} catch (Exception e) {
public Data getSmall(final Long id) {
try {
return DataAccess.get(Data.class, id);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
return null;
}
@POST
@Path("/upload/")
@Consumes({MediaType.MULTIPART_FORM_DATA})
@RolesAllowed("ADMIN")
public Response uploadFile(@Context SecurityContext sc, @FormDataParam("file") InputStream fileInputStream, @FormDataParam("file") FormDataContentDisposition fileMetaData) {
GenericContext gc = (GenericContext) sc.getUserPrincipal();
System.out.println("===================================================");
System.out.println("== DATA uploadFile " + (gc==null?"null":gc.user));
System.out.println("===================================================");
//public NodeSmall uploadFile(final FormDataMultiPart form) {
System.out.println("Upload file: ");
String filePath = ConfigBaseVariable.getTmpDataFolder() + File.separator + tmpFolderId++;
try {
createFolder(ConfigBaseVariable.getTmpDataFolder() + File.separator);
} catch (IOException e) {
e.printStackTrace();
}
saveFile(fileInputStream, filePath);
return Response.ok("Data uploaded successfully !!").build();
//return null;
}
@POST
@Path("/upload/")
@Consumes({ MediaType.MULTIPART_FORM_DATA })
@RolesAllowed("ADMIN")
@Operation(description = "Insert a new data in the data environment", tags = "SYSTEM")
public Response uploadFile(@Context final SecurityContext sc, @FormDataParam("file") final InputStream fileInputStream, @FormDataParam("file") final FormDataContentDisposition fileMetaData) {
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
LOGGER.info("===================================================");
LOGGER.info("== DATA uploadFile {}", (gc == null ? "null" : gc.userByToken));
LOGGER.info("===================================================");
// public NodeSmall uploadFile(final FormDataMultiPart form) {
LOGGER.info("Upload file: ");
final String filePath = ConfigBaseVariable.getTmpDataFolder() + File.separator + tmpFolderId++;
try {
createFolder(ConfigBaseVariable.getTmpDataFolder() + File.separator);
} catch (final IOException e) {
e.printStackTrace();
}
saveFile(fileInputStream, filePath);
return Response.ok("Data uploaded successfully !!").build();
// return null;
}
@GET
@Path("{id}")
@PermitTokenInURI
@RolesAllowed("USER")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response retriveDataId(@Context SecurityContext sc, @QueryParam(HttpHeaders.AUTHORIZATION) String token, @HeaderParam("Range") String range, @PathParam("id") Long id) throws Exception {
GenericContext gc = (GenericContext) sc.getUserPrincipal();
//System.out.println("===================================================");
System.out.println("== DATA retriveDataId ? id=" + id + " user=" + (gc==null?"null":gc.user));
//System.out.println("===================================================");
Data value = getSmall(id);
if (value == null) {
Response.status(404).
entity("media NOT FOUND: " + id).
type("text/plain").
build();
}
return buildStream(ConfigBaseVariable.getMediaDataFolder() + File.separator + id + File.separator + "data", range, value.mimeType);
}
@GET
@Path("{id}")
@PermitTokenInURI
@RolesAllowed("USER")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@Operation(description = "Get back some data from the data environment", tags = "SYSTEM")
public Response retriveDataId(@Context final SecurityContext sc, @QueryParam(HttpHeaders.AUTHORIZATION) final String token, @HeaderParam("Range") final String range,
@PathParam("id") final Long id) throws Exception {
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
// logger.info("===================================================");
LOGGER.info("== DATA retriveDataId ? id={} user={}", id, (gc == null ? "null" : gc.userByToken));
// logger.info("===================================================");
final Data value = getSmall(id);
if (value == null) {
return Response.status(404).entity("media NOT FOUND: " + id).type("text/plain").build();
}
return buildStream(ConfigBaseVariable.getMediaDataFolder() + File.separator + id + File.separator + "data", range, value.mimeType);
}
@GET
@Path("thumbnail/{id}")
@RolesAllowed("USER")
@PermitTokenInURI
@Produces(MediaType.APPLICATION_OCTET_STREAM)
//@CacheMaxAge(time = 10, unit = TimeUnit.DAYS)
public Response retriveDataThumbnailId(@Context SecurityContext sc,
@QueryParam(HttpHeaders.AUTHORIZATION) String token,
@HeaderParam("Range") String range,
@PathParam("id") Long id) throws Exception {
GenericContext gc = (GenericContext) sc.getUserPrincipal();
//System.out.println("===================================================");
//System.out.println("== DATA retriveDataThumbnailId ? " + (gc==null?"null":gc.user));
//System.out.println("===================================================");
Data value = getSmall(id);
if (value == null) {
return Response.status(404).
entity("media NOT FOUND: " + id).
type("text/plain").
build();
}
String filePathName = ConfigBaseVariable.getMediaDataFolder() + File.separator + id + File.separator + "data";
if ( value.mimeType.contentEquals("image/jpeg")
|| value.mimeType.contentEquals("image/png")
// || value.mimeType.contentEquals("image/webp")
) {
// reads input image
//System.out.println("Read path: " + filePathName);
File inputFile = new File(filePathName);
if (!inputFile.exists()) {
return Response.status(500).
entity("Internal Error: Media is NOT FOUNDABLE: " + id).
type("text/plain").
build();
}
BufferedImage inputImage = ImageIO.read(inputFile);
int scaledWidth = 250;
int scaledHeight = (int)((float)inputImage.getHeight() / (float)inputImage.getWidth() * (float) scaledWidth);
// creates output image
BufferedImage outputImage = new BufferedImage(scaledWidth,
scaledHeight, inputImage.getType());
@GET
@Path("thumbnail/{id}")
@RolesAllowed("USER")
@PermitTokenInURI
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@Operation(description = "Get a thumbnail of from the data environment (if resize is possible)", tags = "SYSTEM")
// @CacheMaxAge(time = 10, unit = TimeUnit.DAYS)
public Response retriveDataThumbnailId(@Context final SecurityContext sc, @QueryParam(HttpHeaders.AUTHORIZATION) final String token, @HeaderParam("Range") final String range,
@PathParam("id") final Long id) throws Exception {
// GenericContext gc = (GenericContext) sc.getUserPrincipal();
// logger.info("===================================================");
// logger.info("== DATA retriveDataThumbnailId ? {}", (gc==null?"null":gc.user));
// logger.info("===================================================");
final Data value = getSmall(id);
if (value == null) {
return Response.status(404).entity("media NOT FOUND: " + id).type("text/plain").build();
}
final String filePathName = ConfigBaseVariable.getMediaDataFolder() + File.separator + id + File.separator + "data";
final File inputFile = new File(filePathName);
if (!inputFile.exists()) {
return Response.status(404).entity("{\"error\":\"media Does not exist: " + id + "\"}").type("application/json").build();
}
if (value.mimeType.contentEquals("image/jpeg") || value.mimeType.contentEquals("image/png")
// || value.mimeType.contentEquals("image/webp")
) {
// reads input image
final BufferedImage inputImage = ImageIO.read(inputFile);
final int scaledWidth = 250;
final int scaledHeight = (int) ((float) inputImage.getHeight() / (float) inputImage.getWidth() * scaledWidth);
// creates output image
final BufferedImage outputImage = new BufferedImage(scaledWidth, scaledHeight, inputImage.getType());
// scales the input image to the output image
Graphics2D g2d = outputImage.createGraphics();
g2d.drawImage(inputImage, 0, 0, scaledWidth, scaledHeight, null);
g2d.dispose();
// create the output stream:
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
// TODO: check how to remove buffer file !!! here, it is not needed at all...
ImageIO.write( outputImage, "JPG", baos);
} catch (IOException e) {
// scales the input image to the output image
final Graphics2D g2d = outputImage.createGraphics();
g2d.drawImage(inputImage, 0, 0, scaledWidth, scaledHeight, null);
g2d.dispose();
// create the output stream:
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
// TODO: check how to remove buffer file !!! here, it is not needed at all...
ImageIO.write(outputImage, "JPG", baos);
} catch (final IOException e) {
e.printStackTrace();
return Response.status(500).
entity("Internal Error: resize fail: " + e.getMessage()).
type("text/plain").
build();
return Response.status(500).entity("Internal Error: resize fail: " + e.getMessage()).type("text/plain").build();
}
byte[] imageData = baos.toByteArray();
//Response.ok(new ByteArrayInputStream(imageData)).build();
Response.ResponseBuilder out = Response.ok(imageData)
.header(HttpHeaders.CONTENT_LENGTH, imageData.length);
out.type("image/jpeg");
// TODO: move this in a decorator !!!
CacheControl cc = new CacheControl();
cc.setMaxAge(3600);
cc.setNoCache(false);
out.cacheControl(cc);
return out.build();
}
return buildStream(filePathName, range, value.mimeType);
}
//@Secured
@GET
@Path("{id}/{name}")
@PermitTokenInURI
@RolesAllowed("USER")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response retriveDataFull(@Context SecurityContext sc, @QueryParam(HttpHeaders.AUTHORIZATION) String token, @HeaderParam("Range") String range, @PathParam("id") Long id, @PathParam("name") String name) throws Exception {
GenericContext gc = (GenericContext) sc.getUserPrincipal();
//System.out.println("===================================================");
System.out.println("== DATA retriveDataFull ? id=" + id + " user=" + (gc==null?"null":gc.user));
//System.out.println("===================================================");
Data value = getSmall(id);
if (value == null) {
Response.status(404).
entity("media NOT FOUND: " + id).
type("text/plain").
build();
}
return buildStream(ConfigBaseVariable.getMediaDataFolder() + File.separator + id + File.separator + "data", range, value.mimeType);
}
final byte[] imageData = baos.toByteArray();
// Response.ok(new ByteArrayInputStream(imageData)).build();
final Response.ResponseBuilder out = Response.ok(imageData).header(HttpHeaders.CONTENT_LENGTH, imageData.length);
out.type("image/jpeg");
// TODO: move this in a decorator !!!
final CacheControl cc = new CacheControl();
cc.setMaxAge(3600);
cc.setNoCache(false);
out.cacheControl(cc);
return out.build();
}
return buildStream(filePathName, range, value.mimeType);
}
/**
* Adapted from http://stackoverflow.com/questions/12768812/video-streaming-to-ipad-does-not-work-with-tapestry5/12829541#12829541
*
* @param range range header
* @return Streaming output
* @throws Exception IOException if an error occurs in streaming.
*/
private Response buildStream(final String filename, final String range, String mimeType) throws Exception {
File file = new File(filename);
//System.out.println("request range : " + range);
// range not requested : Firefox does not send range headers
if (range == null) {
final StreamingOutput output = new StreamingOutput() {
@Override
public void write(OutputStream out) {
try (FileInputStream in = new FileInputStream(file)) {
byte[] buf = new byte[1024 * 1024];
int len;
while ((len = in.read(buf)) != -1) {
try {
out.write(buf, 0, len);
out.flush();
//System.out.println("---- wrote " + len + " bytes file ----");
} catch (IOException ex) {
System.out.println("remote close connection");
break;
}
}
} catch (IOException ex) {
throw new InternalServerErrorException(ex);
}
}
};
Response.ResponseBuilder out = Response.ok(output)
.header(HttpHeaders.CONTENT_LENGTH, file.length());
if (mimeType != null) {
out.type(mimeType);
}
return out.build();
// @Secured
@GET
@Path("{id}/{name}")
@PermitTokenInURI
@RolesAllowed("USER")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@Operation(description = "Get back some data from the data environment (with a beautifull name (permit download with basic name)", tags = "SYSTEM")
public Response retriveDataFull(@Context final SecurityContext sc, @QueryParam(HttpHeaders.AUTHORIZATION) final String token, @HeaderParam("Range") final String range,
@PathParam("id") final Long id, @PathParam("name") final String name) throws Exception {
final GenericContext gc = (GenericContext) sc.getUserPrincipal();
// logger.info("===================================================");
LOGGER.info("== DATA retriveDataFull ? id={} user={}", id, (gc == null ? "null" : gc.userByToken));
// logger.info("===================================================");
final Data value = getSmall(id);
if (value == null) {
return Response.status(404).entity("media NOT FOUND: " + id).type("text/plain").build();
}
return buildStream(ConfigBaseVariable.getMediaDataFolder() + File.separator + id + File.separator + "data", range, value.mimeType);
}
}
/** Adapted from http://stackoverflow.com/questions/12768812/video-streaming-to-ipad-does-not-work-with-tapestry5/12829541#12829541
*
* @param range range header
* @return Streaming output
* @throws Exception IOException if an error occurs in streaming. */
private Response buildStream(final String filename, final String range, final String mimeType) throws Exception {
final File file = new File(filename);
// logger.info("request range : {}", range);
// range not requested : Firefox does not send range headers
if (range == null) {
final StreamingOutput output = new StreamingOutput() {
@Override
public void write(final OutputStream out) {
try (FileInputStream in = new FileInputStream(file)) {
final byte[] buf = new byte[1024 * 1024];
int len;
while ((len = in.read(buf)) != -1) {
try {
out.write(buf, 0, len);
out.flush();
// logger.info("---- wrote {} bytes file ----", len);
} catch (final IOException ex) {
LOGGER.info("remote close connection");
break;
}
}
} catch (final IOException ex) {
throw new InternalServerErrorException(ex);
}
}
};
final Response.ResponseBuilder out = Response.ok(output).header(HttpHeaders.CONTENT_LENGTH, file.length());
if (mimeType != null) {
out.type(mimeType);
}
return out.build();
String[] ranges = range.split("=")[1].split("-");
final long from = Long.parseLong(ranges[0]);
}
//System.out.println("request range : " + ranges.length);
//Chunk media if the range upper bound is unspecified. Chrome, Opera sends "bytes=0-"
long to = CHUNK_SIZE + from;
if (ranges.length == 1) {
to = file.length() - 1;
} else {
if (to >= file.length()) {
to = (long) (file.length() - 1);
}
}
final String responseRange = String.format("bytes %d-%d/%d", from, to, file.length());
//System.out.println("responseRange : " + responseRange);
final RandomAccessFile raf = new RandomAccessFile(file, "r");
raf.seek(from);
final String[] ranges = range.split("=")[1].split("-");
final long from = Long.parseLong(ranges[0]);
final long len = to - from + 1;
final MediaStreamer streamer = new MediaStreamer(len, raf);
Response.ResponseBuilder out = Response.ok(streamer)
.status(Response.Status.PARTIAL_CONTENT)
.header("Accept-Ranges", "bytes")
.header("Content-Range", responseRange)
.header(HttpHeaders.CONTENT_LENGTH, streamer.getLenth())
.header(HttpHeaders.LAST_MODIFIED, new Date(file.lastModified()));
if (mimeType != null) {
out.type(mimeType);
}
return out.build();
}
// logger.info("request range : {}", ranges.length);
// Chunk media if the range upper bound is unspecified. Chrome, Opera sends "bytes=0-"
long to = CHUNK_SIZE + from;
if (ranges.length == 1) {
to = file.length() - 1;
} else if (to >= file.length()) {
to = file.length() - 1;
}
final String responseRange = String.format("bytes %d-%d/%d", from, to, file.length());
// logger.info("responseRange: {}", responseRange);
final RandomAccessFile raf = new RandomAccessFile(file, "r");
raf.seek(from);
public static void undelete(Long id) throws Exception {
SqlWrapper.unsetDelete(Data.class, id);
final long len = to - from + 1;
final MediaStreamer streamer = new MediaStreamer(len, raf);
final Response.ResponseBuilder out = Response.ok(streamer).status(Response.Status.PARTIAL_CONTENT).header("Accept-Ranges", "bytes").header("Content-Range", responseRange)
.header(HttpHeaders.CONTENT_LENGTH, streamer.getLenth()).header(HttpHeaders.LAST_MODIFIED, new Date(file.lastModified()));
if (mimeType != null) {
out.type(mimeType);
}
return out.build();
}
public static void undelete(final Long id) throws Exception {
DataAccess.unsetDelete(Data.class, id);
}
}

View File

@@ -3,103 +3,113 @@ package org.kar.archidata.api;
import java.io.File;
import java.util.List;
import javax.ws.rs.*;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.PathSegment;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.kar.archidata.annotation.security.PermitAll;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.swagger.v3.oas.annotations.Operation;
import jakarta.annotation.security.PermitAll;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.NotSupportedException;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.core.CacheControl;
import jakarta.ws.rs.core.PathSegment;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.Response.ResponseBuilder;
public class FrontGeneric {
protected String baseFrontFolder = "/data/front";
private String getExtension(String filename) {
if (filename.contains(".")) {
return filename.substring(filename.lastIndexOf(".") + 1);
}
return "";
}
private Response retrive(String fileName) throws Exception {
String filePathName = baseFrontFolder + File.separator + fileName;
String extention = getExtension(filePathName);
String mineType = null;
System.out.println("try retrive : '" + filePathName + "' '" + extention + "'");
if (extention.length() !=0 && extention.length() <= 5) {
if (extention.equalsIgnoreCase("jpg") || extention.equalsIgnoreCase("jpeg")) {
mineType = "image/jpeg";
} else if (extention.equalsIgnoreCase("gif")) {
mineType = "image/gif";
} else if (extention.equalsIgnoreCase("png")) {
mineType = "image/png";
} else if (extention.equalsIgnoreCase("svg")) {
mineType = "image/svg+xml";
} else if (extention.equalsIgnoreCase("webp")) {
mineType = "image/webp";
} else if (extention.equalsIgnoreCase("js")) {
mineType = "application/javascript";
} else if (extention.equalsIgnoreCase("json")) {
mineType = "application/json";
} else if (extention.equalsIgnoreCase("ico")) {
mineType = "image/x-icon";
} else if (extention.equalsIgnoreCase("html")) {
mineType = "text/html";
} else if (extention.equalsIgnoreCase("css")) {
mineType = "text/css";
} else {
return Response.status(403).
entity("Not supported model: '" + fileName + "'").
type("text/plain").
build();
}
} else {
mineType = "text/html";
filePathName = baseFrontFolder + File.separator + "index.html";
}
System.out.println(" ==> '" + filePathName + "'");
// reads input image
File download = new File(filePathName);
if (!download.exists()) {
return Response.status(404).
entity("Not Found: '" + fileName + "' extension='" + extention + "'").
type("text/plain").
build();
}
ResponseBuilder response = Response.ok((Object)download);
// use this if I want to download the file:
//response.header("Content-Disposition", "attachment; filename=" + fileName);
CacheControl cc = new CacheControl();
cc.setMaxAge(60);
cc.setNoCache(false);
response.cacheControl(cc);
response.type(mineType);
return response.build();
}
private static final Logger LOGGER = LoggerFactory.getLogger(FrontGeneric.class);
@GET
@PermitAll()
//@Produces(MediaType.APPLICATION_OCTET_STREAM)
//@CacheMaxAge(time = 1, unit = TimeUnit.DAYS)
public Response retrive0() throws Exception {
return retrive("index.html");
}
@GET
@Path("{any: .*}")
@PermitAll()
//@Produces(MediaType.APPLICATION_OCTET_STREAM)
//@CacheMaxAge(time = 10, unit = TimeUnit.DAYS)
public Response retrive1(@PathParam("any") List<PathSegment> segments) throws Exception {
String filename = "";
for (PathSegment elem: segments) {
if (!filename.isEmpty()) {
filename += File.separator;
}
filename += elem.getPath();
}
return retrive(filename);
}
protected String baseFrontFolder = "/data/front";
private String getExtension(final String filename) {
if (filename.contains(".")) {
return filename.substring(filename.lastIndexOf(".") + 1);
}
return "";
}
private Response retrive(final String fileName) throws Exception {
String filePathName = this.baseFrontFolder + File.separator + fileName;
final String extention = getExtension(filePathName);
String mineType = null;
LOGGER.debug("try retrive : '{}' '{}'", filePathName, extention);
if (extention.length() != 0 && extention.length() <= 5) {
if (extention.equalsIgnoreCase("jpg") || extention.equalsIgnoreCase("jpeg")) {
mineType = "image/jpeg";
} else if (extention.equalsIgnoreCase("gif")) {
mineType = "image/gif";
} else if (extention.equalsIgnoreCase("png")) {
mineType = "image/png";
} else if (extention.equalsIgnoreCase("svg")) {
mineType = "image/svg+xml";
} else if (extention.equalsIgnoreCase("webp")) {
mineType = "image/webp";
} else if (extention.equalsIgnoreCase("js")) {
mineType = "application/javascript";
} else if (extention.equalsIgnoreCase("json")) {
mineType = "application/json";
} else if (extention.equalsIgnoreCase("ico")) {
mineType = "image/x-icon";
} else if (extention.equalsIgnoreCase("html")) {
mineType = "text/html";
} else if (extention.equalsIgnoreCase("css")) {
mineType = "text/css";
} else if (extention.equalsIgnoreCase("mka")) {
mineType = "audio/x-matroska";
} else if (extention.equalsIgnoreCase("mkv")) {
mineType = "video/x-matroska";
} else if (extention.equalsIgnoreCase("webm")) {
mineType = "video/webm";
} else {
throw new NotSupportedException("Not supported model: '" + fileName + "'");
}
} else {
mineType = "text/html";
filePathName = this.baseFrontFolder + File.separator + "index.html";
}
LOGGER.debug(" ==> '[}'", filePathName);
// reads input image
final File download = new File(filePathName);
if (!download.exists()) {
throw new NotFoundException("Not Found: '" + fileName + "' extension='" + extention + "'");
}
final ResponseBuilder response = Response.ok(download);
// use this if I want to download the file:
// response.header("Content-Disposition", "attachment; filename=" + fileName);
final CacheControl cc = new CacheControl();
cc.setMaxAge(60);
cc.setNoCache(false);
response.cacheControl(cc);
response.type(mineType);
return response.build();
}
@GET
@PermitAll()
@Operation(description = "Retrieve native element (index)", tags = "SYSTEM")
// @Produces(MediaType.APPLICATION_OCTET_STREAM)
// @CacheMaxAge(time = 1, unit = TimeUnit.DAYS)
public Response retrive0() throws Exception {
return retrive("index.html");
}
@GET
@Path("{any: .*}")
@PermitAll()
@Operation(description = "Get specific file from the front environment", tags = "SYSTEM")
// @Produces(MediaType.APPLICATION_OCTET_STREAM)
// @CacheMaxAge(time = 10, unit = TimeUnit.DAYS)
public Response retrive1(@PathParam("any") final List<PathSegment> segments) throws Exception {
String filename = "";
for (final PathSegment elem : segments) {
if (!filename.isEmpty()) {
filename += File.separator;
}
filename += elem.getPath();
}
return retrive(filename);
}
}
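
FrontGeneric has no @Path of its own, so an application presumably exposes it through a subclass; a sketch with an invented path and front-end folder:

import org.kar.archidata.api.FrontGeneric;

import jakarta.ws.rs.Path;

@Path("/")
public class FrontResource extends FrontGeneric {
	public FrontResource() {
		// Hypothetical location of the compiled front-end bundle.
		this.baseFrontFolder = "/data/front/my-application";
	}
}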

View File

@@ -1,56 +1,61 @@
package org.kar.archidata.api;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.ws.rs.InternalServerErrorException;
import jakarta.ws.rs.WebApplicationException;
import jakarta.ws.rs.core.StreamingOutput;
public class MediaStreamer implements StreamingOutput {
private final int CHUNK_SIZE = 1024 * 1024; // 1MB chunks
final byte[] buf = new byte[CHUNK_SIZE];
private long length;
private RandomAccessFile raf;
private static final Logger LOGGER = LoggerFactory.getLogger(MediaStreamer.class);
private final int CHUNK_SIZE = 1024 * 1024; // 1MB chunks
final byte[] buf = new byte[this.CHUNK_SIZE];
private long length;
private final RandomAccessFile raf;
public MediaStreamer(long length, RandomAccessFile raf) throws IOException {
//System.out.println("request stream of " + length / 1024 + " data");
if (length<0) {
throw new IOException("Wrong size of the file to stream: " + length);
}
this.length = length;
this.raf = raf;
}
public MediaStreamer(final long length, final RandomAccessFile raf) throws IOException {
// logger.info("request stream of {} data", length / 1024);
if (length < 0) {
throw new IOException("Wrong size of the file to stream: " + length);
}
this.length = length;
this.raf = raf;
}
@Override
public void write(OutputStream outputStream) {
try {
while (length != 0) {
int read = raf.read(buf, 0, buf.length > length ? (int) length : buf.length);
try {
outputStream.write(buf, 0, read);
} catch (IOException ex) {
System.out.println("remote close connection");
break;
}
length -= read;
}
} catch (IOException ex) {
throw new InternalServerErrorException(ex);
} catch (WebApplicationException ex) {
throw new InternalServerErrorException(ex);
} finally {
try {
raf.close();
} catch (IOException ex) {
ex.printStackTrace();
throw new InternalServerErrorException(ex);
}
}
}
@Override
public void write(final OutputStream outputStream) {
try {
while (this.length != 0) {
final int read = this.raf.read(this.buf, 0, this.buf.length > this.length ? (int) this.length : this.buf.length);
try {
outputStream.write(this.buf, 0, read);
} catch (final IOException ex) {
LOGGER.info("remote close connection");
break;
}
this.length -= read;
}
} catch (final IOException ex) {
throw new InternalServerErrorException(ex);
} catch (final WebApplicationException ex) {
throw new InternalServerErrorException(ex);
} finally {
try {
this.raf.close();
} catch (final IOException ex) {
ex.printStackTrace();
throw new InternalServerErrorException(ex);
}
}
}
public long getLenth() {
return length;
}
public long getLenth() {
return this.length;
}
}

View File

@@ -0,0 +1,33 @@
package org.kar.archidata.api;
import io.swagger.v3.jaxrs2.integration.resources.BaseOpenApiResource;
import io.swagger.v3.oas.annotations.Operation;
import jakarta.annotation.security.PermitAll;
import jakarta.servlet.ServletConfig;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.Application;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.HttpHeaders;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.UriInfo;
@Path("/openapi")
public class openApiResource extends BaseOpenApiResource {
@Context
ServletConfig config;
@Context
Application app;
@GET
@Path("swagger.json")
@Produces({ MediaType.APPLICATION_JSON })
@PermitAll
@Operation(hidden = true, description = "Get the OPEN-API description", tags = "SYSTEM")
public Response getDescription(@Context final HttpHeaders headers, @Context final UriInfo uriInfo) throws Exception {
return getOpenApi(headers, this.config, this.app, uriInfo, "json");
}
}
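
A hedged sketch of wiring the resource above into a Jersey server; the ResourceConfig-based bootstrap is an assumption about how the application is assembled, not something shown in this change.

import org.glassfish.jersey.server.ResourceConfig;

import org.kar.archidata.api.openApiResource;

public class OpenApiWiring {
	public static ResourceConfig addOpenApi(final ResourceConfig rc) {
		// Exposes GET /openapi/swagger.json as declared by openApiResource.
		rc.register(openApiResource.class);
		return rc;
	}
}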

View File

@@ -0,0 +1,29 @@
package org.kar.archidata.backup;
import java.util.ArrayList;
import java.util.List;
public class BackupEngine {
public enum StoreMode {
JSON, SQL
}
private final String pathStore;
private final StoreMode mode;
private final List<Class<?>> classes = new ArrayList<>();
public BackupEngine(final String pathToStoreDB, final StoreMode mode) {
this.pathStore = pathToStoreDB;
this.mode = mode;
}
public void addClass(final Class<?> clazz) {
this.classes.add(clazz);
}
public void store() {
// TODO ...
}
}
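
A usage sketch for the backup skeleton above; store() is still a TODO in the class, so this only illustrates the intended call sequence, with an invented path and a stand-in model class.

import org.kar.archidata.backup.BackupEngine;
import org.kar.archidata.backup.BackupEngine.StoreMode;

public class BackupExample {
	public static void main(final String[] args) {
		final BackupEngine backup = new BackupEngine("/data/backup/my-app", StoreMode.JSON);
		backup.addClass(Object.class); // stand-in for a real model class
		backup.store(); // currently a no-op (TODO in BackupEngine)
	}
}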

View File

@@ -0,0 +1,26 @@
package org.kar.archidata.catcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
public class ExceptionCatcher implements ExceptionMapper<Exception> {
private static final Logger LOGGER = LoggerFactory.getLogger(ExceptionCatcher.class);
@Override
public Response toResponse(final Exception exception) {
LOGGER.warn("Catch exception (not managed...):");
final RestErrorResponse ret = build(exception);
LOGGER.error("Error UUID={}", ret.uuid);
exception.printStackTrace();
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ret).type(MediaType.APPLICATION_JSON).build();
}
private RestErrorResponse build(final Exception exception) {
return new RestErrorResponse(Response.Status.INTERNAL_SERVER_ERROR, "Catch Unknown Exception", exception.getMessage());
}
}
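
This catcher and the ones in the following files are ordinary JAX-RS ExceptionMappers; a hedged sketch of registering them with a Jersey ResourceConfig (the project's actual bootstrap is not part of this change):

import org.glassfish.jersey.server.ResourceConfig;

import org.kar.archidata.catcher.ExceptionCatcher;
import org.kar.archidata.catcher.FailException404API;
import org.kar.archidata.catcher.FailExceptionCatcher;
import org.kar.archidata.catcher.InputExceptionCatcher;
import org.kar.archidata.catcher.JacksonCatcher;
import org.kar.archidata.catcher.SystemExceptionCatcher;

public class CatcherWiring {
	public static ResourceConfig addCatchers(final ResourceConfig rc) {
		// Each mapper converts one exception family into a RestErrorResponse JSON body.
		rc.register(ExceptionCatcher.class);
		rc.register(InputExceptionCatcher.class);
		rc.register(FailExceptionCatcher.class);
		rc.register(SystemExceptionCatcher.class);
		rc.register(JacksonCatcher.class);
		rc.register(FailException404API.class);
		return rc;
	}
}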

View File

@@ -0,0 +1,25 @@
package org.kar.archidata.catcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.ws.rs.ClientErrorException;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
public class FailException404API implements ExceptionMapper<ClientErrorException> {
private static final Logger LOGGER = LoggerFactory.getLogger(FailException404API.class);
@Override
public Response toResponse(final ClientErrorException exception) {
final RestErrorResponse ret = build(exception);
LOGGER.error("Error UUID={}", ret.uuid);
return Response.status(exception.getResponse().getStatusInfo().toEnum()).entity(ret).type(MediaType.APPLICATION_JSON).build();
}
private RestErrorResponse build(final ClientErrorException exception) {
return new RestErrorResponse(exception.getResponse().getStatusInfo().toEnum(), "Catch system exception", exception.getMessage());
}
}

View File

@@ -0,0 +1,27 @@
package org.kar.archidata.catcher;
import org.kar.archidata.exception.FailException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
public class FailExceptionCatcher implements ExceptionMapper<FailException> {
private static final Logger LOGGER = LoggerFactory.getLogger(FailExceptionCatcher.class);
@Override
public Response toResponse(final FailException exception) {
final RestErrorResponse ret = build(exception);
LOGGER.error("Error UUID={}", ret.uuid);
// Do not display the backtrace ==> this may be a normal case ...
// exception.printStackTrace();
return Response.status(exception.status).entity(ret).type(MediaType.APPLICATION_JSON).build();
}
private RestErrorResponse build(final FailException exception) {
return new RestErrorResponse(exception.status, "Request Fail", exception.getMessage());
}
}

View File

@@ -0,0 +1,26 @@
package org.kar.archidata.catcher;
import org.kar.archidata.exception.InputException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
public class InputExceptionCatcher implements ExceptionMapper<InputException> {
private static final Logger LOGGER = LoggerFactory.getLogger(InputExceptionCatcher.class);
@Override
public Response toResponse(final InputException exception) {
final RestErrorResponse ret = build(exception);
LOGGER.error("Error UUID={} ==> '{}'=>'{}'", ret.uuid, exception.missingVariable, exception.getLocalizedMessage());
// exception.printStackTrace();
return Response.status(exception.status).entity(ret).type(MediaType.APPLICATION_JSON).build();
}
private RestErrorResponse build(final InputException exception) {
return new RestErrorResponse(exception.status, "Error on input='" + exception.missingVariable + "'", exception.getMessage());
}
}

View File

@@ -0,0 +1,28 @@
package org.kar.archidata.catcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
public class JacksonCatcher implements ExceptionMapper<JsonProcessingException> {
private static final Logger LOGGER = LoggerFactory.getLogger(JacksonCatcher.class);
@Override
public Response toResponse(final JsonProcessingException exception) {
LOGGER.warn("Catch exception Input data parsing:");
final RestErrorResponse ret = build(exception);
LOGGER.error("Error UUID={}", ret.uuid);
exception.printStackTrace();
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ret).type(MediaType.APPLICATION_JSON).build();
}
private RestErrorResponse build(final Exception exception) {
return new RestErrorResponse(Response.Status.INTERNAL_SERVER_ERROR, "Catch Unknown Exception", exception.getMessage());
}
}

View File

@@ -0,0 +1,38 @@
package org.kar.archidata.catcher;
import java.time.Instant;
import java.util.UUID;
import jakarta.ws.rs.core.Response;
public class RestErrorResponse {
public UUID uuid = UUID.randomUUID();
public String time;
public String error;
public String message;
final public int status;
final public String statusMessage;
public RestErrorResponse(final Response.Status status, final String time, final String error, final String message) {
this.time = time;
this.error = error;
this.message = message;
this.status = status.getStatusCode();
this.statusMessage = status.getReasonPhrase();
}
public RestErrorResponse(final Response.Status status, final String error, final String message) {
this.time = Instant.now().toString();
this.error = error;
this.message = message;
this.status = status.getStatusCode();
this.statusMessage = status.getReasonPhrase();
}
public RestErrorResponse(final Response.Status status) {
this.time = Instant.now().toString();
this.status = status.getStatusCode();
this.statusMessage = status.getReasonPhrase();
}
}
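
For illustration, here is one of these error payloads built by hand; the status and messages are invented, but the serialized fields match the public members above.

import org.kar.archidata.catcher.RestErrorResponse;

import jakarta.ws.rs.core.Response;

public class RestErrorResponseExample {
	public static void main(final String[] args) {
		final RestErrorResponse err = new RestErrorResponse(Response.Status.NOT_FOUND, "Request Fail", "unknown media id: 42");
		// The catchers serialize these public fields to JSON:
		// uuid (random), time (ISO instant), error, message, status=404, statusMessage="Not Found".
		System.out.println(err.uuid + " " + err.status + " " + err.statusMessage);
	}
}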

View File

@@ -0,0 +1,26 @@
package org.kar.archidata.catcher;
import org.kar.archidata.exception.SystemException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
public class SystemExceptionCatcher implements ExceptionMapper<SystemException> {
private static final Logger LOGGER = LoggerFactory.getLogger(SystemExceptionCatcher.class);
@Override
public Response toResponse(final SystemException exception) {
final RestErrorResponse ret = build(exception);
LOGGER.error("Error UUID={}", ret.uuid);
exception.printStackTrace();
return Response.status(exception.status).entity(ret).type(MediaType.APPLICATION_JSON).build();
}
private RestErrorResponse build(final SystemException exception) {
return new RestErrorResponse(exception.status, "System error", exception.getMessage());
}
}

View File

@@ -0,0 +1,21 @@
package org.kar.archidata.dataAccess;
/** Java does not permit returning data (e.g. an integer) through a function parameter. This class permits updating a value as an in/out function parameter. */
public class CountInOut {
// internal value of the stream
public int value = 0;
/** Default constructor */
public CountInOut() {}
/** Constructor with the initial value.
* @param i Initial Value */
public CountInOut(final int i) {
this.value = i;
}
/** Increment the value by one. */
public void inc() {
this.value++;
}
}
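
A small sketch of the in/out-parameter pattern this class enables; the consume() helper is invented for illustration.

import java.util.List;

import org.kar.archidata.dataAccess.CountInOut;

public class CountInOutExample {
	// Hypothetical helper: reports how many values it kept through the in/out counter.
	static void consume(final List<String> values, final CountInOut count) {
		for (final String value : values) {
			if (value != null && !value.isEmpty()) {
				count.inc();
			}
		}
	}

	public static void main(final String[] args) {
		final CountInOut count = new CountInOut();
		consume(List.of("a", "", "b"), count);
		System.out.println("consumed=" + count.value); // consumed=2
	}
}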

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,109 @@
package org.kar.archidata.dataAccess;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import jakarta.validation.constraints.NotNull;
public interface DataAccessAddOn {
/** Get the Class of the declaration annotation
* @return The annotation class */
Class<?> getAnnotationClass();
/** Get the SQL type that needs to be declared for the specific field type.
* @param elem Field to declare.
* @return SQL type to create. */
String getSQLFieldType(Field elem) throws Exception;
/** Check if the field is managed by the local add-on.
* @param elem Field to inspect.
* @return True if the field is managed by the current add-on. */
boolean isCompatibleField(Field elem);
/** Insert data in the specific field (the field must be in the current DB, otherwise it does not work at all).
* @param ps DB statement interface.
* @param data The data to inject.
* @param iii The index of injection
* @return the new index of injection in case of multiple value management
* @throws SQLException */
void insertData(PreparedStatement ps, final Field field, Object data, CountInOut iii) throws Exception, SQLException, IllegalArgumentException, IllegalAccessException;
/** Element can be inserted in a single request
* @param field
* @return */
default boolean canInsert(final Field field) {
return false;
}
/** Element can be retrieved with the specific mode
* @param field
* @return */
default boolean canRetrieve(final Field field) {
return false;
}
void generateQuerry(@NotNull String tableName, @NotNull Field field, @NotNull final StringBuilder querySelect, @NotNull final StringBuilder query, @NotNull String name, @NotNull CountInOut count,
QueryOptions options) throws Exception;
// Return the number of columns read
void fillFromQuerry(ResultSet rs, Field field, Object data, CountInOut count, QueryOptions options, final List<LazyGetter> lazyCall)
throws Exception, SQLException, IllegalArgumentException, IllegalAccessException;
/** Create associated table of the specific element.
* @param tableName
* @param field
* @param mainTableBuilder
* @param preActionList
* @param postActionList
* @param createIfNotExist
* @param createDrop
* @param fieldId
* @throws Exception */
void createTables(String tableName, Field field, StringBuilder mainTableBuilder, List<String> preActionList, List<String> postActionList, boolean createIfNotExist, boolean createDrop, int fieldId)
throws Exception;
/** Some actions must be done asynchronously when updating or removing an element
* @param field
* @return */
default boolean isInsertAsync(final Field field) throws Exception {
return false;
}
/** When insert is marked async, this function permits creating or updating the data
* @param tableName Name of the Table.
* @param localId Local ID of the current table
* @param field Field that is updated.
* @param data Data that might be inserted.
* @param actions Asynchronous action to do after main request. */
default void asyncInsert(final String tableName, final Object localId, final Field field, final Object data, final List<LazyGetter> actions) throws Exception {
}
/** Some actions must be done asynchronously when updating or removing an element
* @param field
* @return */
default boolean isUpdateAsync(final Field field) throws Exception {
return false;
}
/** When insert is marked async, this function permits creating or updating the data
* @param tableName Name of the Table.
* @param localId Local ID of the current table
* @param field Field that is updated.
* @param data Data that might be inserted.
* @param actions Asynchronous action to do after main request. */
default void asyncUpdate(final String tableName, final Object localId, final Field field, final Object data, final List<LazyGetter> actions) throws Exception {
}
default void drop(final String tableName, final Field field) throws Exception {
}
default void cleanAll(final String tableName, final Field field) throws Exception {
}
}
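
A heavily hedged skeleton of what an add-on implementing this interface might look like; the use of @Deprecated as the trigger annotation (so the sketch compiles without inventing a new type), the plain text column mapping and every method body are assumptions meant only to show the shape of the contract, not how the real add-ons behave.

import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.List;

import org.kar.archidata.dataAccess.CountInOut;
import org.kar.archidata.dataAccess.DataAccessAddOn;
import org.kar.archidata.dataAccess.LazyGetter;
import org.kar.archidata.dataAccess.QueryOptions;

public class StubAddOn implements DataAccessAddOn {
	@Override
	public Class<?> getAnnotationClass() {
		return Deprecated.class; // stand-in: a real add-on returns its own annotation type
	}

	@Override
	public String getSQLFieldType(final Field elem) throws Exception {
		return "text"; // this sketch stores everything as text
	}

	@Override
	public boolean isCompatibleField(final Field elem) {
		return elem.getDeclaredAnnotation(Deprecated.class) != null;
	}

	@Override
	public void insertData(final PreparedStatement ps, final Field field, final Object data, final CountInOut iii) throws Exception {
		// Assumption: iii.value is the next statement parameter index.
		ps.setString(iii.value, String.valueOf(field.get(data)));
		iii.inc();
	}

	@Override
	public void generateQuerry(final String tableName, final Field field, final StringBuilder querySelect, final StringBuilder query, final String name, final CountInOut count,
			final QueryOptions options) throws Exception {
		// Select the raw column; count tracks how many columns this add-on contributed.
		querySelect.append(tableName).append(".").append(name);
		count.inc();
	}

	@Override
	public void fillFromQuerry(final ResultSet rs, final Field field, final Object data, final CountInOut count, final QueryOptions options, final List<LazyGetter> lazyCall) throws Exception {
		field.set(data, rs.getString(count.value));
		count.inc();
	}

	@Override
	public void createTables(final String tableName, final Field field, final StringBuilder mainTableBuilder, final List<String> preActionList, final List<String> postActionList,
			final boolean createIfNotExist, final boolean createDrop, final int fieldId) throws Exception {
		mainTableBuilder.append(fieldId == 0 ? "\n\t\t`" : ",\n\t\t`");
		mainTableBuilder.append(field.getName()); // a real add-on would resolve the column name via AnnotationTools
		mainTableBuilder.append("` text");
	}
}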

View File

@@ -0,0 +1,394 @@
package org.kar.archidata.dataAccess;
import java.lang.reflect.Field;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.kar.archidata.annotation.AnnotationTools;
import org.kar.archidata.annotation.CreationTimestamp;
import org.kar.archidata.annotation.DataIfNotExists;
import org.kar.archidata.annotation.UpdateTimestamp;
import org.kar.archidata.dataAccess.options.CreateDropTable;
import org.kar.archidata.exception.DataAccessException;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonValue;
import jakarta.persistence.GenerationType;
public class DataFactory {
static final Logger LOGGER = LoggerFactory.getLogger(DataFactory.class);
public static String convertTypeInSQL(final Class<?> type, final String fieldName) throws Exception {
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
if (type == Long.class || type == long.class) {
return "bigint";
}
if (type == Integer.class || type == int.class) {
return "int";
}
if (type == Boolean.class || type == boolean.class) {
return "tinyint(1)";
}
if (type == Float.class || type == float.class) {
return "float";
}
if (type == Double.class || type == double.class) {
return "double";
}
if (type == Date.class || type == Timestamp.class) {
return "timestamp(3)";
}
if (type == LocalDate.class) {
return "date";
}
if (type == LocalTime.class) {
return "time";
}
if (type == String.class) {
return "text";
}
if (type == JsonValue.class) {
return "json";
}
if (type.isEnum()) {
final Object[] arr = type.getEnumConstants();
final StringBuilder out = new StringBuilder();
out.append("ENUM(");
boolean first = true;
for (final Object elem : arr) {
if (!first) {
out.append(",");
}
first = false;
out.append("'");
out.append(elem.toString());
out.append("'");
}
out.append(")");
return out.toString();
}
} else {
if (type == Long.class || type == long.class) {
return "INTEGER";
}
if (type == Integer.class || type == int.class) {
return "INTEGER";
}
if (type == Boolean.class || type == boolean.class) {
return "INTEGER";
}
if (type == Float.class || type == float.class) {
return "REAL";
}
if (type == Double.class || type == double.class) {
return "REAL";
}
if (type == Date.class || type == Timestamp.class) {
return "DATETIME";
}
if (type == LocalDate.class) {
return "DATE";
}
if (type == LocalTime.class) {
return "TIME";
}
if (type == String.class) {
return "text";
}
if (type == JsonValue.class) {
return "text";
}
if (type.isEnum()) {
final Object[] arr = type.getEnumConstants();
final StringBuilder out = new StringBuilder();
out.append("TEXT CHECK(");
out.append(fieldName);
out.append(" IN (");
boolean first = true;
for (final Object elem : arr) {
if (!first) {
out.append(",");
}
first = false;
out.append("'");
out.append(elem.toString());
out.append("'");
}
out.append(" ) )");
return out.toString();
}
}
throw new DataAccessException("Imcompatible type of element in object for: " + type.getCanonicalName());
}
public static void createTablesSpecificType(final String tableName, final Field elem, final StringBuilder mainTableBuilder, final List<String> preOtherTables, final List<String> postOtherTables,
final boolean createIfNotExist, final boolean createDrop, final int fieldId, final Class<?> classModel) throws Exception {
final String name = AnnotationTools.getFieldName(elem);
final int limitSize = AnnotationTools.getLimitSize(elem);
final boolean notNull = AnnotationTools.getColumnNotNull(elem);
final boolean primaryKey = AnnotationTools.isPrimaryKey(elem);
final GenerationType strategy = AnnotationTools.getStrategy(elem);
final boolean createTime = elem.getDeclaredAnnotationsByType(CreationTimestamp.class).length != 0;
final boolean updateTime = elem.getDeclaredAnnotationsByType(UpdateTimestamp.class).length != 0;
final String comment = AnnotationTools.getComment(elem);
final String defaultValue = AnnotationTools.getDefault(elem);
if (fieldId == 0) {
mainTableBuilder.append("\n\t\t`");
} else {
mainTableBuilder.append(",\n\t\t`");
}
mainTableBuilder.append(name);
mainTableBuilder.append("` ");
String typeValue = null;
typeValue = convertTypeInSQL(classModel, name);
if ("text".equals(typeValue) && !"sqlite".equals(ConfigBaseVariable.getDBType())) {
if (limitSize > 0) {
mainTableBuilder.append("varchar(");
mainTableBuilder.append(limitSize);
mainTableBuilder.append(")");
} else {
mainTableBuilder.append("text");
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append(" CHARACTER SET utf8");
}
}
} else {
mainTableBuilder.append(typeValue);
}
mainTableBuilder.append(" ");
if (notNull) {
if (!primaryKey || !"sqlite".equalsIgnoreCase(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("NOT NULL ");
}
if (defaultValue == null) {
if (updateTime || createTime) {
mainTableBuilder.append("DEFAULT CURRENT_TIMESTAMP");
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("(3)");
}
mainTableBuilder.append(" ");
}
if (updateTime) {
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("ON UPDATE CURRENT_TIMESTAMP");
mainTableBuilder.append("(3)");
} else {
// TODO: add trigger:
/* CREATE TRIGGER your_table_trig AFTER UPDATE ON your_table BEGIN update your_table SET updated_on = datetime('now') WHERE user_id = NEW.user_id; END; */
final StringBuilder triggerBuilder = new StringBuilder();
triggerBuilder.append("CREATE TRIGGER ");
triggerBuilder.append(tableName);
triggerBuilder.append("_update_trigger AFTER UPDATE ON ");
triggerBuilder.append(tableName);
triggerBuilder.append(" \nBEGIN \n update ");
triggerBuilder.append(tableName);
triggerBuilder.append(" SET ");
triggerBuilder.append(name);
// triggerBuilder.append(" = datetime('now') WHERE id = NEW.id; \n");
triggerBuilder.append(" = strftime('%Y-%m-%d %H:%M:%f', 'now') WHERE id = NEW.id; \n");
triggerBuilder.append("END;");
postOtherTables.add(triggerBuilder.toString());
}
mainTableBuilder.append(" ");
}
} else {
mainTableBuilder.append("DEFAULT ");
if ("CURRENT_TIMESTAMP(3)".equals(defaultValue) && "sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("CURRENT_TIMESTAMP");
} else {
mainTableBuilder.append(defaultValue);
}
mainTableBuilder.append(" ");
if (updateTime) {
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("ON UPDATE CURRENT_TIMESTAMP");
mainTableBuilder.append("(3)");
}
mainTableBuilder.append(" ");
}
}
} else if (defaultValue == null) {
if (updateTime || createTime) {
if ("sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("DEFAULT CURRENT_TIMESTAMP ");
} else {
mainTableBuilder.append("DEFAULT CURRENT_TIMESTAMP(3) ");
}
} else if (primaryKey) {
mainTableBuilder.append("NOT NULL ");
} else {
mainTableBuilder.append("DEFAULT NULL ");
}
} else {
mainTableBuilder.append("DEFAULT ");
mainTableBuilder.append(defaultValue);
mainTableBuilder.append(" ");
}
if (primaryKey && "sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("PRIMARY KEY ");
}
if (strategy == GenerationType.IDENTITY) {
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("AUTO_INCREMENT ");
} else {
mainTableBuilder.append("AUTOINCREMENT ");
}
} else if (strategy != null) {
throw new DataAccessException("Can not generate a stategy different of IDENTITY");
}
if (comment != null && !"sqlite".equals(ConfigBaseVariable.getDBType())) {
mainTableBuilder.append("COMMENT '");
mainTableBuilder.append(comment.replace("'", "''")); // escape single quotes inside the SQL comment string
mainTableBuilder.append("' ");
}
}
private static boolean isFieldFromSuperClass(final Class<?> model, final String fieldName) {
final Class<?> superClass = model.getSuperclass();
if (superClass == null) {
return false;
}
for (final Field field : superClass.getFields()) {
String name;
try {
name = AnnotationTools.getFieldName(field);
if (fieldName.equals(name)) {
return true;
}
} catch (final Exception e) {
// TODO Auto-generated catch block
LOGGER.trace("Catch error field name in parent create data table: {}", e.getMessage());
}
}
return false;
}
public static List<String> createTable(final Class<?> clazz) throws Exception {
return createTable(clazz, null);
}
public static List<String> createTable(final Class<?> clazz, final QueryOptions options) throws Exception {
final String tableName = AnnotationTools.getTableName(clazz, options);
boolean createDrop = false;
if (options != null) {
createDrop = options.exist(CreateDropTable.class);
}
final boolean createIfNotExist = clazz.getDeclaredAnnotationsByType(DataIfNotExists.class).length != 0;
final List<String> preActionList = new ArrayList<>();
final List<String> postActionList = new ArrayList<>();
final StringBuilder out = new StringBuilder();
// Drop Table
if (createIfNotExist && createDrop) {
final StringBuilder tableTmp = new StringBuilder();
tableTmp.append("DROP TABLE IF EXISTS `");
tableTmp.append(tableName);
tableTmp.append("`;");
postActionList.add(tableTmp.toString());
}
// create Table:
out.append("CREATE TABLE `");
out.append(tableName);
out.append("` (");
int fieldId = 0;
LOGGER.debug("===> TABLE `{}`", tableName);
final List<String> primaryKeys = new ArrayList<>();
for (final Field elem : clazz.getFields()) {
// Detect the primary key (only one primary key is supported right now).
if (AnnotationTools.isPrimaryKey(elem)) {
primaryKeys.add(AnnotationTools.getFieldName(elem));
}
}
// Here we insert the data in reverse order ==> the parent class adds its fields at the start (the fields are reordered according to the class hierarchy).
StringBuilder tmpOut = new StringBuilder();
StringBuilder reverseOut = new StringBuilder();
final List<String> alreadyAdded = new ArrayList<>();
Class<?> currentClazz = clazz;
while (currentClazz != null) {
fieldId = 0;
LOGGER.trace("parse class: '{}'", currentClazz.getCanonicalName());
for (final Field elem : clazz.getFields()) {
// static fields are only for internal global declarations ==> skip them.
if (java.lang.reflect.Modifier.isStatic(elem.getModifiers())) {
continue;
}
final String dataName = AnnotationTools.getFieldName(elem);
if (isFieldFromSuperClass(currentClazz, dataName)) {
LOGGER.trace(" SKIP: '{}'", elem.getName());
continue;
}
if (alreadyAdded.contains(dataName)) {
LOGGER.trace(" SKIP2: '{}'", elem.getName());
continue;
}
alreadyAdded.add(dataName);
LOGGER.trace(" + '{}'", elem.getName());
if (DataAccess.isAddOnField(elem)) {
final DataAccessAddOn addOn = DataAccess.findAddOnforField(elem);
LOGGER.trace("Create type for: {} ==> {} (ADD-ON)", AnnotationTools.getFieldName(elem), elem.getType());
if (addOn != null) {
addOn.createTables(tableName, elem, tmpOut, preActionList, postActionList, createIfNotExist, createDrop, fieldId);
} else {
throw new DataAccessException(
"Element matked as add-on but add-on does not loaded: table:" + tableName + " field name=" + AnnotationTools.getFieldName(elem) + " type=" + elem.getType());
}
} else {
LOGGER.trace("Create type for: {} ==> {}", AnnotationTools.getFieldName(elem), elem.getType());
DataFactory.createTablesSpecificType(tableName, elem, tmpOut, preActionList, postActionList, createIfNotExist, createDrop, fieldId, elem.getType());
}
fieldId++;
}
final boolean dataInThisObject = tmpOut.toString().length() > 0;
if (dataInThisObject) {
final boolean dataInPreviousObject = reverseOut.toString().length() > 0;
if (dataInPreviousObject) {
tmpOut.append(", ");
tmpOut.append(reverseOut.toString());
}
reverseOut = tmpOut;
tmpOut = new StringBuilder();
}
currentClazz = currentClazz.getSuperclass();
if (currentClazz == Object.class) {
break;
}
}
out.append(reverseOut.toString());
if (primaryKeys.size() != 0 && !"sqlite".equals(ConfigBaseVariable.getDBType())) {
out.append(",\n\tPRIMARY KEY (`");
for (int iii = 0; iii < primaryKeys.size(); iii++) {
if (iii != 0) {
out.append(",");
}
out.append(primaryKeys.get(iii));
}
out.append("`)");
}
out.append("\n\t)");
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
out.append(" ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci");
}
out.append(";");
preActionList.add(out.toString());
preActionList.addAll(postActionList);
return preActionList;
}
}
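As an aside (not part of this change), the entry point above can be exercised directly to preview the generated SQL. The sketch below is illustrative only: `MyEntity` is a hypothetical model, and it assumes that AnnotationTools resolves the standard jakarta `@Table` and `@Id` annotations.

// Hedged usage sketch: generate and print the CREATE TABLE requests for an annotated class.
import java.util.List;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import org.kar.archidata.dataAccess.DataFactory;

public class CreateTableExample {
    @Table(name = "my_entity")
    public static class MyEntity {
        @Id
        public Long id;
        public String name;
    }

    public static void main(final String[] args) throws Exception {
        final List<String> sqlRequests = DataFactory.createTable(MyEntity.class);
        for (final String request : sqlRequests) {
            System.out.println(request); // each request must then be executed on the target database
        }
    }
}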

View File

@@ -0,0 +1,18 @@
package org.kar.archidata.dataAccess;
// Marked as deprecated while the concept is not ready...
@Deprecated
public class Foreign<T> {
public final Long id;
public final T data;
public Foreign(final Long id) {
this.id = id;
this.data = null;
}
public Foreign(final T data) {
this.id = null;
this.data = data;
}
}

View File

@@ -0,0 +1,5 @@
package org.kar.archidata.dataAccess;
public interface LazyGetter {
void doRequest() throws Exception;
}
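A LazyGetter is simply a deferred request. A minimal sketch of the pattern used by the add-ons further below, which register lambdas while filling an object and let the data-access layer run them afterwards (the printed message is illustrative):

// Minimal sketch: queueing deferred work as LazyGetter lambdas and running it afterwards.
import java.util.ArrayList;
import java.util.List;
import org.kar.archidata.dataAccess.LazyGetter;

public class LazyGetterExample {
    public static void main(final String[] args) throws Exception {
        final List<LazyGetter> lazyCall = new ArrayList<>();
        // the add-ons register callbacks like this one while filling an object...
        lazyCall.add(() -> System.out.println("resolve the foreign data here"));
        // ...and they are executed once the main request has been processed.
        for (final LazyGetter lazy : lazyCall) {
            lazy.doRequest();
        }
    }
}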

View File

@@ -0,0 +1,20 @@
package org.kar.archidata.dataAccess;
import java.sql.PreparedStatement;
public class Limit extends QueryOption {
protected final long limit;
public Limit(final long limit) {
this.limit = limit;
}
public void generateQuerry(final StringBuilder query, final String tableName) {
query.append(" LIMIT ? ");
}
public void injectQuerry(final PreparedStatement ps, final CountInOut iii) throws Exception {
DataAccess.addElement(ps, this.limit, iii);
iii.inc();
}
}

View File

@@ -0,0 +1,41 @@
package org.kar.archidata.dataAccess;
import java.sql.PreparedStatement;
import java.util.List;
public class OrderBy extends QueryOption {
protected final List<OrderItem> childs;
public OrderBy(final List<OrderItem> childs) {
this.childs = childs;
}
public OrderBy(final OrderItem... childs) {
this.childs = List.of(childs);
}
public void generateQuerry(final StringBuilder query, final String tableName) {
if (this.childs.size() >= 1) {
query.append(" ORDER BY ");
}
boolean first = true;
for (final OrderItem elem : this.childs) {
if (first) {
first = false;
} else {
query.append(", ");
}
query.append("`");
query.append(elem.value);
query.append("` ");
query.append(elem.order.toString());
}
// Note: unlike QueryAnd/QueryOr, an ORDER BY clause has no enclosing parenthesis, so there is nothing to close here.
}
public void injectQuerry(final PreparedStatement ps, final CountInOut iii) throws Exception {
// nothing to add.
}
}

View File

@@ -0,0 +1,16 @@
package org.kar.archidata.dataAccess;
public class OrderItem {
public enum Order {
ASC, DESC
};
public final String value;
public final Order order;
public OrderItem(final String value, final Order order) {
this.value = value;
this.order = order;
}
}
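A short sketch of how OrderBy and OrderItem combine to produce an ORDER BY fragment; the table and column names are made up:

// Minimal sketch: building an ORDER BY fragment with OrderBy/OrderItem.
import org.kar.archidata.dataAccess.OrderBy;
import org.kar.archidata.dataAccess.OrderItem;

public class OrderByExample {
    public static void main(final String[] args) {
        final OrderBy order = new OrderBy(
                new OrderItem("name", OrderItem.Order.ASC),
                new OrderItem("createdAt", OrderItem.Order.DESC));
        final StringBuilder query = new StringBuilder();
        order.generateQuerry(query, "my_table");
        // produces roughly:  ORDER BY `name` ASC, `createdAt` DESC
        System.out.println(query);
    }
}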

View File

@@ -0,0 +1,46 @@
package org.kar.archidata.dataAccess;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class QueryAnd implements QueryItem {
protected final List<QueryItem> childs;
public QueryAnd(final List<QueryItem> childs) {
this.childs = childs;
}
public QueryAnd(final QueryItem... items) {
this.childs = new ArrayList<>();
Collections.addAll(this.childs, items);
}
@Override
public void generateQuerry(final StringBuilder query, final String tableName) {
if (this.childs.size() >= 1) {
query.append(" (");
}
boolean first = true;
for (final QueryItem elem : this.childs) {
if (first) {
first = false;
} else {
query.append(" AND ");
}
elem.generateQuerry(query, tableName);
}
if (this.childs.size() >= 1) {
query.append(")");
}
}
@Override
public void injectQuerry(final PreparedStatement ps, final CountInOut iii) throws Exception {
for (final QueryItem elem : this.childs) {
elem.injectQuerry(ps, iii);
}
}
}

View File

@@ -0,0 +1,31 @@
package org.kar.archidata.dataAccess;
import java.sql.PreparedStatement;
public class QueryCondition implements QueryItem {
private final String key;
private final String comparator;
private final Object value;
public QueryCondition(final String key, final String comparator, final Object value) {
this.key = key;
this.comparator = comparator;
this.value = value;
}
@Override
public void generateQuerry(final StringBuilder query, final String tableName) {
query.append(tableName);
query.append(".");
query.append(this.key);
query.append(" ");
query.append(this.comparator);
query.append(" ?");
}
@Override
public void injectQuerry(final PreparedStatement ps, final CountInOut iii) throws Exception {
DataAccess.addElement(ps, this.value, iii);
iii.inc();
}
}
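For illustration, a QueryCondition generates a parameterized fragment and binds its value later through the PreparedStatement; a small sketch of the text side, with made-up table and column names:

// Minimal sketch: the SQL fragment generated by a QueryCondition.
import org.kar.archidata.dataAccess.QueryCondition;

public class QueryConditionExample {
    public static void main(final String[] args) {
        final QueryCondition condition = new QueryCondition("age", ">=", 18);
        final StringBuilder query = new StringBuilder();
        condition.generateQuerry(query, "user");
        // produces: user.age >= ?
        // the value 18 is bound later through injectQuerry(ps, iii) on the PreparedStatement.
        System.out.println(query);
    }
}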

View File

@@ -0,0 +1,51 @@
package org.kar.archidata.dataAccess;
import java.sql.PreparedStatement;
import java.util.List;
public class QueryInList<T> implements QueryItem {
protected final String key;
protected final String comparator;
protected final List<T> value;
protected QueryInList(final String key, final String comparator, final List<T> value) {
this.key = key;
this.comparator = comparator;
this.value = value;
}
public QueryInList(final String key, final List<T> value) {
this(key, "IN", value);
}
public QueryInList(final String key, final T... value) {
this(key, "IN", List.of(value));
}
@Override
public void generateQuerry(final StringBuilder query, final String tableName) {
query.append(tableName);
query.append(".");
query.append(this.key);
query.append(" ");
query.append(this.comparator);
query.append(" (");
for (int iii = 0; iii < this.value.size(); iii++) {
if (iii != 0) {
query.append(",?");
} else {
query.append("?");
}
}
query.append(")");
}
@Override
public void injectQuerry(final PreparedStatement ps, final CountInOut iii) throws Exception {
for (final Object elem : this.value) {
DataAccess.addElement(ps, elem, iii);
iii.inc();
}
}
}

View File

@@ -0,0 +1,9 @@
package org.kar.archidata.dataAccess;
import java.sql.PreparedStatement;
public interface QueryItem {
void generateQuerry(StringBuilder query, String tableName);
void injectQuerry(PreparedStatement ps, CountInOut iii) throws Exception;
}

View File

@@ -0,0 +1,9 @@
package org.kar.archidata.dataAccess;
import java.util.List;
public class QueryNoInList<T> extends QueryInList<T> {
public QueryNoInList(final String key, final List<T> value) {
super(key, "NOT IN", value);
}
}

View File

@@ -0,0 +1,3 @@
package org.kar.archidata.dataAccess;
public class QueryOption {}

View File

@@ -0,0 +1,68 @@
package org.kar.archidata.dataAccess;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.kar.archidata.dataAccess.options.AccessDeletedItems;
import org.kar.archidata.dataAccess.options.CreateDropTable;
import org.kar.archidata.dataAccess.options.ReadAllColumn;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QueryOptions {
static final Logger LOGGER = LoggerFactory.getLogger(QueryOptions.class);
public static final ReadAllColumn READ_ALL_COLOMN = new ReadAllColumn();
public static final AccessDeletedItems ACCESS_DELETED_ITEMS = new AccessDeletedItems();
public static final CreateDropTable CREATE_DROP_TABLE = new CreateDropTable();
private final List<QueryOption> options = new ArrayList<>();
public QueryOptions() {}
public QueryOptions(final QueryOption... elems) {
if (elems == null || elems.length == 0) {
return;
}
Collections.addAll(this.options, elems);
}
public void add(final QueryOption option) {
this.options.add(option);
}
public List<QueryOption> getAll() {
return this.options;
}
public QueryOption[] getAllArray() {
return this.options.toArray(new QueryOption[0]);
}
@SuppressWarnings("unchecked")
public <T> T get(final Class<T> type) {
for (final QueryOption elem : this.options) {
if (elem.getClass() == type) {
return (T) elem;
}
}
return null;
}
public boolean exist(final Class<?> type) {
for (final QueryOption elem : this.options) {
if (elem.getClass() == type) {
return true;
}
}
return false;
}
public static boolean readAllColomn(final QueryOptions options) {
if (options != null) {
return options.exist(ReadAllColumn.class);
}
return false;
}
}
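A hedged sketch of how QueryOptions aggregates options and how callers read them back; it reuses the Limit option shown earlier and the constants defined in this class:

// Minimal sketch: collecting query options and reading them back.
import org.kar.archidata.dataAccess.Limit;
import org.kar.archidata.dataAccess.QueryOptions;
import org.kar.archidata.dataAccess.options.CreateDropTable;

public class QueryOptionsExample {
    public static void main(final String[] args) {
        final QueryOptions options = new QueryOptions(QueryOptions.ACCESS_DELETED_ITEMS, new Limit(10));
        options.add(QueryOptions.CREATE_DROP_TABLE);
        final Limit limit = options.get(Limit.class);               // retrieve a typed option (null if absent)
        final boolean drop = options.exist(CreateDropTable.class);  // check the presence of an option
        System.out.println("limit option present: " + (limit != null) + ", drop requested: " + drop);
    }
}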

View File

@@ -0,0 +1,42 @@
package org.kar.archidata.dataAccess;
import java.sql.PreparedStatement;
import java.util.List;
public class QueryOr implements QueryItem {
protected final List<QueryItem> childs;
public QueryOr(final List<QueryItem> childs) {
this.childs = childs;
}
public QueryOr(final QueryItem... childs) {
this.childs = List.of(childs);
}
@Override
public void generateQuerry(final StringBuilder query, final String tableName) {
if (this.childs.size() >= 1) {
query.append(" (");
}
boolean first = true;
for (final QueryItem elem : this.childs) {
if (first) {
first = false;
} else {
query.append(" OR ");
}
elem.generateQuerry(query, tableName);
}
if (this.childs.size() >= 1) {
query.append(")");
}
}
@Override
public void injectQuerry(final PreparedStatement ps, final CountInOut iii) throws Exception {
for (final QueryItem elem : this.childs) {
elem.injectQuerry(ps, iii);
}
}
}
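Putting the pieces together, a composed condition such as the one below (illustrative table and column names) generates a nested, fully parameterized WHERE fragment:

// Minimal sketch: composing QueryAnd / QueryOr / QueryInList / QueryCondition.
import java.util.List;
import org.kar.archidata.dataAccess.QueryAnd;
import org.kar.archidata.dataAccess.QueryCondition;
import org.kar.archidata.dataAccess.QueryInList;
import org.kar.archidata.dataAccess.QueryOr;

public class ComposedConditionExample {
    public static void main(final String[] args) {
        final QueryAnd condition = new QueryAnd(
                new QueryCondition("deleted", "=", false),
                new QueryOr(
                        new QueryInList<>("status", List.of("NEW", "OPEN")),
                        new QueryCondition("priority", ">=", 5)));
        final StringBuilder query = new StringBuilder();
        condition.generateQuerry(query, "ticket");
        // produces roughly: (ticket.deleted = ? AND (ticket.status IN (?,?) OR ticket.priority >= ?))
        // the values are bound afterwards through injectQuerry(ps, iii).
        System.out.println(query);
    }
}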

View File

@@ -0,0 +1,98 @@
package org.kar.archidata.dataAccess.addOn;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Types;
import java.util.List;
import org.kar.archidata.annotation.AnnotationTools;
import org.kar.archidata.annotation.DataJson;
import org.kar.archidata.dataAccess.CountInOut;
import org.kar.archidata.dataAccess.DataAccessAddOn;
import org.kar.archidata.dataAccess.DataFactory;
import org.kar.archidata.dataAccess.LazyGetter;
import org.kar.archidata.dataAccess.QueryOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.validation.constraints.NotNull;
public class AddOnDataJson implements DataAccessAddOn {
static final Logger LOGGER = LoggerFactory.getLogger(AddOnDataJson.class);
@Override
public Class<?> getAnnotationClass() {
return DataJson.class;
}
@Override
public String getSQLFieldType(final Field elem) throws Exception {
final String fieldName = AnnotationTools.getFieldName(elem);
return DataFactory.convertTypeInSQL(String.class, fieldName);
}
@Override
public boolean isCompatibleField(final Field elem) {
final DataJson decorators = elem.getDeclaredAnnotation(DataJson.class);
return decorators != null;
}
@Override
public void insertData(final PreparedStatement ps, final Field field, final Object rootObject, final CountInOut iii) throws Exception {
final Object data = field.get(rootObject);
if (data == null) {
ps.setNull(iii.value, Types.VARCHAR);
}
final ObjectMapper objectMapper = new ObjectMapper();
final String dataString = objectMapper.writeValueAsString(data);
ps.setString(iii.value, dataString);
iii.inc();
}
@Override
public boolean canInsert(final Field field) {
return true;
}
@Override
public boolean isInsertAsync(final Field field) throws Exception {
return false;
}
@Override
public boolean canRetrieve(final Field field) {
return true;
}
@Override
public void generateQuerry(@NotNull final String tableName, @NotNull final Field field, @NotNull final StringBuilder querrySelect, @NotNull final StringBuilder querry, @NotNull final String name,
@NotNull final CountInOut elemCount, final QueryOptions options) throws Exception {
querrySelect.append(" ");
querrySelect.append(tableName);
querrySelect.append(".");
querrySelect.append(name);
elemCount.inc();
return;
}
@Override
public void fillFromQuerry(final ResultSet rs, final Field field, final Object data, final CountInOut count, final QueryOptions options, final List<LazyGetter> lazyCall) throws Exception {
final String jsonData = rs.getString(count.value);
count.inc();
if (!rs.wasNull()) {
final ObjectMapper objectMapper = new ObjectMapper();
final Object dataParsed = objectMapper.readValue(jsonData, field.getType());
field.set(data, dataParsed);
}
}
@Override
public void createTables(final String tableName, final Field field, final StringBuilder mainTableBuilder, final List<String> preActionList, final List<String> postActionList,
final boolean createIfNotExist, final boolean createDrop, final int fieldId) throws Exception {
DataFactory.createTablesSpecificType(tableName, field, mainTableBuilder, preActionList, postActionList, createIfNotExist, createDrop, fieldId, JsonValue.class);
}
}
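For context, this add-on serializes a plain field to a JSON column (MySQL) or a TEXT column (SQLite). A hedged model sketch follows: `UserPreferences` and `Settings` are hypothetical, `@DataJson` is assumed to be usable without attributes, and GenericData is assumed to carry the id column as in the LinkTable model further below.

// Hedged sketch: storing a sub-object as JSON through the @DataJson add-on.
import org.kar.archidata.annotation.DataJson;
import org.kar.archidata.model.GenericData;

public class UserPreferences extends GenericData {
    public static class Settings {
        public String theme;
        public boolean notificationsEnabled;
    }

    @DataJson
    public Settings settings; // persisted as a JSON (MySQL) or TEXT (SQLite) column
}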

View File

@@ -0,0 +1,291 @@
package org.kar.archidata.dataAccess.addOn;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.kar.archidata.annotation.AnnotationTools;
import org.kar.archidata.dataAccess.CountInOut;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.dataAccess.DataAccessAddOn;
import org.kar.archidata.dataAccess.DataFactory;
import org.kar.archidata.dataAccess.LazyGetter;
import org.kar.archidata.dataAccess.QueryAnd;
import org.kar.archidata.dataAccess.QueryCondition;
import org.kar.archidata.dataAccess.QueryInList;
import org.kar.archidata.dataAccess.QueryOptions;
import org.kar.archidata.dataAccess.addOn.model.LinkTable;
import org.kar.archidata.dataAccess.options.Condition;
import org.kar.archidata.dataAccess.options.OverrideTableName;
import org.kar.archidata.exception.DataAccessException;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.persistence.FetchType;
import jakarta.persistence.ManyToMany;
import jakarta.validation.constraints.NotNull;
public class AddOnManyToMany implements DataAccessAddOn {
static final Logger LOGGER = LoggerFactory.getLogger(AddOnManyToMany.class);
static final String SEPARATOR = "-";
@Override
public Class<?> getAnnotationClass() {
return ManyToMany.class;
}
@Override
public String getSQLFieldType(final Field elem) {
return null;
}
@Override
public boolean isCompatibleField(final Field elem) {
final ManyToMany decorators = elem.getDeclaredAnnotation(ManyToMany.class);
return decorators != null;
}
@Override
public void insertData(final PreparedStatement ps, final Field field, final Object rootObject, final CountInOut iii) throws SQLException, IllegalArgumentException, IllegalAccessException {
}
@Override
public boolean canInsert(final Field field) {
return false;
}
@Override
public boolean canRetrieve(final Field field) {
return true;
}
public static String generateLinkTableNameField(final String tableName, final Field field) throws Exception {
final String name = AnnotationTools.getFieldName(field);
return generateLinkTableName(tableName, name);
}
public static String generateLinkTableName(final String tableName, final String name) {
String localName = name;
if (name.endsWith("s")) {
localName = name.substring(0, name.length() - 1);
}
return tableName + "_link_" + localName;
}
public void generateConcatQuerry(@NotNull final String tableName, @NotNull final Field field, @NotNull final StringBuilder querrySelect, @NotNull final StringBuilder querry,
@NotNull final String name, @NotNull final CountInOut elemCount, final QueryOptions options) {
final String linkTableName = generateLinkTableName(tableName, name);
final String tmpVariable = "tmp_" + Integer.toString(elemCount.value);
querrySelect.append(" (SELECT GROUP_CONCAT(");
querrySelect.append(tmpVariable);
querrySelect.append(".object2Id ");
if ("sqlite".equals(ConfigBaseVariable.getDBType())) {
querrySelect.append(", ");
} else {
querrySelect.append("SEPARATOR ");
}
querrySelect.append("'");
querrySelect.append(SEPARATOR);
querrySelect.append("') FROM ");
querrySelect.append(linkTableName);
querrySelect.append(" ");
querrySelect.append(tmpVariable);
querrySelect.append(" WHERE ");
/* querrySelect.append(tmpVariable); querrySelect.append(".deleted = false AND "); */
querrySelect.append(tableName);
querrySelect.append(".id = ");
querrySelect.append(tmpVariable);
querrySelect.append(".");
querrySelect.append("object1Id ");
if (!"sqlite".equals(ConfigBaseVariable.getDBType())) {
querrySelect.append(" GROUP BY ");
querrySelect.append(tmpVariable);
querrySelect.append(".object1Id");
}
querrySelect.append(") AS ");
querrySelect.append(name);
querrySelect.append(" ");
/* " (SELECT GROUP_CONCAT(tmp.data_id SEPARATOR '-')" + " FROM cover_link_node tmp" + " WHERE tmp.deleted = false" +
* " AND node.id = tmp.node_id" + " GROUP BY tmp.node_id) AS covers" + */
elemCount.inc();
}
@Override
public void generateQuerry(@NotNull final String tableName, @NotNull final Field field, @NotNull final StringBuilder querrySelect, @NotNull final StringBuilder querry, @NotNull final String name,
@NotNull final CountInOut elemCount, final QueryOptions options) throws Exception {
if (field.getType() != List.class) {
return;
}
final Class<?> objectClass = (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
if (objectClass == Long.class) {
generateConcatQuerry(tableName, field, querrySelect, querry, name, elemCount, options);
}
final ManyToMany decorators = field.getDeclaredAnnotation(ManyToMany.class);
if (decorators == null) {
return;
}
if (objectClass == decorators.targetEntity()) {
if (decorators.fetch() == FetchType.EAGER) {
throw new DataAccessException("EAGER is not supported for list of element...");
} else {
generateConcatQuerry(tableName, field, querrySelect, querry, name, elemCount, options);
}
}
}
@Override
public void fillFromQuerry(final ResultSet rs, final Field field, final Object data, final CountInOut count, final QueryOptions options, final List<LazyGetter> lazyCall) throws Exception {
if (field.getType() != List.class) {
LOGGER.error("Can not ManyToMany with other than List Model: {}", field.getType().getCanonicalName());
return;
}
final Class<?> objectClass = (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
if (objectClass == Long.class) {
final List<Long> idList = DataAccess.getListOfIds(rs, count.value, SEPARATOR);
field.set(data, idList);
count.inc();
return;
// } else {
// LOGGER.error("Can not ManyToMany with other than List<Long> Model: List<{}>", objectClass.getCanonicalName());
// return;
}
final ManyToMany decorators = field.getDeclaredAnnotation(ManyToMany.class);
if (decorators == null) {
return;
}
if (objectClass == decorators.targetEntity()) {
if (decorators.fetch() == FetchType.EAGER) {
throw new DataAccessException("EAGER is not supported for list of element...");
} else {
final List<Long> idList = DataAccess.getListOfIds(rs, count.value, SEPARATOR);
// field.set(data, idList);
count.inc();
if (idList != null && idList.size() > 0) {
final String idField = AnnotationTools.getFieldName(AnnotationTools.getIdField(objectClass));
// In lazy mode the request is performed asynchronously; it will be executed later...
final LazyGetter lambda = () -> {
final List<Long> childs = new ArrayList<>(idList);
// TODO: update to have get with abstract types ....
@SuppressWarnings("unchecked")
final Object foreignData = DataAccess.getsWhere(decorators.targetEntity(), new Condition(new QueryInList<>(idField, childs)));
if (foreignData == null) {
return;
}
field.set(data, foreignData);
};
lazyCall.add(lambda);
}
}
}
}
@Override
public boolean isUpdateAsync(final Field field) {
return true;
}
@Override
public void asyncUpdate(final String tableName, final Object localKey, final Field field, final Object data, final List<LazyGetter> actions) throws Exception {
if (field.getType() != List.class) {
LOGGER.error("Can not ManyToMany with other than List Model: {}", field.getType().getCanonicalName());
return;
}
final String columnName = AnnotationTools.getFieldName(field);
final String linkTableName = generateLinkTableName(tableName, columnName);
actions.add(() -> {
DataAccess.deleteWhere(LinkTable.class, new OverrideTableName(linkTableName), new Condition(new QueryCondition("object1Id", "=", localKey)));
});
asyncInsert(tableName, localKey, field, data, actions);
}
@Override
public boolean isInsertAsync(final Field field) {
return true;
}
@Override
public void asyncInsert(final String tableName, final Object localKey, final Field field, final Object data, final List<LazyGetter> actions) throws Exception {
if (data == null) {
return;
}
if (field.getType() != List.class) {
LOGGER.error("Can not ManyToMany with other than List Model: {}", field.getType().getCanonicalName());
return;
}
final String columnName = AnnotationTools.getFieldName(field);
final String linkTableName = generateLinkTableName(tableName, columnName);
final Class<?> objectClass = (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
if (objectClass != Long.class) {
LOGGER.error("Can not ManyToMany with other than List<Long> Model: List<{}>", objectClass.getCanonicalName());
return;
}
@SuppressWarnings("unchecked")
final List<Long> dataCasted = (List<Long>) data;
if (dataCasted.size() == 0) {
return;
}
final List<LinkTable> insertElements = new ArrayList<>();
for (final Long remoteKey : dataCasted) {
if (remoteKey == null) {
throw new DataAccessException("Try to insert remote key with null value");
}
if (localKey instanceof final Long localKeyLong) {
insertElements.add(new LinkTable(localKeyLong, remoteKey));
} else {
throw new DataAccessException("Not manage access of remte key like ManyToMany other than Long: " + localKey.getClass().getCanonicalName());
}
}
if (insertElements.size() == 0) {
LOGGER.warn("Insert multiple link without any value (may have null in the list): {}", dataCasted);
return;
}
actions.add(() -> {
DataAccess.insertMultiple(insertElements, new OverrideTableName(linkTableName));
});
}
@Override
public void drop(final String tableName, final Field field) throws Exception {
final String columnName = AnnotationTools.getFieldName(field);
final String linkTableName = generateLinkTableName(tableName, columnName);
DataAccess.drop(LinkTable.class, new OverrideTableName(linkTableName));
}
@Override
public void cleanAll(final String tableName, final Field field) throws Exception {
final String columnName = AnnotationTools.getFieldName(field);
final String linkTableName = generateLinkTableName(tableName, columnName);
DataAccess.cleanAll(LinkTable.class, new OverrideTableName(linkTableName));
}
public static void addLink(final Class<?> clazz, final long localKey, final String column, final long remoteKey) throws Exception {
final String tableName = AnnotationTools.getTableName(clazz);
final String linkTableName = generateLinkTableName(tableName, column);
final LinkTable insertElement = new LinkTable(localKey, remoteKey);
DataAccess.insert(insertElement, new OverrideTableName(linkTableName));
}
public static int removeLink(final Class<?> clazz, final long localKey, final String column, final long remoteKey) throws Exception {
final String tableName = AnnotationTools.getTableName(clazz);
final String linkTableName = generateLinkTableName(tableName, column);
return DataAccess.deleteWhere(LinkTable.class, new OverrideTableName(linkTableName),
new Condition(new QueryAnd(new QueryCondition("object1Id", "=", localKey), new QueryCondition("object2Id", "=", remoteKey))));
}
@Override
public void createTables(final String tableName, final Field field, final StringBuilder mainTableBuilder, final List<String> preActionList, final List<String> postActionList,
final boolean createIfNotExist, final boolean createDrop, final int fieldId) throws Exception {
final String linkTableName = generateLinkTableNameField(tableName, field);
final QueryOptions options = new QueryOptions(new OverrideTableName(linkTableName));
final List<String> sqlCommand = DataFactory.createTable(LinkTable.class, options);
postActionList.addAll(sqlCommand);
}
}
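A hedged sketch of the shape this add-on expects: a List<Long> field annotated with @ManyToMany, plus the static helpers to maintain the generated link table. `Node` and `coverIds` are illustrative names, and GenericData is assumed to provide the id column.

// Hedged sketch: a ManyToMany link stored through the generated link table
// (its name is derived by generateLinkTableName from the table and column names).
import java.util.List;
import jakarta.persistence.ManyToMany;
import org.kar.archidata.dataAccess.addOn.AddOnManyToMany;
import org.kar.archidata.model.GenericData;

public class Node extends GenericData {
    @ManyToMany
    public List<Long> coverIds; // read back as a '-' concatenated list of remote ids
}

class NodeLinkUsage {
    static void linkCover(final long nodeId, final long coverId) throws Exception {
        AddOnManyToMany.addLink(Node.class, nodeId, "coverIds", coverId);
    }

    static void unlinkCover(final long nodeId, final long coverId) throws Exception {
        AddOnManyToMany.removeLink(Node.class, nodeId, "coverIds", coverId);
    }
}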

View File

@@ -0,0 +1,191 @@
package org.kar.archidata.dataAccess.addOn;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Types;
import java.util.List;
import org.kar.archidata.annotation.AnnotationTools;
import org.kar.archidata.dataAccess.CountInOut;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.dataAccess.DataAccessAddOn;
import org.kar.archidata.dataAccess.DataFactory;
import org.kar.archidata.dataAccess.LazyGetter;
import org.kar.archidata.dataAccess.QueryOptions;
import org.kar.archidata.exception.DataAccessException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.persistence.FetchType;
import jakarta.persistence.ManyToOne;
import jakarta.validation.constraints.NotNull;
public class AddOnManyToOne implements DataAccessAddOn {
static final Logger LOGGER = LoggerFactory.getLogger(AddOnManyToMany.class);
@Override
public Class<?> getAnnotationClass() {
return ManyToOne.class;
}
@Override
public String getSQLFieldType(final Field elem) throws Exception {
final String fieldName = AnnotationTools.getFieldName(elem);
try {
return DataFactory.convertTypeInSQL(Long.class, fieldName);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@Override
public boolean isCompatibleField(final Field elem) {
final ManyToOne decorators = elem.getDeclaredAnnotation(ManyToOne.class);
return decorators != null;
}
@Override
public void insertData(final PreparedStatement ps, final Field field, final Object rootObject, final CountInOut iii) throws Exception {
final Object data = field.get(rootObject);
if (data == null) {
ps.setNull(iii.value, Types.BIGINT);
} else if (field.getType() == Long.class) {
final Long dataLong = (Long) data;
ps.setLong(iii.value, dataLong);
} else {
final Field idField = AnnotationTools.getFieldOfId(field.getType());
final Object uid = idField.get(data);
if (uid == null) {
ps.setNull(iii.value, Types.BIGINT);
throw new DataAccessException("Not implemented adding subClasses ==> add it manualy before...");
} else {
final Long dataLong = (Long) uid;
ps.setLong(iii.value, dataLong);
}
}
iii.inc();
}
@Override
public boolean canInsert(final Field field) {
if (field.getType() == Long.class) {
return true;
}
final ManyToOne decorators = field.getDeclaredAnnotation(ManyToOne.class);
if (field.getType() == decorators.targetEntity()) {
return true;
}
return false;
}
@Override
public boolean isInsertAsync(final Field field) throws Exception {
return false;
}
@Override
public boolean canRetrieve(final Field field) {
if (field.getType() == Long.class) {
return true;
}
final ManyToOne decorators = field.getDeclaredAnnotation(ManyToOne.class);
if (field.getType() == decorators.targetEntity()) {
return true;
}
return false;
}
@Override
public void generateQuerry(@NotNull final String tableName, @NotNull final Field field, @NotNull final StringBuilder querrySelect, @NotNull final StringBuilder querry, @NotNull final String name,
@NotNull final CountInOut elemCount, final QueryOptions options) throws Exception {
if (field.getType() == Long.class) {
querrySelect.append(" ");
querrySelect.append(tableName);
querrySelect.append(".");
querrySelect.append(name);
elemCount.inc();
return;
}
final ManyToOne decorators = field.getDeclaredAnnotation(ManyToOne.class);
if (field.getType() == decorators.targetEntity()) {
if (decorators.fetch() == FetchType.EAGER) {
// TODO: rework this to have a lazy mode ...
DataAccess.generateSelectField(querrySelect, querry, field.getType(), options, elemCount);
final Class<?> subType = field.getType();
final String subTableName = AnnotationTools.getTableName(subType);
final Field idField = AnnotationTools.getFieldOfId(subType);
querry.append("LEFT OUTER JOIN `");
querry.append(subTableName);
querry.append("` ON ");
querry.append(subTableName);
querry.append(".");
querry.append(AnnotationTools.getFieldName(idField));
querry.append(" = ");
querry.append(tableName);
querry.append(".");
querry.append(AnnotationTools.getFieldName(field));
} else {
querrySelect.append(" ");
querrySelect.append(tableName);
querrySelect.append(".");
querrySelect.append(name);
elemCount.inc();
return;
}
}
/* SELECT k.id, r.id FROM `right` k LEFT OUTER JOIN `rightDescription` r ON k.rightDescriptionId=r.id */
}
@Override
public void fillFromQuerry(final ResultSet rs, final Field field, final Object data, final CountInOut count, final QueryOptions options, final List<LazyGetter> lazyCall) throws Exception {
if (field.getType() == Long.class) {
final Long foreignKey = rs.getLong(count.value);
count.inc();
if (!rs.wasNull()) {
field.set(data, foreignKey);
}
return;
}
final Class<?> objectClass = field.getType();
final ManyToOne decorators = field.getDeclaredAnnotation(ManyToOne.class);
if (decorators == null) {
return;
}
if (objectClass == decorators.targetEntity()) {
if (decorators.fetch() == FetchType.EAGER) {
final CountInOut countNotNull = new CountInOut(0);
final Object dataNew = DataAccess.createObjectFromSQLRequest(rs, objectClass, count, countNotNull, options, lazyCall);
if (dataNew != null && countNotNull.value != 0) {
field.set(data, dataNew);
}
} else {
// here we have the field, the data and the remote value ==> we can create a callback that generates the update of the value...
final Long foreignKey = rs.getLong(count.value);
count.inc();
if (!rs.wasNull()) {
// In lazy mode the request is performed asynchronously; it will be executed later...
final LazyGetter lambda = () -> {
// TODO: update to have get with abstract types ....
final Object foreignData = DataAccess.get(decorators.targetEntity(), foreignKey);
if (foreignData == null) {
return;
}
field.set(data, foreignData);
};
lazyCall.add(lambda);
}
}
}
}
// TODO: refactor this to manage a generic table with a dynamic name, so it is serializable with the default system
@Override
public void createTables(final String tableName, final Field field, final StringBuilder mainTableBuilder, final List<String> preActionList, final List<String> postActionList,
final boolean createIfNotExist, final boolean createDrop, final int fieldId) throws Exception {
DataFactory.createTablesSpecificType(tableName, field, mainTableBuilder, preActionList, postActionList, createIfNotExist, createDrop, fieldId, Long.class);
}
}
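Two hedged model shapes this add-on accepts: either a raw foreign key stored as a Long, or a typed reference resolved lazily (or joined when FetchType.EAGER). `Track` and `Album` are illustrative names, and GenericData is assumed to carry the id column.

// Hedged sketch: the two field shapes handled by the ManyToOne add-on.
import jakarta.persistence.FetchType;
import jakarta.persistence.ManyToOne;
import org.kar.archidata.model.GenericData;

public class Track extends GenericData {
    // 1) raw foreign key: stored and returned as a plain Long column
    @ManyToOne(targetEntity = Album.class)
    public Long albumId;

    // 2) typed reference: resolved through a LazyGetter, or joined when fetch = FetchType.EAGER
    @ManyToOne(targetEntity = Album.class, fetch = FetchType.LAZY)
    public Album album;
}

class Album extends GenericData {
    public String name;
}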

View File

@@ -0,0 +1,132 @@
package org.kar.archidata.dataAccess.addOn;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.kar.archidata.annotation.AnnotationTools;
import org.kar.archidata.dataAccess.CountInOut;
import org.kar.archidata.dataAccess.DataAccessAddOn;
import org.kar.archidata.dataAccess.DataFactory;
import org.kar.archidata.dataAccess.LazyGetter;
import org.kar.archidata.dataAccess.QueryOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.persistence.OneToMany;
import jakarta.validation.constraints.NotNull;
public class AddOnOneToMany implements DataAccessAddOn {
static final Logger LOGGER = LoggerFactory.getLogger(AddOnManyToMany.class);
/** Convert the list of external ids into a '-' separated string.
 * @param ids List of values (null values are removed)
 * @return '-' separated string */
protected static String getStringOfIds(final List<Long> ids) {
final List<Long> tmp = new ArrayList<>(ids);
return tmp.stream().map(String::valueOf).collect(Collectors.joining("-"));
}
/** Extract a list of '-' separated elements from an SQL input data.
 * @param rs Result set of the DB
 * @param iii Index in the result set
 * @return The list of Long values
 * @throws SQLException if an error is generated by the SQL request. */
protected static List<Long> getListOfIds(final ResultSet rs, final int iii) throws SQLException {
final String trackString = rs.getString(iii);
if (rs.wasNull()) {
return null;
}
final List<Long> out = new ArrayList<>();
final String[] elements = trackString.split("-");
for (final String elem : elements) {
final Long tmp = Long.parseLong(elem);
out.add(tmp);
}
return out;
}
@Override
public Class<?> getAnnotationClass() {
return OneToMany.class;
}
@Override
public String getSQLFieldType(final Field field) throws Exception {
final String fieldName = AnnotationTools.getFieldName(field);
try {
return DataFactory.convertTypeInSQL(Long.class, fieldName);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@Override
public boolean isCompatibleField(final Field field) {
final OneToMany decorators = field.getDeclaredAnnotation(OneToMany.class);
return decorators != null;
}
@Override
public void insertData(final PreparedStatement ps, final Field field, final Object rootObject, final CountInOut iii) throws SQLException, IllegalArgumentException, IllegalAccessException {
final Object data = field.get(rootObject);
iii.inc();
if (data == null) {
ps.setNull(iii.value, Types.BIGINT);
} else {
@SuppressWarnings("unchecked")
final String dataTmp = getStringOfIds((List<Long>) data);
ps.setString(iii.value, dataTmp);
}
}
@Override
public boolean canInsert(final Field field) {
return false;
}
@Override
public boolean isInsertAsync(final Field field) throws Exception {
return false;
}
@Override
public boolean canRetrieve(final Field field) {
return false;
}
@Override
public void generateQuerry(@NotNull final String tableName, @NotNull final Field field, @NotNull final StringBuilder querrySelect, @NotNull final StringBuilder querry, @NotNull final String name,
@NotNull final CountInOut elemCount, final QueryOptions options) {
querrySelect.append(" ");
querrySelect.append(tableName);
querrySelect.append(".");
querrySelect.append(name);
elemCount.inc();
}
@Override
public void fillFromQuerry(final ResultSet rs, final Field field, final Object data, final CountInOut count, final QueryOptions options, final List<LazyGetter> lazyCall)
throws SQLException, IllegalArgumentException, IllegalAccessException {
final Long foreignKey = rs.getLong(count.value);
count.inc();
if (!rs.wasNull()) {
field.set(data, foreignKey);
}
}
// TODO: refactor this to manage a generic table with a dynamic name, so it is serializable with the default system
@Override
public void createTables(final String tableName, final Field field, final StringBuilder mainTableBuilder, final List<String> preActionList, final List<String> postActionList,
final boolean createIfNotExist, final boolean createDrop, final int fieldId) throws Exception {
DataFactory.createTablesSpecificType(tableName, field, mainTableBuilder, preActionList, postActionList, createIfNotExist, createDrop, fieldId, Long.class);
}
}
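For illustration, the helpers above flatten a list of ids into a '-' separated string on insert and parse it back on read. The standalone sketch below re-implements that format outside the class purely to show the round-trip (the protected helpers themselves are not callable from application code):

// Minimal sketch: the '-' separated id format used by this add-on (illustration only).
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

public class IdListFormatExample {
    static String toStringOfIds(final List<Long> ids) {
        return ids.stream().map(String::valueOf).collect(Collectors.joining("-"));
    }

    static List<Long> fromStringOfIds(final String data) {
        final List<Long> out = new ArrayList<>();
        for (final String elem : data.split("-")) {
            out.add(Long.parseLong(elem));
        }
        return out;
    }

    public static void main(final String[] args) {
        final String packed = toStringOfIds(List.of(12L, 42L, 7L)); // "12-42-7"
        System.out.println(fromStringOfIds(packed));                // [12, 42, 7]
    }
}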

View File

@@ -0,0 +1,114 @@
package org.kar.archidata.dataAccess.addOn;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.kar.archidata.annotation.AnnotationTools;
import org.kar.archidata.annotation.addOn.SQLTableExternalForeinKeyAsList;
import org.kar.archidata.dataAccess.CountInOut;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.dataAccess.DataAccessAddOn;
import org.kar.archidata.dataAccess.DataFactory;
import org.kar.archidata.dataAccess.LazyGetter;
import org.kar.archidata.dataAccess.QueryOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.validation.constraints.NotNull;
// TODO: maybe deprecated ==> use DataJson instead...
public class AddOnSQLTableExternalForeinKeyAsList implements DataAccessAddOn {
static final Logger LOGGER = LoggerFactory.getLogger(AddOnManyToMany.class);
static final String SEPARATOR = "-";
/** Convert the list of external ids into a '-' separated string.
 * @param ids List of values (null values are removed)
 * @return '-' separated string */
protected static String getStringOfIds(final List<Long> ids) {
final List<Long> tmp = new ArrayList<>(ids);
return tmp.stream().map(String::valueOf).collect(Collectors.joining(SEPARATOR));
}
@Override
public Class<?> getAnnotationClass() {
return SQLTableExternalForeinKeyAsList.class;
}
@Override
public String getSQLFieldType(final Field field) throws Exception {
final String fieldName = AnnotationTools.getFieldName(field);
try {
return DataFactory.convertTypeInSQL(String.class, fieldName);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@Override
public boolean isCompatibleField(final Field field) {
final SQLTableExternalForeinKeyAsList decorators = field.getDeclaredAnnotation(SQLTableExternalForeinKeyAsList.class);
return decorators != null;
}
@Override
public void insertData(final PreparedStatement ps, final Field field, final Object rootObject, final CountInOut iii) throws SQLException, IllegalArgumentException, IllegalAccessException {
final Object data = field.get(rootObject);
iii.inc();
if (data == null) {
ps.setNull(iii.value, Types.BIGINT);
} else {
@SuppressWarnings("unchecked")
final String dataTmp = getStringOfIds((List<Long>) data);
ps.setString(iii.value, dataTmp);
}
}
@Override
public boolean canInsert(final Field field) {
return false;
}
@Override
public boolean isInsertAsync(final Field field) throws Exception {
return false;
}
@Override
public boolean canRetrieve(final Field field) {
return false;
}
@Override
public void generateQuerry(@NotNull final String tableName, @NotNull final Field field, @NotNull final StringBuilder querrySelect, @NotNull final StringBuilder querry, @NotNull final String name,
@NotNull final CountInOut elemCount, final QueryOptions options) {
elemCount.inc();
querrySelect.append(" ");
querrySelect.append(tableName);
querrySelect.append(".");
querrySelect.append(name);
}
@Override
public void fillFromQuerry(final ResultSet rs, final Field field, final Object data, final CountInOut count, final QueryOptions options, final List<LazyGetter> lazyCall)
throws SQLException, IllegalArgumentException, IllegalAccessException {
final List<Long> idList = DataAccess.getListOfIds(rs, count.value, SEPARATOR);
field.set(data, idList);
count.inc();
}
@Override
public void createTables(final String tableName, final Field field, final StringBuilder mainTableBuilder, final List<String> preActionList, final List<String> postActionList,
final boolean createIfNotExist, final boolean createDrop, final int fieldId) throws Exception {
// TODO Auto-generated method stub
DataFactory.createTablesSpecificType(tableName, field, mainTableBuilder, preActionList, postActionList, createIfNotExist, createDrop, fieldId, String.class);
}
}

View File

@@ -0,0 +1,25 @@
package org.kar.archidata.dataAccess.addOn.model;
import org.kar.archidata.model.GenericData;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.Column;
public class LinkTable extends GenericData {
public LinkTable() {
// nothing to do...
}
public LinkTable(final long object1Id, final long object2Id) {
this.object1Id = object1Id;
this.object2Id = object2Id;
}
@Schema(description = "Object reference 1")
@Column(nullable = false)
public Long object1Id;
@Schema(description = "Object reference 2")
@Column(nullable = false)
public Long object2Id;
}

View File

@@ -0,0 +1,8 @@
package org.kar.archidata.dataAccess.options;
import org.kar.archidata.dataAccess.QueryOption;
/** This option permits access to the deleted items of a table. */
public class AccessDeletedItems extends QueryOption {
public AccessDeletedItems() {}
}

View File

@@ -0,0 +1,16 @@
package org.kar.archidata.dataAccess.options;
import org.kar.archidata.dataAccess.QueryOption;
/** Query option that carries a check function used to validate the data before it is written to the database. */
public class CheckFunction extends QueryOption {
private final CheckFunctionInterface checker;
public CheckFunction(final CheckFunctionInterface checker) {
this.checker = checker;
}
public CheckFunctionInterface getChecker() {
return this.checker;
}
}

View File

@@ -0,0 +1,20 @@
package org.kar.archidata.dataAccess.options;
import java.util.List;
import org.kar.archidata.annotation.AnnotationTools;
/** Interface of a check function used to validate an object before it is written to the database. */
public interface CheckFunctionInterface {
/** This function implementation is designed to check whether the updated class is valid or not for insertion.
 * @param baseName Name of the object, used to make the error messages more precise.
 * @param data The object that might be injected.
 * @param filterValue List of fields to check. If null, all columns must be checked.
 * @throws Exception Generated if the data is incorrect. */
void check(final String baseName, Object data, List<String> filterValue) throws Exception;
default void checkAll(final String baseName, final Object data) throws Exception {
check(baseName, data, AnnotationTools.getAllFieldsNames(data.getClass()));
}
}
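A hedged sketch of a hand-written checker implementing this interface; the validated field name is hypothetical. It would then be wrapped in a CheckFunction option, e.g. new CheckFunction(new CheckUserName()).

// Minimal sketch: a custom CheckFunctionInterface that validates one field by name.
import java.util.List;
import org.kar.archidata.dataAccess.options.CheckFunctionInterface;

public class CheckUserName implements CheckFunctionInterface {
    @Override
    public void check(final String baseName, final Object data, final List<String> filterValue) throws Exception {
        // only validate the field when it is part of the requested filter (null means "check everything")
        if (filterValue != null && !filterValue.contains("name")) {
            return;
        }
        final Object value = data.getClass().getField("name").get(data);
        if (value instanceof final String name && name.isBlank()) {
            throw new Exception(baseName + "name: must not be blank");
        }
    }
}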

View File

@@ -0,0 +1,12 @@
package org.kar.archidata.dataAccess.options;
import java.util.List;
/** Check function that accepts everything (performs no validation). */
public class CheckFunctionVoid implements CheckFunctionInterface {
@Override
public void check(final String baseName, Object data, List<String> filterValue) {
}
}

View File

@@ -0,0 +1,335 @@
package org.kar.archidata.dataAccess.options;
import java.lang.reflect.Field;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.kar.archidata.annotation.AnnotationTools;
import org.kar.archidata.annotation.DataJson;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.dataAccess.QueryCondition;
import org.kar.archidata.exception.DataAccessException;
import org.kar.archidata.exception.InputException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonValue;
import jakarta.persistence.ManyToOne;
import jakarta.validation.constraints.Size;
public class CheckJPA<T> implements CheckFunctionInterface {
private static final Logger LOGGER = LoggerFactory.getLogger(CheckJPA.class);
private final Class<?> clazz;
/** Interface of an elementary check applied to a single field. */
public interface CheckInterface<K> {
/** This function implementation is designed to check whether the updated class is valid or not for insertion.
 * @param baseName Name of the object, used to make the error messages more precise.
 * @param data The object that might be injected.
 * @throws Exception Generated if the data is incorrect. */
void check(final String baseName, final K data) throws Exception;
}
protected Map<String, List<CheckInterface<T>>> checking = null;
protected void add(final String field, final CheckInterface<T> checkFunction) {
List<CheckInterface<T>> actions = this.checking.get(field);
if (actions == null) {
actions = new ArrayList<>();
this.checking.put(field, actions);
}
actions.add(checkFunction);
}
public CheckJPA(final Class<T> clazz) {
this.clazz = clazz;
}
public void initialize() throws Exception {
if (this.checking != null) {
return;
}
try {
this.checking = new HashMap<>();
// create Table:
final List<String> primaryKeys = new ArrayList<>();
for (final Field field : this.clazz.getFields()) {
final String fieldName = field.getName(); // AnnotationTools.getFieldName(field);
if (AnnotationTools.isPrimaryKey(field)) {
add(fieldName, (final String baseName, final T data) -> {
throw new InputException(baseName + fieldName, "This is a '@Id' (primaryKey) ==> can not be change");
});
}
if (AnnotationTools.getConstraintsNotNull(field)) {
add(fieldName, (final String baseName, final T data) -> {
if (field.get(data) == null) {
throw new InputException(baseName + fieldName, "Can not be null");
}
});
}
if (AnnotationTools.isCreatedAtField(field) || AnnotationTools.isUpdateAtField(field)) {
add(fieldName, (final String baseName, final T data) -> {
throw new InputException(baseName + fieldName, "It is forbidden to change this field");
});
}
final Class<?> type = field.getType();
if (type == Long.class || type == long.class) {
final Long maxValue = AnnotationTools.getConstraintsMax(field);
if (maxValue != null) {
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Long elemTyped = (Long) elem;
if (elemTyped > maxValue) {
throw new InputException(baseName + fieldName, "Value too height max: " + maxValue);
}
});
}
final Long minValue = AnnotationTools.getConstraintsMin(field);
if (minValue != null) {
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Long elemTyped = (Long) elem;
if (elemTyped < minValue) {
throw new InputException(baseName + fieldName, "Value too Low min: " + minValue);
}
});
}
final ManyToOne annotationManyToOne = AnnotationTools.getManyToOne(field);
if (annotationManyToOne != null && annotationManyToOne.targetEntity() != null) {
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final long count = DataAccess.count(annotationManyToOne.targetEntity(), elem);
if (count == 0) {
throw new InputException(baseName + fieldName, "Foreign element does not exist in the DB:" + elem);
}
});
}
} else if (type == Integer.class || type == int.class) {
final Long maxValueRoot = AnnotationTools.getConstraintsMax(field);
if (maxValueRoot != null) {
final int maxValue = maxValueRoot.intValue();
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Integer elemTyped = (Integer) elem;
if (elemTyped > maxValue) {
throw new InputException(baseName + fieldName, "Value too height max: " + maxValue);
}
});
}
final Long minValueRoot = AnnotationTools.getConstraintsMin(field);
if (minValueRoot != null) {
final int minValue = minValueRoot.intValue();
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Integer elemTyped = (Integer) elem;
if (elemTyped < minValue) {
throw new InputException(baseName + fieldName, "Value too Low min: " + minValue);
}
});
}
final ManyToOne annotationManyToOne = AnnotationTools.getManyToOne(field);
if (annotationManyToOne != null && annotationManyToOne.targetEntity() != null) {
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final long count = DataAccess.count(annotationManyToOne.targetEntity(), elem);
if (count == 0) {
throw new InputException(baseName + fieldName, "Foreign element does not exist in the DB:" + elem);
}
});
}
} else if (type == Boolean.class || type == boolean.class) {
} else if (type == Float.class || type == float.class) {
final Long maxValueRoot = AnnotationTools.getConstraintsMax(field);
if (maxValueRoot != null) {
final float maxValue = maxValueRoot.floatValue();
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Float elemTyped = (Float) elem;
if (elemTyped > maxValue) {
throw new InputException(baseName + fieldName, "Value too height max: " + maxValue);
}
});
}
final Long minValueRoot = AnnotationTools.getConstraintsMin(field);
if (minValueRoot != null) {
final float minValue = minValueRoot.floatValue();
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Float elemTyped = (Float) elem;
if (elemTyped < minValue) {
throw new InputException(baseName + fieldName, "Value too Low min: " + minValue);
}
});
}
} else if (type == Double.class || type == double.class) {
final Long maxValueRoot = AnnotationTools.getConstraintsMax(field);
if (maxValueRoot != null) {
final double maxValue = maxValueRoot.doubleValue();
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Double elemTyped = (Double) elem;
if (elemTyped > maxValue) {
throw new InputException(baseName + fieldName, "Value too height max: " + maxValue);
}
});
}
final Long minValueRoot = AnnotationTools.getConstraintsMin(field);
if (minValueRoot != null) {
final double minValue = minValueRoot.doubleValue();
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final Double elemTyped = (Double) elem;
if (elemTyped < minValue) {
throw new InputException(baseName + fieldName, "Value too Low min: " + minValue);
}
});
}
} else if (type == Date.class || type == Timestamp.class) {
} else if (type == LocalDate.class) {
} else if (type == LocalTime.class) {
} else if (type == String.class) {
final int maxSizeString = AnnotationTools.getLimitSize(field);
if (maxSizeString > 0) {
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final String elemTyped = (String) elem;
if (elemTyped.length() > maxSizeString) {
throw new InputException(baseName + fieldName, "Too long size must be <= " + maxSizeString);
}
});
}
final Size limitSize = AnnotationTools.getConstraintsSize(field);
if (limitSize != null) {
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final String elemTyped = (String) elem;
if (elemTyped.length() > limitSize.max()) {
throw new InputException(baseName + fieldName, "Too long size (constraints) must be <= " + limitSize.max());
}
if (elemTyped.length() < limitSize.min()) {
throw new InputException(baseName + fieldName, "Too small size (constraints) must be >= " + limitSize.max());
}
});
}
final String patternString = AnnotationTools.getConstraintsPattern(field);
if (patternString != null) {
final Pattern pattern = Pattern.compile(patternString);
add(fieldName, (final String baseName, final T data) -> {
final Object elem = field.get(data);
if (elem == null) {
return;
}
final String elemTyped = (String) elem;
if (!pattern.matcher(elemTyped).find()) {
throw new InputException(baseName + fieldName, "does not match the required pattern (constraints) must be '" + patternString + "'");
}
});
}
} else if (type == JsonValue.class) {
final DataJson jsonAnnotation = AnnotationTools.getDataJson(field);
if (jsonAnnotation != null && jsonAnnotation.checker() != CheckFunctionVoid.class) {
// If there is an error here, it crashes at startup; no new instance is created after that...
final CheckFunctionInterface instance = jsonAnnotation.checker().getDeclaredConstructor().newInstance();
add(fieldName, (final String baseName, final T data) -> {
instance.checkAll(baseName + fieldName + ".", field.get(data));
});
}
} else if (type.isEnum()) {
// nothing to do.
}
// keep this check last ==> it takes more time...
if (AnnotationTools.isUnique(field)) {
// Create the request ...
add(fieldName, (final String baseName, final T data) -> {
final Object other = DataAccess.getWhere(this.clazz, new Condition(new QueryCondition(fieldName, "==", field.get(data))));
if (other != null) {
throw new InputException(baseName + fieldName, "Name already exist in the DB");
}
});
}
}
} catch (final Exception ex) {
this.checking = null;
throw ex;
}
}
@Override
public void check(final String baseName, final Object data, final List<String> filterValue) throws Exception {
if (this.checking == null) {
initialize();
}
if (!(this.clazz.isAssignableFrom(data.getClass()))) {
throw new DataAccessException("Incompatatyble type of Object" + data.getClass().getCanonicalName());
}
@SuppressWarnings("unchecked")
final T dataCasted = (T) data;
for (final String filter : filterValue) {
final List<CheckInterface<T>> actions = this.checking.get(filter);
if (actions == null) {
continue;
}
for (final CheckInterface<T> action : actions) {
action.check(baseName, dataCasted);
}
}
checkTyped(dataCasted, filterValue);
}
public void checkTyped(final T data, final List<String> filterValue) throws Exception {
// nothing to do ...
}
}
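The checks built above are keyed by field name and run only for the fields listed in filterValue. A minimal usage sketch, assuming a concrete checker subclass CheckerMyEntity bound to a hypothetical MyEntity class (both names are illustrative, not part of the library):
// Validate only the fields actually present in an incoming update.
final CheckerMyEntity checker = new CheckerMyEntity(); // hypothetical subclass of this generic checker
final MyEntity candidate = new MyEntity();
candidate.name = "Bob";
// baseName ("user.") prefixes the error path; the list selects which field checks are executed.
checker.check("user.", candidate, List.of("name", "email"));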

View File

@@ -0,0 +1,62 @@
package org.kar.archidata.dataAccess.options;
import java.sql.PreparedStatement;
import org.kar.archidata.dataAccess.CountInOut;
import org.kar.archidata.dataAccess.QueryItem;
import org.kar.archidata.dataAccess.QueryOption;
import org.kar.archidata.dataAccess.QueryOptions;
/** Option to add a WHERE condition (QueryItem) to the generated request. */
public class Condition extends QueryOption {
public final QueryItem condition;
public Condition(final QueryItem items) {
this.condition = items;
}
public Condition() {
this.condition = null;
}
public void generateQuerry(final StringBuilder query, final String tableName) {
if (this.condition != null) {
this.condition.generateQuerry(query, tableName);
}
}
public void injectQuerry(final PreparedStatement ps, final CountInOut iii) throws Exception {
if (this.condition != null) {
this.condition.injectQuerry(ps, iii);
}
}
public void whereAppendQuery(final StringBuilder query, final String tableName, final QueryOptions options, final String deletedFieldName) {
boolean exclude_deleted = true;
if (options != null) {
exclude_deleted = !options.exist(AccessDeletedItems.class);
}
// Check if we have a condition to generate
if (this.condition == null) {
if (exclude_deleted && deletedFieldName != null) {
query.append(" WHERE ");
query.append(tableName);
query.append(".");
query.append(deletedFieldName);
query.append(" = false ");
}
return;
}
query.append(" WHERE (");
this.condition.generateQuerry(query, tableName);
query.append(") ");
if (exclude_deleted && deletedFieldName != null) {
query.append("AND ");
query.append(tableName);
query.append(".");
query.append(deletedFieldName);
query.append(" = false ");
}
}
}
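The uniqueness check earlier already shows the intended call pattern; as a standalone sketch (the entity and column names are illustrative):
// Fetch the first MyEntity whose 'name' column equals "Alice"; returns null when nothing matches.
final MyEntity other = DataAccess.getWhere(MyEntity.class,
        new Condition(new QueryCondition("name", "==", "Alice")));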

View File

@@ -0,0 +1,8 @@
package org.kar.archidata.dataAccess.options;
import org.kar.archidata.dataAccess.QueryOption;
/** This option permits generating a DROP TABLE IF EXISTS statement when creating the structure. */
public class CreateDropTable extends QueryOption {
public CreateDropTable() {}
}

View File

@@ -0,0 +1,22 @@
package org.kar.archidata.dataAccess.options;
import java.util.List;
import org.kar.archidata.dataAccess.QueryOption;
/** Option to restrict an operation to a specific list of field names. */
public class FilterValue extends QueryOption {
public final List<String> filterValue;
public FilterValue(final List<String> filterValue) {
this.filterValue = filterValue;
}
public FilterValue(final String... filterValue) {
this.filterValue = List.of(filterValue);
}
public List<String> getValues() {
return this.filterValue;
}
}
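A short sketch of building the option; how the data-access layer consumes getValues() for an update is assumed here:
// Restrict an operation to the listed columns only (column names are illustrative).
final FilterValue filter = new FilterValue("name", "description");
final List<String> columns = filter.getValues(); // ["name", "description"]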

View File

@@ -0,0 +1,17 @@
package org.kar.archidata.dataAccess.options;
import org.kar.archidata.dataAccess.QueryOption;
/** Option that permits accessing a table structure under a name other than the one defined in the structure. Note: internal use for link tables (see:
* org.kar.archidata.dataAccess.addOn.model.LinkTable). */
public class OverrideTableName extends QueryOption {
private final String name;
public OverrideTableName(final String name) {
this.name = name;
}
public String getName() {
return this.name;
}
}

View File

@@ -0,0 +1,8 @@
package org.kar.archidata.dataAccess.options;
import org.kar.archidata.dataAccess.QueryOption;
/** By default some columns, such as createdAt and updatedAt, are not read. This option permits reading them. */
public class ReadAllColumn extends QueryOption {
public ReadAllColumn() {}
}
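The migration engine later in this change reads the migration table with QueryOptions.READ_ALL_COLOMN; passing an instance of this option is assumed here to have the same effect (call shape is an assumption, not confirmed by this diff):
// Read every column, including the ones skipped by default such as createdAt/updatedAt (assumed call shape).
final List<Migration> history = DataAccess.gets(Migration.class, new ReadAllColumn());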

View File

@@ -0,0 +1,16 @@
package org.kar.archidata.dataAccess.options;
import org.kar.archidata.dataAccess.QueryOption;
/** Internal option that permits transmitting the key when updating the ManyToMany values (first step). */
public class TransmitKey extends QueryOption {
private final Object key;
public TransmitKey(final Object key) {
this.key = key;
}
public Object getKey() {
return this.key;
}
}

View File

@@ -1,60 +1,88 @@
package org.kar.archidata.db;
import org.kar.archidata.dataAccess.DataAccess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DBConfig {
private final String hostname;
private final int port;
private final String login;
private final String password;
private final String dbName;
static final Logger LOGGER = LoggerFactory.getLogger(DataAccess.class);
private final String type;
private final String hostname;
private final int port;
private final String login;
private final String password;
private final String dbName;
private final boolean keepConnected;
public DBConfig(String hostname, Integer port, String login, String password, String dbName) {
if (hostname == null) {
this.hostname = "localhost";
} else {
this.hostname = hostname;
}
if (port == null) {
this.port = 3306;
} else {
this.port = port;
}
this.login = login;
this.password = password;
this.dbName = dbName;
}
public DBConfig(final String type, final String hostname, final Integer port, final String login, final String password, final String dbName, final boolean keepConnected) {
if (type == null) {
this.type = "mysql";
} else {
this.type = type;
}
if (hostname == null) {
this.hostname = "localhost";
} else {
this.hostname = hostname;
}
if (port == null) {
this.port = 3306;
} else {
this.port = port;
}
this.login = login;
this.password = password;
this.dbName = dbName;
this.keepConnected = keepConnected;
}
@Override
public String toString() {
return "DBConfig{" +
"hostname='" + hostname + '\'' +
", port=" + port +
", login='" + login + '\'' +
", password='" + password + '\'' +
", dbName='" + dbName + '\'' +
'}';
}
@Override
public String toString() {
return "DBConfig{type='" + this.type + '\'' + ", hostname='" + this.hostname + '\'' + ", port=" + this.port + ", login='" + this.login + '\'' + ", password='" + this.password + '\''
+ ", dbName='" + this.dbName + "' }";
}
public String getHostname() {
return hostname;
}
public String getHostname() {
return this.hostname;
}
public int getPort() {
return port;
}
public int getPort() {
return this.port;
}
public String getLogin() {
return login;
}
public String getLogin() {
return this.login;
}
public String getPassword() {
return password;
}
public String getPassword() {
return this.password;
}
public String getDbName() {
return dbName;
}
public String getDbName() {
return this.dbName;
}
public String getUrl() {
return "jdbc:mysql://" + this.hostname + ":" + this.port + "/" + this.dbName + "?allowPublicKeyRetrieval=true&useSSL=false&serverTimezone=UTC";
}
public boolean getKeepConnected() {
return this.keepConnected;
}
public String getUrl() {
return getUrl(false);
}
public String getUrl(final boolean isRoot) {
if (this.type.equals("sqlite")) {
if (isRoot) {
LOGGER.error("Can not manage root connection on SQLite...");
}
if (this.hostname.equals("memory")) {
return "jdbc:sqlite::memory:";
}
return "jdbc:sqlite:" + this.hostname + ".db";
}
if (isRoot) {
return "jdbc:" + this.type + "://" + this.hostname + ":" + this.port + "/?allowPublicKeyRetrieval=true&useSSL=false&serverTimezone=UTC";
}
return "jdbc:" + this.type + "://" + this.hostname + ":" + this.port + "/" + this.dbName + "?allowPublicKeyRetrieval=true&useSSL=false&serverTimezone=UTC";
}
}
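A sketch of the JDBC URLs this configuration produces (the values are illustrative):
// SQLite in memory: the "memory" hostname maps to the in-memory JDBC URL.
final DBConfig sqliteConf = new DBConfig("sqlite", "memory", null, null, null, "test_db", true);
sqliteConf.getUrl(); // jdbc:sqlite::memory:
// MySQL: a null hostname/port falls back to localhost:3306.
final DBConfig mysqlConf = new DBConfig("mysql", null, null, "root", "secret", "my_db", false);
mysqlConf.getUrl(); // jdbc:mysql://localhost:3306/my_db?allowPublicKeyRetrieval=true&useSSL=false&serverTimezone=UTC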

View File

@@ -1,45 +1,92 @@
package org.kar.archidata.db;
import org.kar.archidata.model.User;
import java.io.Closeable;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.sql.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DBEntry {
public DBConfig config;
public Connection connection;
public class DBEntry implements Closeable {
final static Logger LOGGER = LoggerFactory.getLogger(DBEntry.class);
public DBConfig config;
public Connection connection;
private static List<DBEntry> stored = new ArrayList<>();
public DBEntry(DBConfig config) {
this.config = config;
connect();
}
private DBEntry(final DBConfig config, final boolean root) throws IOException {
this.config = config;
if (root) {
connectRoot();
} else {
connect();
}
}
public void connect() {
try {
connection = DriverManager.getConnection(config.getUrl(), config.getLogin(), config.getPassword());
} catch (SQLException ex) {
ex.printStackTrace();
}
public static DBEntry createInterface(final DBConfig config) throws IOException {
return createInterface(config, false);
}
}
public static DBEntry createInterface(final DBConfig config, final boolean root) throws IOException {
if (config.getKeepConnected()) {
for (final DBEntry elem : stored) {
if (elem == null) {
continue;
}
if (elem.config.getUrl().equals(config.getUrl())) {
return elem;
}
}
final DBEntry tmp = new DBEntry(config, root);
stored.add(tmp);
return tmp;
} else {
return new DBEntry(config, root);
}
}
public void disconnect() {
try {
//connection.commit();
connection.close();
} catch (SQLException ex) {
ex.printStackTrace();
}
}
/*
public void test() throws SQLException {
String query = "SELECT * FROM user";
Statement st = connection.createStatement();
ResultSet rs = st.executeQuery(query);
System.out.println("List of user:");
if (rs.next()) {
User user = new User(rs);
System.out.println(" - " + user);
}
}
*/
public void connectRoot() throws IOException {
try {
this.connection = DriverManager.getConnection(this.config.getUrl(true), this.config.getLogin(), this.config.getPassword());
} catch (final SQLException ex) {
throw new IOException("Connection db fail: " + ex.getMessage());
}
}
public void connect() throws IOException {
try {
this.connection = DriverManager.getConnection(this.config.getUrl(), this.config.getLogin(), this.config.getPassword());
} catch (final SQLException ex) {
throw new IOException("Connection db fail: " + ex.getMessage());
}
}
@Override
public void close() throws IOException {
if (this.config.getKeepConnected()) {
return;
}
closeForce();
}
public void closeForce() throws IOException {
try {
// connection.commit();
this.connection.close();
} catch (final SQLException ex) {
throw new IOException("Dis-connection db fail: " + ex.getMessage());
}
}
public static void closeAllForceMode() throws IOException {
for (final DBEntry entry : stored) {
entry.closeForce();
}
stored = new ArrayList<>();
}
}
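Since DBEntry now implements Closeable, a caller can use try-with-resources; close() keeps the connection open when keepConnected is set. A sketch, where 'config' is any DBConfig instance:
// Open (or reuse) a connection for the given configuration and release it automatically.
try (DBEntry entry = DBEntry.createInterface(config)) {
    final java.sql.Connection con = entry.connection;
    // ... execute statements through 'con' ...
} // close() is a no-op when config.getKeepConnected() is true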

View File

@@ -0,0 +1,9 @@
package org.kar.archidata.exception;
public class DataAccessException extends Exception {
private static final long serialVersionUID = 1L;
public DataAccessException(final String message) {
super(message);
}
}

View File

@@ -0,0 +1,19 @@
package org.kar.archidata.exception;
import jakarta.ws.rs.core.Response;
public class FailException extends Exception {
private static final long serialVersionUID = 1L;
public final Response.Status status;
public FailException(final Response.Status status, final String message) {
super(message);
this.status = status;
}
public FailException(final String message) {
super(message);
this.status = Response.Status.BAD_REQUEST;
}
}

View File

@@ -0,0 +1,21 @@
package org.kar.archidata.exception;
import jakarta.ws.rs.core.Response;
public class InputException extends Exception {
private static final long serialVersionUID = 1L;
public final String missingVariable;
public final Response.Status status;
public InputException(final Response.Status status, final String variable, final String message) {
super(message);
this.missingVariable = variable;
this.status = status;
}
public InputException(final String variable, final String message) {
super(message);
this.missingVariable = variable;
this.status = Response.Status.NOT_ACCEPTABLE;
}
}
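These exceptions carry a jakarta.ws.rs Response.Status so that a generic catcher can map them to an HTTP error; a sketch of raising one from an endpoint (the surrounding endpoint and request object are hypothetical):
// Reject an invalid input field; the single-argument constructor defaults to 406 NOT_ACCEPTABLE.
if (request.name == null || request.name.isBlank()) {
    throw new InputException("name", "Can not be null or empty");
}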

View File

@@ -0,0 +1,11 @@
package org.kar.archidata.exception;
import jakarta.ws.rs.core.Response;
public class NotFoundException extends FailException {
private static final long serialVersionUID = 1L;
public NotFoundException(final String message) {
super(Response.Status.NOT_FOUND, message);
}
}

View File

@@ -0,0 +1,37 @@
package org.kar.archidata.exception;
import java.util.UUID;
public class RESTErrorResponseExeption extends Exception {
public UUID uuid;
public String time;
public String error;
public String message;
public int status;
public String statusMessage;
public RESTErrorResponseExeption() {
this.uuid = null;
this.time = null;
this.error = null;
this.message = null;
this.status = 0;
this.statusMessage = null;
}
public RESTErrorResponseExeption(final UUID uuid, final String time, final String error, final String message, final int status, final String statusMessage) {
this.uuid = uuid;
this.time = time;
this.error = error;
this.message = message;
this.status = status;
this.statusMessage = statusMessage;
}
@Override
public String toString() {
return "RESTErrorResponseExeption [uuid=" + this.uuid + ", time=" + this.time + ", error=" + this.error + ", message=" + this.message + ", status=" + this.status + ", statusMessage="
+ this.statusMessage + "]";
}
}

View File

@@ -0,0 +1,18 @@
package org.kar.archidata.exception;
import jakarta.ws.rs.core.Response;
public class SystemException extends Exception {
private static final long serialVersionUID = 1L;
public final Response.Status status;
public SystemException(final Response.Status status, final String message) {
super(message);
this.status = status;
}
public SystemException(final String message) {
super(message);
this.status = Response.Status.INTERNAL_SERVER_ERROR;
}
}

View File

@@ -0,0 +1,11 @@
package org.kar.archidata.exception;
import jakarta.ws.rs.core.Response;
public class UnAuthorizedException extends FailException {
private static final long serialVersionUID = 1L;
public UnAuthorizedException(final String message) {
super(Response.Status.UNAUTHORIZED, message);
}
}

View File

@@ -1,198 +1,219 @@
package org.kar.archidata.filter;
import java.lang.reflect.Method;
import org.kar.archidata.annotation.security.DenyAll;
import org.kar.archidata.annotation.security.PermitAll;
import org.kar.archidata.annotation.security.RolesAllowed;
import javax.annotation.Priority;
import javax.ws.rs.Priorities;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.ResourceInfo;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.Provider;
import org.kar.archidata.UserDB;
import org.kar.archidata.annotation.security.PermitTokenInURI;
import org.kar.archidata.model.User;
import org.kar.archidata.util.JWTWrapper;
import com.nimbusds.jwt.JWTClaimsSet;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
// https://stackoverflow.com/questions/26777083/best-practice-for-rest-token-based-authentication-with-jax-rs-and-jersey
// https://stackoverflow.com/questions/26777083/best-practice-for-rest-token-based-authentication-with-jax-rs-and-jersey/45814178#45814178
// https://stackoverflow.com/questions/32817210/how-to-access-jersey-resource-secured-by-rolesallowed
import org.kar.archidata.annotation.security.PermitTokenInURI;
import org.kar.archidata.catcher.RestErrorResponse;
import org.kar.archidata.model.UserByToken;
import org.kar.archidata.tools.JWTWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.nimbusds.jwt.JWTClaimsSet;
import jakarta.annotation.Priority;
import jakarta.annotation.security.DenyAll;
import jakarta.annotation.security.PermitAll;
import jakarta.annotation.security.RolesAllowed;
import jakarta.ws.rs.Priorities;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerRequestFilter;
import jakarta.ws.rs.container.ResourceInfo;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.HttpHeaders;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.MultivaluedMap;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.Provider;
//@PreMatching
@Provider
@Priority(Priorities.AUTHENTICATION)
public class AuthenticationFilter implements ContainerRequestFilter {
private final static Logger LOGGER = LoggerFactory.getLogger(AuthenticationFilter.class);
@Context
private ResourceInfo resourceInfo;
private static final String AUTHENTICATION_SCHEME = "Yota";
protected final String applicationName;
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
/*
System.out.println("-----------------------------------------------------");
System.out.println("---- Check if have authorization ----");
System.out.println("-----------------------------------------------------");
System.out.println(" for:" + requestContext.getUriInfo().getPath());
*/
Method method = resourceInfo.getResourceMethod();
// Access denied for all
if(method.isAnnotationPresent(DenyAll.class)) {
System.out.println(" ==> deny all " + requestContext.getUriInfo().getPath());
requestContext.abortWith(Response.status(Response.Status.FORBIDDEN).entity("Access blocked !!!").build());
return;
}
//Access allowed for all
if( method.isAnnotationPresent(PermitAll.class)) {
System.out.println(" ==> permit all " + requestContext.getUriInfo().getPath());
// no control ...
return;
}
// this is a security guard, all the API must define their access level:
if(!method.isAnnotationPresent(RolesAllowed.class)) {
System.out.println(" ==> missing @RolesAllowed " + requestContext.getUriInfo().getPath());
requestContext.abortWith(Response.status(Response.Status.FORBIDDEN).entity("Access ILLEGAL !!!").build());
return;
}
private static final String AUTHENTICATION_SCHEME = "Yota";
private static final String AUTHENTICATION_TOKEN_SCHEME = "Zota";
// Get the Authorization header from the request
String authorizationHeader = requestContext.getHeaderString(HttpHeaders.AUTHORIZATION);
//System.out.println("authorizationHeader: " + authorizationHeader);
if(authorizationHeader == null && method.isAnnotationPresent(PermitTokenInURI.class)) {
MultivaluedMap<String, String> quaryparam = requestContext.getUriInfo().getQueryParameters();
for (Entry<String, List<String>> item: quaryparam.entrySet()) {
if (item.getKey().equals(HttpHeaders.AUTHORIZATION)) {
if (!item.getValue().isEmpty()) {
authorizationHeader = item.getValue().get(0);
}
break;
}
}
}
//System.out.println("authorizationHeader: " + authorizationHeader);
/*
System.out.println(" -------------------------------");
// this get the parameters inside the pre-parsed element in the request ex: @Path("thumbnail/{id}") generate a map with "id"
MultivaluedMap<String, String> pathparam = requestContext.getUriInfo().getPathParameters();
for (Entry<String, List<String>> item: pathparam.entrySet()) {
System.out.println(" param: " + item.getKey() + " ==>" + item.getValue());
}
System.out.println(" -------------------------------");
// need to add "@QueryParam("p") String token, " in the model
//MultivaluedMap<String, String> quaryparam = requestContext.getUriInfo().getQueryParameters();
for (Entry<String, List<String>> item: quaryparam.entrySet()) {
System.out.println(" query: " + item.getKey() + " ==>" + item.getValue());
}
System.out.println(" -------------------------------");
List<PathSegment> segments = requestContext.getUriInfo().getPathSegments();
for (final PathSegment item: segments) {
System.out.println(" query: " + item.getPath() + " ==>" + item.getMatrixParameters());
}
System.out.println(" -------------------------------");
MultivaluedMap<String, String> headers = requestContext.getHeaders();
for (Entry<String, List<String>> item: headers.entrySet()) {
System.out.println(" headers: " + item.getKey() + " ==>" + item.getValue());
}
System.out.println(" -------------------------------");
*/
// Validate the Authorization header data Model "Yota userId:token"
if (!isTokenBasedAuthentication(authorizationHeader)) {
System.out.println("REJECTED unauthorized: " + requestContext.getUriInfo().getPath());
abortWithUnauthorized(requestContext);
return;
}
// check JWT token (basic:)
public AuthenticationFilter(final String applicationName) {
this.applicationName = applicationName;
}
// Extract the token from the Authorization header (Remove "Yota ")
String token = authorizationHeader.substring(AUTHENTICATION_SCHEME.length()).trim();
System.out.println("token: " + token);
User user = null;
try {
user = validateToken(token);
} catch (Exception e) {
System.out.println("Fail to validate token: " + e.getMessage());
abortWithUnauthorized(requestContext);
return;
}
if (user == null) {
System.out.println("get a NULL user ...");
abortWithUnauthorized(requestContext);
return;
}
// create the security context model:
String scheme = requestContext.getUriInfo().getRequestUri().getScheme();
MySecurityContext userContext = new MySecurityContext(user, scheme);
// retrieve the allowed right:
RolesAllowed rolesAnnotation = method.getAnnotation(RolesAllowed.class);
List<String> roles = Arrays.asList(rolesAnnotation.value());
// check if the user have the right:
boolean haveRight = false;
for (String role : roles) {
if (userContext.isUserInRole(role)) {
haveRight = true;
break;
}
}
//Is user valid?
if( ! haveRight) {
System.out.println("REJECTED not enought right : " + requestContext.getUriInfo().getPath() + " require: " + roles);
requestContext.abortWith(Response.status(Response.Status.UNAUTHORIZED).entity("Not enought RIGHT !!!").build());
return;
}
requestContext.setSecurityContext(userContext);
System.out.println("Get local user : " + user);
}
private boolean isTokenBasedAuthentication(String authorizationHeader) {
// Check if the Authorization header is valid
// It must not be null and must be prefixed with "Bearer" plus a whitespace
// The authentication scheme comparison must be case-insensitive
return authorizationHeader != null && authorizationHeader.toLowerCase().startsWith(AUTHENTICATION_SCHEME.toLowerCase() + " ");
}
@Override
public void filter(final ContainerRequestContext requestContext) throws IOException {
/* logger.debug("-----------------------------------------------------"); logger.debug("---- Check if have authorization ----");
* logger.debug("-----------------------------------------------------"); logger.debug(" for:{}", requestContext.getUriInfo().getPath()); */
final Method method = this.resourceInfo.getResourceMethod();
// Access denied for all
if (method.isAnnotationPresent(DenyAll.class)) {
LOGGER.debug(" ==> deny all {}", requestContext.getUriInfo().getPath());
requestContext.abortWith(Response.status(Response.Status.FORBIDDEN).entity("Access blocked !!!").build());
return;
}
private void abortWithUnauthorized(ContainerRequestContext requestContext) {
// Access allowed for all
if (method.isAnnotationPresent(PermitAll.class)) {
// logger.debug(" ==> permit all " + requestContext.getUriInfo().getPath());
// no control ...
return;
}
// this is a security guard, all the API must define their access level:
if (!method.isAnnotationPresent(RolesAllowed.class)) {
LOGGER.error(" ==> missing @RolesAllowed {}", requestContext.getUriInfo().getPath());
requestContext.abortWith(Response.status(Response.Status.FORBIDDEN).entity("Access ILLEGAL !!!").build());
return;
}
// Abort the filter chain with a 401 status code response
// The WWW-Authenticate header is sent along with the response
requestContext.abortWith(
Response.status(Response.Status.UNAUTHORIZED)
.header(HttpHeaders.WWW_AUTHENTICATE,
AUTHENTICATION_SCHEME + " base64(HEADER).base64(CONTENT).base64(KEY)")
.build());
}
// Get the Authorization header from the request
String authorizationHeader = requestContext.getHeaderString(HttpHeaders.AUTHORIZATION);
// logger.debug("authorizationHeader: {}", authorizationHeader);
if (authorizationHeader == null && method.isAnnotationPresent(PermitTokenInURI.class)) {
final MultivaluedMap<String, String> quaryparam = requestContext.getUriInfo().getQueryParameters();
for (final Entry<String, List<String>> item : quaryparam.entrySet()) {
if (item.getKey().equals(HttpHeaders.AUTHORIZATION)) {
if (!item.getValue().isEmpty()) {
authorizationHeader = item.getValue().get(0);
}
break;
}
}
}
// logger.debug("authorizationHeader: {}", authorizationHeader);
final boolean isApplicationToken = isApplicationTokenBasedAuthentication(authorizationHeader);
final boolean isJwtToken = isTokenBasedAuthentication(authorizationHeader);
// Validate the Authorization header data Model "Yota jwt.to.ken" "Zota tokenId:hash(token)"
if (!isApplicationToken && !isJwtToken) {
LOGGER.warn("REJECTED unauthorized: {}", requestContext.getUriInfo().getPath());
abortWithUnauthorized(requestContext, "REJECTED unauthorized: " + requestContext.getUriInfo().getPath());
return;
}
UserByToken userByToken = null;
if (isJwtToken) {
// Extract the token from the Authorization header (Remove "Yota ")
final String token = authorizationHeader.substring(AUTHENTICATION_SCHEME.length()).trim();
// logger.debug("token: {}", token);
try {
userByToken = validateJwtToken(token);
} catch (final Exception e) {
LOGGER.error("Fail to validate token: {}", e.getMessage());
abortWithUnauthorized(requestContext, "Fail to validate token: " + e.getMessage());
return;
}
if (userByToken == null) {
LOGGER.warn("get a NULL user ...");
abortWithUnauthorized(requestContext, "get a NULL user ...");
return;
}
} else {
// Extract the token from the Authorization header (Remove "Zota ")
final String token = authorizationHeader.substring(AUTHENTICATION_TOKEN_SCHEME.length()).trim();
// logger.debug("token: {}", token);
try {
userByToken = validateToken(token);
} catch (final Exception e) {
LOGGER.error("Fail to validate token: {}", e.getMessage());
abortWithUnauthorized(requestContext, "Fail to validate token: " + e.getMessage());
return;
}
if (userByToken == null) {
LOGGER.warn("get a NULL application ...");
abortWithUnauthorized(requestContext, "get a NULL application ...");
return;
}
private User validateToken(String authorization) throws Exception {
System.out.println(" validate token : " + authorization);
JWTClaimsSet ret = JWTWrapper.validateToken(authorization, "KarAuth", null);
// check the token is valid !!! (signed and coherent issuer...
if (ret == null) {
System.out.println("The token is not valid: '" + authorization + "'");
return null;
}
// check userID
String userUID = ret.getSubject();
long id = Long.parseLong(userUID);
System.out.println("request user: '" + userUID + "'");
return UserDB.getUserOrCreate(id, (String)ret.getClaim("login") );
}
}
// create the security context model:
final String scheme = requestContext.getUriInfo().getRequestUri().getScheme();
final MySecurityContext userContext = new MySecurityContext(userByToken, scheme);
// retrieve the allowed right:
final RolesAllowed rolesAnnotation = method.getAnnotation(RolesAllowed.class);
final List<String> roles = Arrays.asList(rolesAnnotation.value());
// check if the user have the right:
boolean haveRight = false;
for (final String role : roles) {
if (userContext.isUserInRole(role)) {
haveRight = true;
break;
}
}
// Is user valid?
if (!haveRight) {
LOGGER.error("REJECTED not enought right : {} require: {}", requestContext.getUriInfo().getPath(), roles);
requestContext.abortWith(Response.status(Response.Status.UNAUTHORIZED).entity("Not enought RIGHT !!!").build());
return;
}
requestContext.setSecurityContext(userContext);
// logger.debug("Get local user : {} / {}", user, userByToken);
}
private boolean isTokenBasedAuthentication(final String authorizationHeader) {
// Check if the Authorization header is valid
// It must not be null and must be prefixed with "Bearer" plus a whitespace
// The authentication scheme comparison must be case-insensitive
return authorizationHeader != null && authorizationHeader.toLowerCase().startsWith(AUTHENTICATION_SCHEME.toLowerCase() + " ");
}
private boolean isApplicationTokenBasedAuthentication(final String authorizationHeader) {
// Check if the Authorization header is valid
// It must not be null and must be prefixed with "Bearer" plus a whitespace
// The authentication scheme comparison must be case-insensitive
return authorizationHeader != null && authorizationHeader.toLowerCase().startsWith(AUTHENTICATION_TOKEN_SCHEME.toLowerCase() + " ");
}
private void abortWithUnauthorized(final ContainerRequestContext requestContext, final String message) {
// Abort the filter chain with a 401 status code response
// The WWW-Authenticate header is sent along with the response
LOGGER.warn("abortWithUnauthorized:");
final RestErrorResponse ret = new RestErrorResponse(Response.Status.UNAUTHORIZED, "Unauthorized", message);
LOGGER.error("Error UUID={}", ret.uuid);
requestContext.abortWith(Response.status(ret.status).header(HttpHeaders.WWW_AUTHENTICATE, AUTHENTICATION_SCHEME + " base64(HEADER).base64(CONTENT).base64(KEY)").entity(ret)
.type(MediaType.APPLICATION_JSON).build());
}
protected UserByToken validateToken(final String authorization) throws Exception {
LOGGER.info("Must be Override by the application implmentation, otherwise it dose not work");
return null;
}
// must be override to be good implementation
protected UserByToken validateJwtToken(final String authorization) throws Exception {
// logger.debug(" validate token : " + authorization);
final JWTClaimsSet ret = JWTWrapper.validateToken(authorization, "KarAuth", null);
// check that the token is valid !!! (signed and coherent issuer...)
if (ret == null) {
LOGGER.error("The token is not valid: '{}'", authorization);
return null;
}
// check userID
final String userUID = ret.getSubject();
final long id = Long.parseLong(userUID);
final UserByToken user = new UserByToken();
user.id = id;
user.name = (String) ret.getClaim("login");
user.type = UserByToken.TYPE_USER;
final Object rowRight = ret.getClaim("right");
if (rowRight != null) {
final Map<String, Map<String, Object>> rights = (Map<String, Map<String, Object>>) ret.getClaim("right");
if (rights.containsKey(this.applicationName)) {
user.right = rights.get(this.applicationName);
} else {
LOGGER.error("Connect with no right for this application='{}' full Right='{}'", this.applicationName, rights);
}
}
// logger.debug("request user: '{}' right: '{}' row='{}'", userUID, user.right, rowRight);
return user;
// return UserDB.getUserOrCreate(id, (String)ret.getClaim("login") );
}
}
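Applications are expected to subclass the filter, pass their application name (used to pick the right entry in the JWT 'right' claim), and override validateToken for the application-token scheme. A sketch; the class name, provider registration and token lookup are illustrative, not part of this change:
// Hypothetical application-specific filter.
@jakarta.ws.rs.ext.Provider
@jakarta.annotation.Priority(jakarta.ws.rs.Priorities.AUTHENTICATION)
public class MyAppAuthenticationFilter extends AuthenticationFilter {
    public MyAppAuthenticationFilter() {
        super("my-application"); // name matched against the JWT 'right' claim
    }
    @Override
    protected UserByToken validateToken(final String authorization) throws Exception {
        // Look up the "Zota tokenId:hash(token)" credential in the application's own store (assumed logic).
        return null; // return a populated UserByToken when the token is recognized
    }
}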

View File

@@ -1,25 +1,23 @@
package org.kar.archidata.filter;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerResponseContext;
import javax.ws.rs.container.ContainerResponseFilter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerResponseContext;
import jakarta.ws.rs.container.ContainerResponseFilter;
import jakarta.ws.rs.ext.Provider;
@Provider
public class CORSFilter implements ContainerResponseFilter {
@Override
public void filter(ContainerRequestContext request,
ContainerResponseContext response) throws IOException {
//System.err.println("filter cors ..." + request.toString());
@Override
public void filter(final ContainerRequestContext request, final ContainerResponseContext response) throws IOException {
// System.err.println("filter cors ..." + request.toString());
response.getHeaders().add("Access-Control-Allow-Origin", "*");
response.getHeaders().add("Access-Control-Allow-Headers", "*");
// "Origin, content-type, Content-type, Accept, authorization, mime-type, filename");
response.getHeaders().add("Access-Control-Allow-Credentials", "true");
response.getHeaders().add("Access-Control-Allow-Methods",
"GET, POST, PUT, DELETE, OPTIONS, HEAD");
}
response.getHeaders().add("Access-Control-Allow-Origin", "*");
response.getHeaders().add("Access-Control-Allow-Headers", "*");
// "Origin, content-type, Content-type, Accept, authorization, mime-type, filename");
response.getHeaders().add("Access-Control-Allow-Credentials", "true");
response.getHeaders().add("Access-Control-Allow-Methods", "GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD");
}
}

View File

@@ -1,22 +1,22 @@
package org.kar.archidata.filter;
import org.kar.archidata.model.User;
import java.security.Principal;
import org.kar.archidata.model.UserByToken;
public class GenericContext implements Principal {
public User user;
public UserByToken userByToken;
public GenericContext(User user) {
this.user = user;
}
public GenericContext(final UserByToken userByToken) {
this.userByToken = userByToken;
}
@Override
public String getName() {
if (user == null) {
return "???";
}
return user.login;
}
@Override
public String getName() {
if (this.userByToken == null) {
return "???";
}
return this.userByToken.name;
}
}

View File

@@ -1,47 +1,49 @@
package org.kar.archidata.filter;
import org.kar.archidata.model.User;
import javax.ws.rs.core.SecurityContext;
import java.security.Principal;
import org.kar.archidata.model.UserByToken;
import jakarta.ws.rs.core.SecurityContext;
// https://simplapi.wordpress.com/2015/09/19/jersey-jax-rs-securitycontext-in-action/
class MySecurityContext implements SecurityContext {
private final GenericContext contextPrincipale;
private final String sheme;
private final GenericContext contextPrincipale;
private final String sheme;
public MySecurityContext(User user, String sheme) {
this.contextPrincipale = new GenericContext(user);
this.sheme = sheme;
}
public MySecurityContext(final UserByToken userByToken, final String sheme) {
this.contextPrincipale = new GenericContext(userByToken);
this.sheme = sheme;
}
@Override
public Principal getUserPrincipal() {
return contextPrincipale;
}
@Override
public Principal getUserPrincipal() {
return this.contextPrincipale;
}
@Override
public boolean isUserInRole(String role) {
if (role.contentEquals("ADMIN")) {
return contextPrincipale.user.admin == true;
}
if (role.contentEquals("USER")) {
// if not an admin, this is a user...
return true; //contextPrincipale.user.admin == false;
}
return false;
}
@Override
public boolean isUserInRole(final String role) {
if (this.contextPrincipale.userByToken != null) {
final Object value = this.contextPrincipale.userByToken.right.get(role);
if (value instanceof final Boolean ret) {
return ret;
}
}
return false;
}
@Override
public boolean isSecure() {
return true;
}
@Override
public boolean isSecure() {
return this.sheme.equalsIgnoreCase("https");
}
@Override
public String getAuthenticationScheme() {
return "Yota";
}
@Override
public String getAuthenticationScheme() {
if (this.contextPrincipale.userByToken != null) {
return "Zota";
}
return null;
}
}
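The roles checked by isUserInRole come from the right map carried by UserByToken and are declared on the endpoints with @RolesAllowed; a sketch of a protected endpoint (the path, method and role name are illustrative):
// Only callers whose token grants the ADMIN right for this application reach the method body.
@GET
@Path("admin/stats")
@RolesAllowed("ADMIN")
public Response getAdminStats(@Context final SecurityContext sc) {
    return Response.ok(sc.getUserPrincipal().getName()).build();
}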

View File

@@ -1,21 +1,20 @@
package org.kar.archidata.filter;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerRequestFilter;
import jakarta.ws.rs.container.PreMatching;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.Provider;
@Provider
@PreMatching
public class OptionFilter implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
if (requestContext.getMethod().contentEquals("OPTIONS")) {
requestContext.abortWith(Response.status(Response.Status.NO_CONTENT).build());
}
}
@Override
public void filter(final ContainerRequestContext requestContext) throws IOException {
if (requestContext.getMethod().contentEquals("OPTIONS")) {
requestContext.abortWith(Response.status(Response.Status.NO_CONTENT).build());
}
}
}

View File

@@ -1,60 +0,0 @@
package org.kar.archidata.internal;
//import io.scenarium.logger.LogLevel;
//import io.scenarium.logger.Logger;
public class Log {
// private static final String LIB_NAME = "logger";
// private static final String LIB_NAME_DRAW = Logger.getDrawableName(LIB_NAME);
// private static final boolean PRINT_CRITICAL = Logger.getNeedPrint(LIB_NAME, LogLevel.CRITICAL);
// private static final boolean PRINT_ERROR = Logger.getNeedPrint(LIB_NAME, LogLevel.ERROR);
// private static final boolean PRINT_WARNING = Logger.getNeedPrint(LIB_NAME, LogLevel.WARNING);
// private static final boolean PRINT_INFO = Logger.getNeedPrint(LIB_NAME, LogLevel.INFO);
// private static final boolean PRINT_DEBUG = Logger.getNeedPrint(LIB_NAME, LogLevel.DEBUG);
// private static final boolean PRINT_VERBOSE = Logger.getNeedPrint(LIB_NAME, LogLevel.VERBOSE);
// private static final boolean PRINT_TODO = Logger.getNeedPrint(LIB_NAME, LogLevel.TODO);
// private static final boolean PRINT_PRINT = Logger.getNeedPrint(LIB_NAME, LogLevel.PRINT);
//
// private Log() {}
//
// public static void print(String data) {
// if (PRINT_PRINT)
// Logger.print(LIB_NAME_DRAW, data);
// }
//
// public static void todo(String data) {
// if (PRINT_TODO)
// Logger.todo(LIB_NAME_DRAW, data);
// }
//
// public static void critical(String data) {
// if (PRINT_CRITICAL)
// Logger.critical(LIB_NAME_DRAW, data);
// }
//
// public static void error(String data) {
// if (PRINT_ERROR)
// Logger.error(LIB_NAME_DRAW, data);
// }
//
// public static void warning(String data) {
// if (PRINT_WARNING)
// Logger.warning(LIB_NAME_DRAW, data);
// }
//
// public static void info(String data) {
// if (PRINT_INFO)
// Logger.info(LIB_NAME_DRAW, data);
// }
//
// public static void debug(String data) {
// if (PRINT_DEBUG)
// Logger.debug(LIB_NAME_DRAW, data);
// }
//
// public static void verbose(String data) {
// if (PRINT_VERBOSE)
// Logger.verbose(LIB_NAME_DRAW, data);
// }
}

View File

@@ -0,0 +1,332 @@
package org.kar.archidata.migration;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.dataAccess.DataFactory;
import org.kar.archidata.dataAccess.QueryOptions;
import org.kar.archidata.db.DBConfig;
import org.kar.archidata.db.DBEntry;
import org.kar.archidata.migration.model.Migration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MigrationEngine {
final static Logger LOGGER = LoggerFactory.getLogger(MigrationEngine.class);
// List of ordered migrations
private final List<MigrationInterface> datas;
// initialization of the migration if the DB is not present...
private MigrationInterface init;
/** Migration engine constructor (empty). */
public MigrationEngine() {
this(new ArrayList<>(), null);
}
/** Migration engine constructor (specific mode).
* @param datas All the migrations, ordered.
* @param init Initialization migration model. */
public MigrationEngine(final List<MigrationInterface> datas, final MigrationInterface init) {
this.datas = datas;
this.init = init;
}
/** Add a migration to the list.
* @param migration Migration to add. */
public void add(final MigrationInterface migration) {
this.datas.add(migration);
}
/** Set first initialization class
* @param migration migration class for first init. */
public void setInit(final MigrationInterface migration) {
this.init = migration;
}
/** Get the current version/migration name
* @return Model representing the last migration. If null, no migration has been done.
* @throws MigrationException */
public Migration getCurrentVersion() throws MigrationException {
if (!DataAccess.isTableExist("KAR_migration")) {
return null;
}
try {
List<Migration> data = null;
try {
data = DataAccess.gets(Migration.class, QueryOptions.READ_ALL_COLOMN);
} catch (final Exception e) {
// Previous version does not have the same timeCode...
data = DataAccess.gets(Migration.class);
}
if (data == null) {
LOGGER.error("Can not collect the migration table in the DB:{}");
return null;
}
if (data.size() == 0) {
LOGGER.error("Fail to Request migration table in the DB: empty size");
return null;
}
LOGGER.info("List of migrations:");
for (final Migration elem : data) {
LOGGER.info(" - date={} name={} end={}", elem.updatedAt, elem.name, elem.terminated);
}
return data.get(data.size() - 1);
} catch (final Exception ex) {
LOGGER.error("Fail to Request migration table in the DB:{}", ex.getMessage());
ex.printStackTrace();
}
throw new MigrationException("Can not retreive Migration model");
}
/** Process the automatic migration of the system. On failure, the function waits for administrator intervention to correct the problem.
* @param config SQL connection for the migration.
* @throws InterruptedException The user interrupted the migration */
public void migrateWaitAdmin(final DBConfig config) throws InterruptedException {
try {
migrateErrorThrow(config);
} catch (final Exception ex) {
ex.printStackTrace();
while (true) {
LOGGER.error("ERROR: {}", ex.getMessage());
LOGGER.error("========================================================================");
LOGGER.error("== Fail to migrate ==> wait administrator interventions ==");
LOGGER.error("========================================================================");
Thread.sleep(60 * 60 * 1000);
}
}
}
/** Process the automatic migration of the system.
* @param config SQL connection for the migration.
* @throws MigrationException Error while accessing the DB or applying a migration */
public void migrateErrorThrow(final DBConfig config) throws MigrationException {
LOGGER.info("Execute migration ... [BEGIN]");
// check the integrity of the migrations:
LOGGER.info("List of availlable Migration: ");
for (final MigrationInterface elem : this.datas) {
if (elem == null) {
LOGGER.info(" - null");
throw new MigrationException("Add a null migration");
}
LOGGER.info(" - {}", elem.getName());
if (elem == this.init) {
throw new MigrationException("Add a migration that is the initialization migration");
}
if (this.init != null && elem.getName().equals(this.init.getName())) {
throw new MigrationException("Two migration have the same name as initilaisation: " + elem.getName());
}
for (final MigrationInterface elemCheck : this.datas) {
if (elem == elemCheck) {
continue;
}
if (elem.getName().equals(elemCheck.getName())) {
throw new MigrationException("Two migration have the same name...: " + elem.getName());
}
}
}
// STEP 1: Check the DB exist:
LOGGER.info("Verify existance of '{}'", config.getDbName());
boolean exist = DataAccess.isDBExist(config.getDbName());
if (!exist) {
LOGGER.warn("DB: '{}' DOES NOT EXIST ==> create one", config.getDbName());
// create the local DB:
DataAccess.createDB(config.getDbName());
}
exist = DataAccess.isDBExist(config.getDbName());
while (!exist) {
LOGGER.error("DB: '{}' DOES NOT EXIST after trying to create one ", config.getDbName());
LOGGER.error("Waiting administrator create a new one, we check after 30 seconds...");
try {
Thread.sleep(30000);
} catch (final InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
exist = DataAccess.isDBExist(config.getDbName());
}
LOGGER.info("DB '{}' exist.", config.getDbName());
// STEP 2: Check migration table exist:
LOGGER.info("Verify existance of migration table '{}'", "KAR_migration");
// TODO: set the class in parameters instead of string...
exist = DataAccess.isTableExist("KAR_migration");
if (!exist) {
LOGGER.info("'{}' Does not exist create a new one...", "KAR_migration");
// create the table:
List<String> sqlQuery;
try {
sqlQuery = DataFactory.createTable(Migration.class);
} catch (final Exception ex) {
ex.printStackTrace();
throw new MigrationException("Fail to create the local DB SQL model for migaration ==> wait administrator interventions");
}
LOGGER.info("Create Table with : {}", sqlQuery.get(0));
try {
DataAccess.executeQuerry(sqlQuery.get(0));
} catch (SQLException | IOException ex) {
ex.printStackTrace();
throw new MigrationException("Fail to create the local DB model for migaration ==> wait administrator interventions");
}
}
final Migration currentVersion = getCurrentVersion();
List<MigrationInterface> toApply = new ArrayList<>();
boolean needPlaceholder = false;
if (currentVersion == null) {
// This is a first migration
LOGGER.info("First installation of the system ==> Create the DB");
if (this.init == null) {
// No initialization class ==> manage a historical creation mode...
toApply = this.datas;
} else {
// Select Initialization class if it exist
toApply.add(this.init);
needPlaceholder = true;
}
} else {
if (!currentVersion.terminated) {
throw new MigrationException("An error occured in the last migration: '" + currentVersion.name + "' defect @" + currentVersion.stepId + "/" + currentVersion.count);
}
LOGGER.info("Upgrade the system Current version: {}", currentVersion.name);
boolean find = this.init != null && this.init.getName().equals(currentVersion.name);
if (find) {
toApply = this.datas;
} else {
LOGGER.info(" ===> Check what must be apply:");
for (final MigrationInterface elem : this.datas) {
LOGGER.info(" - {}", elem.getName());
if (!find) {
if (currentVersion.name.equals(elem.getName())) {
LOGGER.info(" == current version");
find = true;
}
continue;
}
LOGGER.info(" ++ add ");
toApply.add(elem);
}
}
}
DBEntry entry;
try {
entry = DBEntry.createInterface(config);
final int id = 0;
final int count = toApply.size();
for (final MigrationInterface elem : toApply) {
migrateSingle(entry, elem, id, count);
}
} catch (final IOException e) {
e.printStackTrace();
throw new MigrationException("An error occured in the migration (can not access to the DB): '" + currentVersion.name + "' defect @" + currentVersion.stepId + "/" + currentVersion.count);
}
if (needPlaceholder) {
if (this.datas.size() == 0) {
// No placeholder needed, the model has no migration in the current version...
} else {
// we insert a placeholder to simulate the last migration is well done.
final String placeholderName = this.datas.get(this.datas.size() - 1).getName();
Migration migrationResult = new Migration();
migrationResult.id = 1000L;
migrationResult.name = placeholderName;
migrationResult.stepId = 0;
migrationResult.terminated = true;
migrationResult.count = 0;
migrationResult.log = "Place-holder for first initialization";
try {
migrationResult = DataAccess.insert(migrationResult);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
LOGGER.info("Execute migration ... [ END ]");
}
public void migrateSingle(final DBEntry entry, final MigrationInterface elem, final int id, final int count) throws MigrationException {
LOGGER.info("---------------------------------------------------------");
LOGGER.info("-- Migrate: [{}/{}] {} [BEGIN]", id, count, elem.getName());
LOGGER.info("---------------------------------------------------------");
final StringBuilder log = new StringBuilder();
log.append("Start migration\n");
Migration migrationResult = new Migration();
migrationResult.name = elem.getName();
migrationResult.stepId = 0;
migrationResult.terminated = false;
try {
migrationResult.count = elem.getNumberOfStep();
} catch (final Exception e) {
e.printStackTrace();
throw new MigrationException("Fail to get number of migration step (maybe generation fail): " + e.getLocalizedMessage());
}
migrationResult.log = log.toString();
try {
migrationResult = DataAccess.insert(migrationResult);
} catch (final Exception e) {
e.printStackTrace();
throw new MigrationException("Fail to insert migration Log in the migration table: " + e.getLocalizedMessage());
}
boolean ret = true;
try {
ret = elem.applyMigration(entry, log, migrationResult);
} catch (final Exception e) {
log.append("\nFail in the migration apply ");
log.append(e.getLocalizedMessage());
e.printStackTrace();
throw new MigrationException("Migration fail: '" + migrationResult.name + "' defect @" + migrationResult.stepId + "/" + migrationResult.count);
}
if (ret) {
migrationResult.terminated = true;
try {
DataAccess.update(migrationResult, migrationResult.id, List.of("terminated"));
} catch (final Exception e) {
e.printStackTrace();
throw new MigrationException("Fail to update migration Log in the migration table: " + e.getLocalizedMessage());
}
} else {
try {
log.append("Fail in the migration engine...");
migrationResult.log = log.toString();
DataAccess.update(migrationResult, migrationResult.id, List.of("log"));
} catch (final Exception e) {
e.printStackTrace();
throw new MigrationException("Fail to update migration Log in the migration table: " + e.getLocalizedMessage() + " WITH: An error occured in the migration (OUTSIDE detection): '"
+ migrationResult.name + "' defect @" + migrationResult.stepId + "/" + migrationResult.count);
}
throw new MigrationException("An error occured in the migration (OUTSIDE detection): '" + migrationResult.name + "' defect @" + migrationResult.stepId + "/" + migrationResult.count);
}
LOGGER.info("Migrate: [{}/{}] {} [ END ]", id, count, elem.getName());
}
public void revertTo(final DBEntry entry, final String migrationName) throws MigrationException {
final Migration currentVersion = getCurrentVersion();
final List<MigrationInterface> toApply = new ArrayList<>();
boolean find = false;
for (int iii = this.datas.size() - 1; iii >= 0; iii--) {
if (!find) {
if (this.datas.get(iii).getName().equals(currentVersion.name)) {
find = true;
}
continue;
}
if (this.datas.get(iii).getName().equals(migrationName)) {
break;
}
toApply.add(this.datas.get(iii));
}
final int id = 0;
final int count = toApply.size();
for (final MigrationInterface elem : toApply) {
revertSingle(entry, elem, id, count);
}
}
public void revertSingle(final DBEntry entry, final MigrationInterface elem, final int id, final int count) {
LOGGER.info("Revert migration: {} [BEGIN]", elem.getName());
LOGGER.info("Revert migration: {} [ END ]", elem.getName());
}
}
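Typical boot-time usage, with hypothetical migration classes; the DBConfig and the calling context (which must propagate InterruptedException) are assumed:
// Register the initialization model plus the ordered migrations, then run them (or wait for an admin on failure).
final MigrationEngine migrationEngine = new MigrationEngine();
migrationEngine.setInit(new Initialization());   // hypothetical MigrationInterface for a fresh DB
migrationEngine.add(new Migration20231126());    // hypothetical ordered migrations
migrationEngine.add(new Migration20240104());
migrationEngine.migrateWaitAdmin(dbConfig);      // dbConfig: the application's DBConfig instance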

View File

@@ -0,0 +1,10 @@
package org.kar.archidata.migration;
public class MigrationException extends Exception {
private static final long serialVersionUID = 20230502L;
public MigrationException(final String message) {
super(message);
}
}

View File

@@ -0,0 +1,27 @@
package org.kar.archidata.migration;
import org.kar.archidata.db.DBEntry;
import org.kar.archidata.migration.model.Migration;
public interface MigrationInterface {
/** Get Name of the migration
* @return Migration name */
String getName();
/** Migrate the system to a new version.
* @param entry DB interface for the migration.
* @param log Stored data in the BDD for the migration progression.
* @param migration Migration post data on each step...
* @return true if migration is finished. */
boolean applyMigration(DBEntry entry, StringBuilder log, Migration model) throws Exception;
/** Remove a migration the system to the previous version.
* @param entry DB interface for the migration.
* @param log Stored data in the BDD for the migration progression.
* @return true if migration is finished. */
boolean revertMigration(DBEntry entry, StringBuilder log) throws Exception;
/** Get the number of step in the migration process.
* @return count of SQL access. */
int getNumberOfStep() throws Exception;
}

View File

@@ -0,0 +1,146 @@
package org.kar.archidata.migration;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.dataAccess.DataFactory;
import org.kar.archidata.db.DBEntry;
import org.kar.archidata.migration.model.Migration;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
record Action(String action, List<String> filterDB) {
public Action(final String action) {
this(action, List.of());
}
public Action(final String action, final String filterDB) {
this(action, List.of(filterDB));
}
}
public class MigrationSqlStep implements MigrationInterface {
final static Logger LOGGER = LoggerFactory.getLogger(MigrationSqlStep.class);
private final List<Action> actions = new ArrayList<>();
private boolean isGenerated = false;
@Override
public String getName() {
return getClass().getCanonicalName();
}
public void display() throws Exception {
if (!this.isGenerated) {
this.isGenerated = true;
generateStep();
}
for (int iii = 0; iii < this.actions.size(); iii++) {
final Action action = this.actions.get(iii);
LOGGER.info(" >>>> SQL ACTION : {}/{} ==> filter='{}'\n{}", iii, this.actions.size(), action.filterDB(), action.action());
}
}
public void generateStep() throws Exception {
throw new Exception("Forward is not implemented");
}
public void generateRevertStep() throws Exception {
throw new Exception("Backward is not implemented");
}
@Override
public boolean applyMigration(final DBEntry entry, final StringBuilder log, final Migration model) throws Exception {
if (!this.isGenerated) {
this.isGenerated = true;
generateStep();
}
for (int iii = 0; iii < this.actions.size(); iii++) {
log.append("action [" + (iii + 1) + "/" + this.actions.size() + "]\n");
LOGGER.info(" >>>> SQL ACTION : {}/{}", iii + 1, this.actions.size());
final Action action = this.actions.get(iii);
LOGGER.info("SQL request: ```{}``` on '{}' current={}", action.action(), action.filterDB(), ConfigBaseVariable.getDBType());
log.append("SQL: " + action.action() + " on " + action.filterDB() + "\n");
boolean isValid = true;
if (action.filterDB() != null && action.filterDB().size() > 0) {
isValid = false;
for (final String elem : action.filterDB()) {
if (ConfigBaseVariable.getDBType().equals(elem)) {
isValid = true;
}
}
}
if (!isValid) {
log.append("==> Skip (DB is not compatible: " + ConfigBaseVariable.getDBType() + ")\n");
LOGGER.info(" >>>> SQL ACTION : {}/{} ==> SKIP", iii + 1, this.actions.size());
continue;
}
try {
DataAccess.executeQuerry(action.action());
} catch (SQLException | IOException ex) {
LOGGER.error("SQL request ERROR: {}", ex.getMessage(), ex);
log.append("SQL request ERROR: " + ex.getMessage() + "\n");
model.stepId = iii + 1;
model.log = log.toString();
try {
DataAccess.update(model, model.id, List.of("stepId", "log"));
} catch (final Exception e) {
e.printStackTrace();
}
return false;
}
log.append("action [" + (iii + 1) + "/" + this.actions.size() + "] ==> DONE\n");
LOGGER.info(" >>>> SQL ACTION : {}/{} ==> DONE", iii + 1, this.actions.size());
model.stepId = iii + 1;
model.log = log.toString();
try {
DataAccess.update(model, model.id, List.of("stepId", "log"));
} catch (final Exception e) {
e.printStackTrace();
}
try {
Thread.sleep(2);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}
return true;
}
@Override
public boolean revertMigration(final DBEntry entry, final StringBuilder log) throws Exception {
generateRevertStep();
return false;
}
public void addAction(final String action) {
this.actions.add(new Action(action));
}
public void addAction(final String action, final String filterDBType) {
this.actions.add(new Action(action, filterDBType));
}
public void addClass(final Class<?> clazz) throws Exception {
final List<String> tmp = DataFactory.createTable(clazz);
for (final String elem : tmp) {
this.actions.add(new Action(elem));
}
}
@Override
public int getNumberOfStep() throws Exception {
if (!this.isGenerated) {
this.isGenerated = true;
generateStep();
}
return this.actions.size();
}
}
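
In practice, a concrete migration typically extends MigrationSqlStep and queues its SQL in generateStep() / generateRevertStep(). The following sketch is illustrative only: the class name, table, and SQL are made up, and the "mysql" filter value is an assumption about what ConfigBaseVariable.getDBType() may return.

package org.kar.archidata.migration;
// Hypothetical illustration only: a migration that adds a column, built on MigrationSqlStep.
public class AddUserEmailColumn extends MigrationSqlStep {
@Override
public String getName() {
return "add-user-email-column";
}
@Override
public void generateStep() throws Exception {
// Queued for every database type:
addAction("ALTER TABLE user ADD COLUMN email VARCHAR(256)");
// Queued only when ConfigBaseVariable.getDBType() matches the filter (filter value assumed):
addAction("ALTER TABLE user MODIFY COLUMN email VARCHAR(256) NOT NULL", "mysql");
// addClass(SomeModel.class) could also be used here to queue the CREATE TABLE statements
// produced by DataFactory.createTable() for an annotated model class.
}
@Override
public void generateRevertStep() throws Exception {
addAction("ALTER TABLE user DROP COLUMN email");
}
}

The engine then runs the queued actions one by one, recording progress in the Migration model through DataAccess.update(), as shown in applyMigration() above.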

View File

@@ -0,0 +1,41 @@
package org.kar.archidata.migration.model;
import org.kar.archidata.annotation.DataDefault;
import org.kar.archidata.annotation.DataIfNotExists;
import org.kar.archidata.annotation.DataNotRead;
import org.kar.archidata.model.GenericDataSoftDelete;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.Column;
import jakarta.persistence.Table;
// For logs only
//public static final String TABLE_NAME = "KAR_migration";
// TODO: Add a migration Hash to be sure that the current migration init is correct and has not changed...
@Table(name = "KAR_migration")
@DataIfNotExists
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Migration extends GenericDataSoftDelete {
final static int VERSION_MIGRATION = 2;
@Schema(description = "Name of the migration")
@Column(length = 256)
public String name;
@DataNotRead
@DataDefault("'2'")
@Schema(description = "Version of the migration engine")
public Integer version;
@Column(nullable = false)
@DataDefault("'0'")
@Schema(description = "if the migration is well terminated or not")
public Boolean terminated = false;
@Schema(description = "index in the migration progression")
public Integer stepId = 0;
@Schema(description = "number of element in the migration")
public Integer count;
@Schema(description = "Log generate by the migration")
@Column(length = -1)
public String log = "";
}

Some files were not shown because too many files have changed in this diff.