[DEV] continue MongoDB integration

This commit is contained in:
Edouard DUPIN 2024-10-13 23:59:56 +02:00
parent 5689700def
commit c1e9b4bb4c
8 changed files with 77 additions and 74 deletions

View File: SqlTimestampCodec.java

@@ -0,0 +1,34 @@
package org.kar.archidata.converter.morphia;

import java.sql.Timestamp;

import org.bson.BsonReader;
import org.bson.BsonType;
import org.bson.BsonWriter;
import org.bson.codecs.Codec;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.EncoderContext;

/** Stores java.sql.Timestamp as a BSON date-time (milliseconds since the epoch). */
public class SqlTimestampCodec implements Codec<Timestamp> {

	@Override
	public void encode(final BsonWriter writer, final Timestamp value, final EncoderContext encoderContext) {
		// BSON date-time resolution is the millisecond; sub-millisecond nanos are dropped.
		writer.writeDateTime(value.getTime());
	}

	@Override
	public Timestamp decode(final BsonReader reader, final DecoderContext decoderContext) {
		final BsonType bsonType = reader.getCurrentBsonType();
		if (bsonType == BsonType.DATE_TIME) {
			return new Timestamp(reader.readDateTime());
		}
		throw new IllegalArgumentException("Expected a DATE_TIME but found " + bsonType);
	}

	@Override
	public Class<Timestamp> getEncoderClass() {
		return Timestamp.class;
	}
}
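
For context, a round-trip of this codec through an in-memory BSON document looks like the following (an illustrative sketch, not part of the commit; the class and field names here are invented):

import java.sql.Timestamp;

import org.bson.BsonDocument;
import org.bson.BsonDocumentReader;
import org.bson.BsonDocumentWriter;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.EncoderContext;
import org.kar.archidata.converter.morphia.SqlTimestampCodec;

public class SqlTimestampCodecRoundTrip {
	public static void main(final String[] args) {
		final SqlTimestampCodec codec = new SqlTimestampCodec();
		final Timestamp original = new Timestamp(System.currentTimeMillis());
		// Encode into an in-memory BSON document.
		final BsonDocument doc = new BsonDocument();
		final BsonDocumentWriter writer = new BsonDocumentWriter(doc);
		writer.writeStartDocument();
		writer.writeName("value");
		codec.encode(writer, original, EncoderContext.builder().build());
		writer.writeEndDocument();
		// Decode it back: BSON date-time keeps millisecond precision only.
		final BsonDocumentReader reader = new BsonDocumentReader(doc);
		reader.readStartDocument();
		reader.readBsonType(); // position on the DATE_TIME value
		reader.readName();
		final Timestamp decoded = codec.decode(reader, DecoderContext.builder().build());
		System.out.println(original.getTime() == decoded.getTime()); // true
	}
}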

View File: DataAccessMorphia.java

@@ -10,6 +10,7 @@ import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
@@ -179,6 +180,10 @@ public class DataAccessMorphia extends DataAccess {
final String fieldName,
final Document docSet,
final Document docUnSet) throws Exception {
+if (field.get(data) == null) {
+docUnSet.append(fieldName, "");
+return;
+}
if (type == long.class) {
docSet.append(fieldName, field.getLong(data));
return;
@@ -331,76 +336,33 @@ public class DataAccessMorphia extends DataAccess {
return;
}
if (type == Timestamp.class) {
-LOGGER.error("TODO: TimeStamp ... ");
-/*
-final Timestamp tmp = rs.getTimestamp(count.value);
-if (rs.wasNull()) {
-field.set(data, null);
-} else {
-field.set(data, tmp);
-countNotNull.inc();
-}
-*/
+final Date value = doc.get(fieldName, Date.class);
+final Timestamp newData = new Timestamp(value.getTime());
+field.set(data, newData);
return;
}
if (type == Date.class) {
-LOGGER.error("TODO: Date ... ");
-/*
-try {
-final Timestamp tmp = rs.getTimestamp(count.value);
-if (rs.wasNull()) {
-field.set(data, null);
-} else {
-field.set(data, Date.from(tmp.toInstant()));
-countNotNull.inc();
-}
-} catch (final SQLException ex) {
-final String tmp = rs.getString(count.value);
-LOGGER.error("Fail to parse the SQL time !!! {}", tmp);
-if (rs.wasNull()) {
-field.set(data, null);
-} else {
-final Date date = DateTools.parseDate(tmp);
-LOGGER.error("Fail to parse the SQL time !!! {}", date);
-field.set(data, date);
-countNotNull.inc();
-}
-}*/
+final Date value = doc.get(fieldName, Date.class);
+field.set(data, value);
return;
}
if (type == Instant.class) {
-LOGGER.error("TODO: Instant ... ");
-/*
-final String tmp = rs.getString(count.value);
-if (rs.wasNull()) {
-field.set(data, null);
-} else {
-field.set(data, Instant.parse(tmp));
-countNotNull.inc();
-}
-*/
+final Date value = doc.get(fieldName, Date.class);
+final Instant newData = value.toInstant();
+field.set(data, newData);
return;
}
if (type == LocalDate.class) {
-LOGGER.error("TODO: LocalDate ... ");
-/*
-final java.sql.Date tmp = rs.getDate(count.value);
-if (rs.wasNull()) {
-field.set(data, null);
-} else {
-field.set(data, tmp.toLocalDate());
-countNotNull.inc();
-}
-*/
+final Date value = doc.get(fieldName, Date.class);
+final LocalDate newData = value.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
+field.set(data, newData);
return;
}
if (type == LocalTime.class) {
-LOGGER.error("TODO: LocalTime ... ");
-/*
-final java.sql.Time tmp = rs.getTime(count.value);
-if (rs.wasNull()) {
-field.set(data, null);
-} else {
-field.set(data, tmp.toLocalTime());
-countNotNull.inc();
-}
-*/
+final Long value = doc.getLong(fieldName);
+final LocalTime newData = LocalTime.ofNanoOfDay(value);
+field.set(data, newData);
return;
}
if (type == String.class) {
final String value = doc.getString(fieldName);

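All the new decode branches go through java.util.Date, the type the Mongo driver returns for a BSON date-time, except LocalTime, which is persisted as a nano-of-day long. Condensed into a standalone sketch (invented names, plain JDK calls):

import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
import java.util.Date;

public class DateConversionSketch {
	public static void main(final String[] args) {
		final Date stored = new Date(); // what doc.get(fieldName, Date.class) returns
		final Timestamp asTimestamp = new Timestamp(stored.getTime());
		final Instant asInstant = stored.toInstant();
		// Zone-dependent, exactly like the LocalDate branch above:
		final LocalDate asLocalDate = stored.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
		// LocalTime round-trips through a long instead of a Date:
		final LocalTime asLocalTime = LocalTime.ofNanoOfDay(LocalTime.NOON.toNanoOfDay());
		System.out.println(asTimestamp + " " + asInstant + " " + asLocalDate + " " + asLocalTime);
	}
}

Worth noting: because the LocalDate branch uses the JVM's default zone, the same stored date can decode differently on machines configured with different time zones.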
View File: DbInterfaceMorphia.java

@@ -7,6 +7,7 @@ import org.bson.UuidRepresentation;
import org.bson.codecs.configuration.CodecRegistries;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.codecs.pojo.PojoCodecProvider;
+import org.kar.archidata.converter.morphia.SqlTimestampCodec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -35,6 +36,7 @@ public class DbInterfaceMorphia extends DbInterface implements Closeable {
final ConnectionString connectionString = new ConnectionString(dbUrl);
// Create a CodecRegistry for UUID
//final CodecRegistry uuidCodecRegistry = CodecRegistries.fromCodecs(new UUIDCodec());
+final CodecRegistry sqlTimestampCodecRegistry = CodecRegistries.fromCodecs(new SqlTimestampCodec());
// Create a CodecRegistry for POJOs
final CodecRegistry pojoCodecRegistry = CodecRegistries
.fromProviders(PojoCodecProvider.builder().automatic(true).build());
@@ -45,7 +47,7 @@ public class DbInterfaceMorphia extends DbInterface implements Closeable {
final CodecRegistry codecRegistry = CodecRegistries.fromRegistries(
MongoClientSettings.getDefaultCodecRegistry(),
CodecRegistries.fromCodecs(new org.bson.codecs.UuidCodec(UuidRepresentation.STANDARD)),
-pojoCodecRegistry);
+pojoCodecRegistry, sqlTimestampCodecRegistry);
// Configure MongoClientSettings
final MongoClientSettings clientSettings = MongoClientSettings.builder() //
.applyConnectionString(connectionString)//
@@ -53,7 +55,7 @@ public class DbInterfaceMorphia extends DbInterface implements Closeable {
.uuidRepresentation(UuidRepresentation.STANDARD)//
.build();
this.mongoClient = MongoClients.create(clientSettings);
-this.datastore = Morphia.createDatastore(this.mongoClient, "karusic");
+this.datastore = Morphia.createDatastore(this.mongoClient, dbName);
// Map entities
this.datastore.getMapper().map(classes);
// Ensure indexes

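One detail worth keeping in mind: CodecRegistries.fromRegistries() resolves codecs by walking its arguments in order and returning the first match. A minimal check that java.sql.Timestamp now resolves to the new codec (a sketch, assuming the driver's default registry has no Timestamp codec, which is the reason this commit adds one):

import java.sql.Timestamp;

import org.bson.codecs.Codec;
import org.bson.codecs.configuration.CodecRegistries;
import org.bson.codecs.configuration.CodecRegistry;
import org.kar.archidata.converter.morphia.SqlTimestampCodec;

import com.mongodb.MongoClientSettings;

public class RegistryLookupSketch {
	public static void main(final String[] args) {
		final CodecRegistry registry = CodecRegistries.fromRegistries(
				MongoClientSettings.getDefaultCodecRegistry(),
				CodecRegistries.fromCodecs(new SqlTimestampCodec()));
		// The first registry able to provide the class wins the lookup.
		final Codec<Timestamp> codec = registry.get(Timestamp.class);
		System.out.println(codec.getClass().getSimpleName()); // SqlTimestampCodec
	}
}

If the Timestamp registry were listed after another registry that could already satisfy the class, it would never be consulted, so appending it last only works because nothing earlier claims java.sql.Timestamp.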
View File: Migration.java

@@ -6,6 +6,7 @@ import org.kar.archidata.model.GenericDataSoftDelete;
import com.fasterxml.jackson.annotation.JsonInclude;
+import dev.morphia.annotations.Entity;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.Column;
import jakarta.persistence.Table;
@@ -16,6 +17,7 @@ import jakarta.ws.rs.DefaultValue;
// TODO: Add a migration hash to be sure that the current migration init is correct and has not changed...
@Table(name = "KAR_migration")
+@Entity("KAR_migration")
@DataIfNotExists
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Migration extends GenericDataSoftDelete {

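The pattern in this file is a single model class mapped to both backends: the jakarta.persistence annotations drive the SQL side while the Morphia @Entity drives Mongo, with the same logical name so both stores stay aligned. Reduced to its skeleton (an illustrative class, not from the commit):

import org.kar.archidata.model.GenericDataSoftDelete;

import com.fasterxml.jackson.annotation.JsonInclude;

import dev.morphia.annotations.Entity;
import jakarta.persistence.Table;

@Table(name = "KAR_example") // SQL table name
@Entity("KAR_example") // Mongo collection name, kept identical
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Example extends GenericDataSoftDelete {
	// columns / fields ...
}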
View File: ConfigureDb.java

@@ -19,14 +19,16 @@ public class ConfigureDb {
modeTest = "MY-SQL";
}
// override the local test:
//modeTest = "MONGO";
+modeTest = "MONGO";
if ("SQLITE-MEMORY".equalsIgnoreCase(modeTest)) {
ConfigBaseVariable.dbType = "sqlite";
ConfigBaseVariable.bdDatabase = null;
ConfigBaseVariable.dbHost = "memory";
// for tests we need to keep the DB connected at all times
ConfigBaseVariable.dbKeepConnected = "true";
} else if ("SQLITE".equalsIgnoreCase(modeTest)) {
ConfigBaseVariable.dbType = "sqlite";
ConfigBaseVariable.bdDatabase = null;
ConfigBaseVariable.dbKeepConnected = "true";
} else if ("MY-SQL".equalsIgnoreCase(modeTest)) {
ConfigBaseVariable.dbType = "mysql";
@@ -35,6 +37,7 @@ public class ConfigureDb {
ConfigBaseVariable.dbUser = "root";
} else if ("MONGO".equalsIgnoreCase(modeTest)) {
ConfigBaseVariable.dbType = "mongo";
ConfigBaseVariable.bdDatabase = "test_db";
ConfigBaseVariable.bdDatabase = "test_mongo_db";
} else {
// User local modification ...

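For reference, the mode switch boils down to presetting ConfigBaseVariable before the server starts; a compacted sketch of the two interesting branches (field names taken from the diff above, the stand-alone method is hypothetical):

import org.kar.archidata.tools.ConfigBaseVariable;

public class ConfigureDbSketch {
	public static void configure(final String modeTest) {
		if ("MONGO".equalsIgnoreCase(modeTest)) {
			ConfigBaseVariable.dbType = "mongo";
			ConfigBaseVariable.bdDatabase = "test_mongo_db";
		} else if ("SQLITE-MEMORY".equalsIgnoreCase(modeTest)) {
			ConfigBaseVariable.dbType = "sqlite";
			ConfigBaseVariable.bdDatabase = null;
			ConfigBaseVariable.dbHost = "memory";
			// keep the connection open, otherwise the in-memory DB is dropped
			ConfigBaseVariable.dbKeepConnected = "true";
		}
	}
}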
View File: TestAPI.java

@@ -8,12 +8,12 @@ import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.api.extension.ExtendWith;
-import org.kar.archidata.db.DBEntry;
import org.kar.archidata.tools.ConfigBaseVariable;
import org.kar.archidata.tools.RESTApi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import test.kar.archidata.ConfigureDb;
import test.kar.archidata.StepwiseExtension;
import test.kar.archidata.apiExtern.model.SimpleArchiveTable;
@@ -30,6 +30,7 @@ public class TestAPI {
@BeforeAll
public static void configureWebServer() throws Exception {
+ConfigureDb.configure();
LOGGER.info("configure server ...");
webInterface = new WebLauncherTest();
LOGGER.info("Clean previous table");
@@ -46,9 +47,7 @@ public class TestAPI {
LOGGER.info("Kill the web server");
webInterface.stop();
webInterface = null;
LOGGER.info("Remove the test db");
DBEntry.closeAllForceMode();
ConfigBaseVariable.clearAllValue();
ConfigureDb.clear();
}
@Order(1)

View File: WebLauncher.java

@@ -36,7 +36,6 @@ public class WebLauncher {
private final DataAccess da;
public WebLauncher() {
ConfigBaseVariable.bdDatabase = "karusic";
this.da = DataAccess.createInterface();
}

View File: MigrationFail.java

@@ -1,5 +1,8 @@
package test.kar.archidata.migration;
+import java.io.IOException;
+import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.migration.MigrationSqlStep;
class MigrationFail extends MigrationSqlStep {
@@ -16,10 +19,9 @@ class MigrationFail extends MigrationSqlStep {
@Override
public void generateStep() throws Exception {
addAction("""
ALTER TABLE `TestTableMigrationqs`
RENAME COLUMN `testDataMisqdgration1` TO `testDataMiqsdgration2`
""");
addAction((final DataAccess da) -> {
throw new IOException("FAIL migration");
});
display();
}
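
The lambda form of addAction used here receives the migration's DataAccess handle, so a step can run arbitrary Java and fail by throwing, instead of relying on intentionally broken SQL as before. For contrast, a non-failing step in the same style (a sketch; the real base class may require further overrides):

import org.kar.archidata.dataAccess.DataAccess;
import org.kar.archidata.migration.MigrationSqlStep;

class MigrationExample extends MigrationSqlStep {
	@Override
	public void generateStep() throws Exception {
		addAction((final DataAccess da) -> {
			// Any Java-side work with the DataAccess handle;
			// throwing here marks the step as failed, as in MigrationFail.
			System.out.println("custom migration step executed");
		});
		display();
	}
}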