Added errors to database and simple error view in frontend

This commit is contained in:
Jan 2025-09-21 13:29:12 +02:00
parent 5e114ce859
commit 0501c99903
33 changed files with 1152 additions and 497 deletions

View file

@ -50,7 +50,9 @@ export default {
return this.errorLogStore.getErrors; return this.errorLogStore.getErrors;
}, },
buildDate(date) { buildDate(date) {
return `${date[0]}-${date[1].toString().padStart(2, '0')}-${date[2].toString().padStart(2, '0')} ${date[3].toString().padStart(2, '0')}:${date[4].toString().padStart(2, '0')}:${date[5].toString().padStart(2, '0')}` if(date === null) return "not set";
return `${date[0]}-${date[1].toString().padStart(2, '0')}-${date[2].toString().padStart(2, '0')} ${date[3]?.toString().padStart(2, '0') ?? '00'}:${date[4]?.toString().padStart(2, '0') ?? '00'}:${date[5]?.toString().padStart(2, '0') ?? '00'}`
}, },
showDetails(error) { showDetails(error) {
console.log("click") console.log("click")

View file

@ -60,12 +60,12 @@ export const useErrorStore = defineStore('error', {
this.startAutoSubmitTimer(); this.startAutoSubmitTimer();
}); });
if (response.ok) { if (response?.ok) {
this.stopAutoSubmitTimer() this.stopAutoSubmitTimer()
this.sendCache = []; this.sendCache = [];
} else { } else {
console.error("Error transmitting errors: " + url, params); console.error("Error transmitting errors: " + url, params);
console.error(response, await response.text()); console.error(response, await response?.text());
this.startAutoSubmitTimer(); this.startAutoSubmitTimer();
} }
}, },

View file

@ -2,6 +2,7 @@ package de.avatic.lcc.config;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.annotation.EnableAsync; import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
@ -11,7 +12,7 @@ import java.util.concurrent.Executor;
@EnableAsync @EnableAsync
public class AsyncConfig { public class AsyncConfig {
@Bean(name = "taskExecutor") @Bean(name = "calculationExecutor")
public Executor taskExecutor() { public Executor taskExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(4); executor.setCorePoolSize(4);
@ -21,4 +22,15 @@ public class AsyncConfig {
executor.initialize(); executor.initialize();
return executor; return executor;
} }
@Bean(name = "bulkProcessingExecutor")
public Executor bulkProcessingExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(1);
executor.setMaxPoolSize(1);
executor.setQueueCapacity(100);
executor.setThreadNamePrefix("bulk-processing-");
executor.initialize();
return executor;
}
} }

View file

@ -3,10 +3,11 @@ package de.avatic.lcc.controller.bulk;
import com.azure.core.annotation.BodyParam; import com.azure.core.annotation.BodyParam;
import de.avatic.lcc.dto.bulk.BulkFileType; import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.dto.bulk.BulkStatusDTO; import de.avatic.lcc.dto.bulk.BulkOperationDTO;
import de.avatic.lcc.service.bulk.BulkExportService; import de.avatic.lcc.service.bulk.BulkOperationService;
import de.avatic.lcc.service.bulk.BulkFileProcessingService;
import de.avatic.lcc.service.bulk.TemplateExportService; import de.avatic.lcc.service.bulk.TemplateExportService;
import de.avatic.lcc.util.exception.base.BadRequestException;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.InputStreamResource; import org.springframework.core.io.InputStreamResource;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
@ -14,7 +15,6 @@ import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile; import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List; import java.util.List;
/** /**
@ -26,48 +26,29 @@ import java.util.List;
@RequestMapping("/api/bulk") @RequestMapping("/api/bulk")
public class BulkOperationController { public class BulkOperationController {
private final BulkFileProcessingService bulkProcessingService; private final BulkOperationService bulkOperationService;
private final TemplateExportService templateExportService; private final TemplateExportService templateExportService;
private final BulkExportService bulkExportService;
public BulkOperationController(BulkFileProcessingService bulkProcessingService, TemplateExportService templateExportService, BulkExportService bulkExportService) { public BulkOperationController(BulkOperationService bulkOperationService, TemplateExportService templateExportService) {
this.bulkProcessingService = bulkProcessingService; this.bulkOperationService = bulkOperationService;
this.templateExportService = templateExportService; this.templateExportService = templateExportService;
this.bulkExportService = bulkExportService;
} }
/**
* Retrieves the current status of all bulk processing operations. @GetMapping({"/status/", "/status"})
* public ResponseEntity<List<BulkOperationDTO>> getUploadStatus() {
* @return A ResponseEntity with the bulk processing status payload. return ResponseEntity.ok(bulkOperationService.getStatus());
*/
@GetMapping({"/status/","/status"})
public ResponseEntity<List<BulkStatusDTO>> getUploadStatus() {
return ResponseEntity.ok(bulkProcessingService.getStatus());
} }
/**
* Handles the upload of a file for a specific processing type and file type. @PostMapping({"/upload/{type}", "/upload/{type}/"})
* public ResponseEntity<Void> uploadFile(@PathVariable BulkFileType type, @BodyParam("file") MultipartFile file) {
* @param type The file type being uploaded, as defined in {@link BulkFileType}. bulkOperationService.processFileImport(type, file);
* @param file The file to be uploaded, provided as a multipart file. return ResponseEntity.ok().build();
* @return A ResponseEntity indicating whether the upload was processed successfully.
*/
@PostMapping({"/upload/{type}","/upload/{type}/"})
public ResponseEntity<Integer> uploadFile(@PathVariable BulkFileType type, @BodyParam("file") MultipartFile file) {
return ResponseEntity.ok(bulkProcessingService.processFile(type, file));
} }
/**
* Generates and downloads a template file for the specified file type. @GetMapping({"/templates/{type}", "/templates/{type}/"})
*
* @param type The type of file template to generate, based on the {@link BulkFileType} enumeration.
* @return A ResponseEntity with the generated template as an InputStreamResource.
* The response includes the appropriate Excel MIME type and a Content-Disposition header
* specifying it as a downloadable file.
* @throws IllegalArgumentException if the file type is invalid.
*/
@GetMapping({"/templates/{type}","/templates/{type}/"})
public ResponseEntity<InputStreamResource> generateTemplate(@PathVariable BulkFileType type) { public ResponseEntity<InputStreamResource> generateTemplate(@PathVariable BulkFileType type) {
HttpHeaders headers = new HttpHeaders(); HttpHeaders headers = new HttpHeaders();
headers.add("Content-Disposition", "attachment; filename=lcc_template_" + type.name().toLowerCase() + ".xlsx"); headers.add("Content-Disposition", "attachment; filename=lcc_template_" + type.name().toLowerCase() + ".xlsx");
@ -79,60 +60,36 @@ public class BulkOperationController {
.body(new InputStreamResource(templateExportService.generateTemplate(BulkFileType.valueOf(type.name().toUpperCase())))); .body(new InputStreamResource(templateExportService.generateTemplate(BulkFileType.valueOf(type.name().toUpperCase()))));
} }
/**
* Downloads an export file for the specified file type.
*
* @param type The type of file to export, as defined in {@link BulkFileType}.
* @return A ResponseEntity with the exported file as an InputStreamResource.
* The file is served as an Excel document, with proper headers for download.
* @throws IllegalArgumentException if the provided file type is not supported.
*/
@GetMapping({"/download/{type}", "/download/{type}/"}) @GetMapping({"/download/{type}", "/download/{type}/"})
public ResponseEntity<InputStreamResource> scheduleDownload(@PathVariable BulkFileType type) throws IOException { public ResponseEntity<Void> scheduleDownload(@PathVariable BulkFileType type) {
bulkOperationService.processFileExport(type);
return ResponseEntity.ok().build();
}
@GetMapping({"/download/{type}/{validity_period_id}", "/download/{type}/{validity_period_id}/"})
public ResponseEntity<Void> scheduleDownload(@PathVariable BulkFileType type, @PathVariable("validity_period_id") Integer validityPeriodId) {
bulkOperationService.processFileExport(type, validityPeriodId);
return ResponseEntity.ok().build();
}
@GetMapping({"/file/{processId}", "/file/{processId}/"})
public ResponseEntity<InputStreamResource> download(@PathVariable("processId") Integer id) {
var op = bulkOperationService.getBulkOperation(id);
if (op == null)
throw new BadRequestException("No such file.", "No file found with id: " + id);
HttpHeaders headers = new HttpHeaders(); HttpHeaders headers = new HttpHeaders();
headers.add("Content-Disposition", "attachment; filename=lcc_export_" + type.name().toLowerCase() + ".xlsx"); headers.add("Content-Disposition", "attachment; filename=lcc_export_" + op.getFileType().name().toLowerCase() + ".xlsx");
return ResponseEntity return ResponseEntity
.ok() .ok()
.headers(headers) .headers(headers)
.contentType(MediaType.parseMediaType("application/vnd.ms-excel")) .contentType(MediaType.parseMediaType("application/vnd.ms-excel"))
.body(new InputStreamResource(bulkExportService.generateExport(BulkFileType.valueOf(type.name().toUpperCase())))); .body(new InputStreamResource(new ByteArrayResource(op.getFile())));
} }
/**
* Downloads an export file for the specified file type, filtered by a validity period.
*
* @param type The type of file to export, according to {@link BulkFileType}.
* @param validityPeriodId The ID of the validity period to apply filtering for the export.
* @return A ResponseEntity containing the exported file as an InputStreamResource.
* The file is served as an Excel document, with appropriate headers for download.
* @throws IllegalArgumentException if the file type or validity period ID is invalid.
*/
@GetMapping({"/download/{type}/{validity_period_id}","/download/{type}/{validity_period_id}/"})
public ResponseEntity<InputStreamResource> scheduleDownload(@PathVariable BulkFileType type, @PathVariable("validity_period_id") Integer validityPeriodId) throws IOException {
HttpHeaders headers = new HttpHeaders();
headers.add("Content-Disposition", "attachment; filename=lcc_export_" + type.name().toLowerCase() + ".xlsx");
return ResponseEntity
.ok()
.headers(headers)
.contentType(MediaType.parseMediaType("application/vnd.ms-excel"))
.body(new InputStreamResource(bulkExportService.generateExport(BulkFileType.valueOf(type.name().toUpperCase()), validityPeriodId)));
}
// @GetMapping({"/file/{processId}","/file/{processId}/"})
// public ResponseEntity<InputStreamResource> download(@PathVariable Integer processId) throws IOException {
// bulkExportService.export(processId);
//
// HttpHeaders headers = new HttpHeaders();
// headers.add("Content-Disposition", "attachment; filename=lcc_export_" + type.name().toLowerCase() + ".xlsx");
//
//
// return ResponseEntity
// .ok()
// .headers(headers)
// .contentType(MediaType.parseMediaType("application/vnd.ms-excel"))
// .body(new InputStreamResource(bulkExportService.generateExport(BulkFileType.valueOf(type.name().toUpperCase()), validityPeriodId)));
// }
} }

View file

@ -0,0 +1,61 @@
package de.avatic.lcc.dto.bulk;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.LocalDateTime;
public class BulkOperationDTO {
private int id;
@JsonProperty("file_type")
private BulkFileType fileType;
@JsonProperty("processing_type")
private BulkProcessingType processingType;
private BulkOperationState state;
@JsonProperty("timestamp")
private LocalDateTime createdAt;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public BulkFileType getFileType() {
return fileType;
}
public void setFileType(BulkFileType fileType) {
this.fileType = fileType;
}
public BulkProcessingType getProcessingType() {
return processingType;
}
public void setProcessingType(BulkProcessingType processingType) {
this.processingType = processingType;
}
public BulkOperationState getState() {
return state;
}
public void setState(BulkOperationState state) {
this.state = state;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
}

View file

@ -0,0 +1,5 @@
package de.avatic.lcc.dto.bulk;
public enum BulkOperationState {
SCHEDULED, PROCESSING, COMPLETED, EXCEPTION
}

View file

@ -1,5 +1,5 @@
package de.avatic.lcc.dto.bulk; package de.avatic.lcc.dto.bulk;
public enum BulkProcessingType { public enum BulkProcessingType {
UPLOAD, DOWNLOAD IMPORT, EXPORT
} }

View file

@ -1,5 +0,0 @@
package de.avatic.lcc.dto.bulk;
public enum BulkState {
QUEUED, PROCESSING, COMPLETED, FAILED
}

View file

@ -1,12 +0,0 @@
package de.avatic.lcc.dto.bulk;
public class BulkStatusDTO {
private BulkFileType operation;
private int processingId;
private BulkState state;
}

View file

@ -0,0 +1,29 @@
package de.avatic.lcc.model.bulk;
public class BulkInstruction<T> {
private BulkInstructionType type;
private T entity;
public BulkInstruction(T entity, BulkInstructionType type) {
this.entity = entity;
this.type = type;
}
public BulkInstructionType getType() {
return type;
}
public void setType(BulkInstructionType type) {
this.type = type;
}
public T getEntity() {
return entity;
}
public void setEntity(T entity) {
this.entity = entity;
}
}

View file

@ -1,5 +1,5 @@
package de.avatic.lcc.model.bulk; package de.avatic.lcc.model.bulk;
public enum BulkOperationType { public enum BulkInstructionType {
UPDATE, DELETE UPDATE, DELETE
} }

View file

@ -1,29 +1,91 @@
package de.avatic.lcc.model.bulk; package de.avatic.lcc.model.bulk;
public class BulkOperation<T> { import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.dto.bulk.BulkOperationState;
import de.avatic.lcc.dto.bulk.BulkProcessingType;
private BulkOperationType type; import java.time.LocalDateTime;
private T entity; public class BulkOperation {
public BulkOperation(T entity, BulkOperationType type) { private Integer id;
this.entity = entity;
this.type = type; private Integer userId;
private Integer validityPeriodId;
private BulkProcessingType processingType;
private BulkFileType fileType;
private BulkOperationState processState;
private byte[] file;
private LocalDateTime createdAt;
public Integer getId() {
return id;
} }
public BulkOperationType getType() { public void setId(Integer id) {
return type; this.id = id;
} }
public void setType(BulkOperationType type) { public Integer getUserId() {
this.type = type; return userId;
} }
public T getEntity() { public void setUserId(Integer userId) {
return entity; this.userId = userId;
} }
public void setEntity(T entity) { public BulkFileType getFileType() {
this.entity = entity; return fileType;
}
public void setFileType(BulkFileType fileType) {
this.fileType = fileType;
}
public BulkOperationState getProcessState() {
return processState;
}
public void setProcessState(BulkOperationState processState) {
this.processState = processState;
}
public byte[] getFile() {
return file;
}
public void setFile(byte[] file) {
this.file = file;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public BulkProcessingType getProcessingType() {
return processingType;
}
public void setProcessingType(BulkProcessingType processingType) {
this.processingType = processingType;
}
public Integer getValidityPeriodId() {
return validityPeriodId;
}
public void setValidityPeriodId(Integer validityPeriodId) {
this.validityPeriodId = validityPeriodId;
} }
} }

View file

@ -1,71 +0,0 @@
package de.avatic.lcc.model.bulk;
import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.dto.bulk.BulkProcessingType;
import de.avatic.lcc.dto.bulk.BulkState;
import org.apache.commons.compress.parallel.InputStreamSupplier;
import org.springframework.http.ResponseEntity;
import org.springframework.web.multipart.MultipartFile;
public class BulkProcess {
private int id;
private BulkState state;
private BulkFileType type;
private BulkProcessingType processingType;
private MultipartFile bulkRequest;
private ResponseEntity<InputStreamSupplier> bulkResponse;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public BulkState getState() {
return state;
}
public void setState(BulkState state) {
this.state = state;
}
public BulkFileType getType() {
return type;
}
public void setType(BulkFileType type) {
this.type = type;
}
public BulkProcessingType getProcessingType() {
return processingType;
}
public void setProcessingType(BulkProcessingType processingType) {
this.processingType = processingType;
}
public MultipartFile getBulkRequest() {
return bulkRequest;
}
public void setBulkRequest(MultipartFile bulkRequest) {
this.bulkRequest = bulkRequest;
}
public ResponseEntity getBulkResponse() {
return bulkResponse;
}
public void setBulkResponse(ResponseEntity bulkResponse) {
this.bulkResponse = bulkResponse;
}
}

View file

@ -1,5 +0,0 @@
package de.avatic.lcc.model.bulk;
public enum BulkProcessState {
QUEUED, PROCESSING, COMPLETED, FAILED
}

View file

@ -5,14 +5,19 @@ import de.avatic.lcc.model.ValidityTuple;
import de.avatic.lcc.model.nodes.Node; import de.avatic.lcc.model.nodes.Node;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination; import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult; import de.avatic.lcc.repositories.pagination.SearchQueryResult;
import org.apache.commons.lang3.NotImplementedException;
import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Statement;
import java.util.*; import java.util.*;
@Repository @Repository
@ -20,9 +25,11 @@ public class NodeRepository {
private final JdbcTemplate jdbcTemplate; private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
public NodeRepository(JdbcTemplate jdbcTemplate) { public NodeRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
this.jdbcTemplate = jdbcTemplate; this.jdbcTemplate = jdbcTemplate;
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
} }
@Transactional @Transactional
@ -144,31 +151,109 @@ public class NodeRepository {
@Transactional @Transactional
public Optional<Integer> setDeprecatedById(Integer id) { public Optional<Integer> setDeprecatedById(Integer id) {
String query = "UPDATE node SET is_deprecated = TRUE WHERE id = ?"; String query = "UPDATE node SET is_deprecated = TRUE WHERE id = ?";
// Mark all linked RouteNodes as outdated
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = TRUE WHERE node_id = ?", id);
return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id); return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
} }
@Transactional @Transactional
public Optional<Integer> update(Node node) { public Optional<Integer> update(Node node) {
if (node.getId() == null) {
return Optional.empty();
}
throw new NotImplementedException("Update of nodes is not yet implemented!"); String updateNodeSql = """
//TODO update predecessors and outbound_countries too UPDATE node SET
//TODO implement correctly country_id = ?,
name = ?,
address = ?,
external_mapping_id = ?,
predecessor_required = ?,
is_destination = ?,
is_source = ?,
is_intermediate = ?,
geo_lat = ?,
geo_lng = ?,
is_deprecated = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
""";
//TODO if node is updated set all linked RouteNodes to outdated! int rowsUpdated = jdbcTemplate.update(updateNodeSql,
node.getCountryId(),
node.getName(),
node.getAddress(),
node.getExternalMappingId(),
node.getPredecessorRequired(),
node.getDestination(),
node.getSource(),
node.getIntermediate(),
node.getGeoLat(),
node.getGeoLng(),
node.getDeprecated(),
node.getId()
);
// String query = "UPDATE node SET name = ?, address = ?, country_id = ?, is_source = ?, is_destination = ?, is_intermediate = ?, predecessor_required = ? WHERE id = ?"; if (rowsUpdated == 0) {
// return Optional.empty();
// var nodeId = jdbcTemplate.update(query, }
// node.getName(),
// node.getAddress(), if (node.getNodePredecessors() != null) {
// node.getCountryId(),
// node.getSource(), var deletePredecessorEntriesSql = """
// node.getDestination(), DELETE npe FROM node_predecessor_entry npe
// node.getIntermediate(), JOIN node_predecessor_chain npc ON npe.node_predecessor_chain_id = npc.id
// node.getPredecessorRequired(), WHERE npc.node_id = ?""";
// node.getId()) == 0 ? null : node.getId();
// var deletePredecessorChainSql = """
// return Optional.ofNullable(nodeId); DELETE FROM node_predecessor_chain WHERE node_id = ?
""";
jdbcTemplate.update(deletePredecessorEntriesSql, node.getId());
jdbcTemplate.update(deletePredecessorChainSql, node.getId());
// Insert new predecessor chains
for (Map<Integer, Integer> predecessorChain : node.getNodePredecessors()) {
// Insert new chain
KeyHolder chainKeyHolder = new GeneratedKeyHolder();
jdbcTemplate.update(connection -> {
PreparedStatement ps = connection.prepareStatement(
"INSERT INTO node_predecessor_chain (node_id) VALUES (?)",
Statement.RETURN_GENERATED_KEYS);
ps.setInt(1, node.getId());
return ps;
}, chainKeyHolder);
Integer chainId = chainKeyHolder.getKey().intValue();
// Insert entries for this chain
for (Map.Entry<Integer, Integer> entry : predecessorChain.entrySet()) {
jdbcTemplate.update(
"INSERT INTO node_predecessor_entry (node_id, node_predecessor_chain_id, sequence_number) VALUES (?, ?, ?)",
entry.getValue(), chainId, entry.getKey()
);
}
}
}
jdbcTemplate.update("DELETE FROM outbound_country_mapping WHERE node_id = ?", node.getId());
if (node.getOutboundCountries() != null) {
for (Integer countryId : node.getOutboundCountries()) {
jdbcTemplate.update(
"INSERT INTO outbound_country_mapping (node_id, country_id) VALUES (?, ?)",
node.getId(), countryId
);
}
}
// Mark all linked RouteNodes as outdated
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = TRUE WHERE node_id = ?", node.getId());
return Optional.of(node.getId());
} }
public List<Node> searchNode(String filter, int limit, NodeType nodeType, boolean excludeDeprecated) { public List<Node> searchNode(String filter, int limit, NodeType nodeType, boolean excludeDeprecated) {
@ -240,51 +325,6 @@ public class NodeRepository {
return Optional.ofNullable(node); return Optional.ofNullable(node);
} }
@Transactional
public List<List<Node>> findNodeListsForReportingByMaterialId(Integer materialId) {
String validityPeriodSql = """
SELECT DISTINCT cj.validity_period_id
FROM premise p
INNER JOIN calculation_job cj ON p.id = cj.premise_id
WHERE p.material_id = ?
""";
List<Integer> validityPeriodIds = jdbcTemplate.queryForList(validityPeriodSql, Integer.class, materialId);
// For each validity period, get the set of supplier_node_ids
List<List<Node>> nodes = new ArrayList<>();
for (Integer validityPeriodId : validityPeriodIds) {
String suppliersSql = """
SELECT DISTINCT n.*
FROM premise p
INNER JOIN calculation_job cj ON p.id = cj.premise_id
INNER JOIN node n ON p.supplier_node_id = n.id
WHERE p.material_id = ?
AND cj.validity_period_id = ?
AND p.supplier_node_id IS NOT NULL
""";
String userSuppliersSql = """
SELECT DISTINCT un.*
FROM premise p
INNER JOIN calculation_job cj ON p.id = cj.premise_id
INNER JOIN sys_user_node un ON p.user_supplier_node_id = un.id
WHERE p.material_id = ?
AND cj.validity_period_id = ?
AND p.user_supplier_node_id IS NOT NULL
""";
var periodNodes = new ArrayList<>(jdbcTemplate.query(suppliersSql, new NodeMapper(), materialId, validityPeriodId));
periodNodes.addAll(jdbcTemplate.query(userSuppliersSql, new NodeMapper(), materialId, validityPeriodId));
nodes.add(periodNodes);
}
return nodes;
}
/** /**
* Resolves chains of predecessors for a specified destination chain by its ID. * Resolves chains of predecessors for a specified destination chain by its ID.
@ -356,7 +396,7 @@ public class NodeRepository {
) <= ? ) <= ?
"""; """;
return jdbcTemplate.query(query, new NodeMapper(), node.getGeoLat(), node.getGeoLng(), node.getGeoLat(),regionRadius); return jdbcTemplate.query(query, new NodeMapper(), node.getGeoLat(), node.getGeoLng(), node.getGeoLat(), regionRadius);
} }
@ -414,6 +454,109 @@ public class NodeRepository {
} }
@Transactional
public Map<String, Integer> getIdsByExternalMappingIds(List<String> mappingIds) {
if (mappingIds == null || mappingIds.isEmpty()) {
return new HashMap<>();
}
String sql = "SELECT external_mapping_id, id FROM node WHERE external_mapping_id IN (:mappingIds)";
MapSqlParameterSource parameters = new MapSqlParameterSource();
parameters.addValue("mappingIds", mappingIds);
return namedParameterJdbcTemplate.query(sql, parameters, rs -> {
Map<String, Integer> result = new HashMap<>();
while (rs.next()) {
result.put(rs.getString("external_mapping_id"), rs.getInt("id"));
}
return result;
});
}
@Transactional
public Integer insert(Node node) {
// Insert the main node record
String insertNodeSql = """
INSERT INTO node (
country_id,
name,
address,
external_mapping_id,
predecessor_required,
is_destination,
is_source,
is_intermediate,
geo_lat,
geo_lng,
is_deprecated
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""";
KeyHolder keyHolder = new GeneratedKeyHolder();
int rowsInserted = jdbcTemplate.update(connection -> {
PreparedStatement ps = connection.prepareStatement(insertNodeSql, Statement.RETURN_GENERATED_KEYS);
ps.setObject(1, node.getCountryId());
ps.setString(2, node.getName());
ps.setString(3, node.getAddress());
ps.setString(4, node.getExternalMappingId());
ps.setBoolean(5, node.getPredecessorRequired() != null ? node.getPredecessorRequired() : false);
ps.setBoolean(6, node.getDestination() != null ? node.getDestination() : false);
ps.setBoolean(7, node.getSource() != null ? node.getSource() : false);
ps.setBoolean(8, node.getIntermediate() != null ? node.getIntermediate() : false);
ps.setBigDecimal(9, node.getGeoLat());
ps.setBigDecimal(10, node.getGeoLng());
ps.setBoolean(11, node.getDeprecated() != null ? node.getDeprecated() : false);
return ps;
}, keyHolder);
if (rowsInserted == 0) {
return null;
}
Integer nodeId = Objects.requireNonNull(keyHolder.getKey()).intValue();
// Insert predecessors if they exist
if (node.getNodePredecessors() != null) {
for (Map<Integer, Integer> predecessorChain : node.getNodePredecessors()) {
// Insert new chain
KeyHolder chainKeyHolder = new GeneratedKeyHolder();
jdbcTemplate.update(connection -> {
PreparedStatement ps = connection.prepareStatement(
"INSERT INTO node_predecessor_chain (node_id) VALUES (?)",
Statement.RETURN_GENERATED_KEYS);
ps.setInt(1, nodeId);
return ps;
}, chainKeyHolder);
Integer chainId = Objects.requireNonNull(chainKeyHolder.getKey()).intValue();
// Insert entries for this chain
for (Map.Entry<Integer, Integer> entry : predecessorChain.entrySet()) {
jdbcTemplate.update(
"INSERT INTO node_predecessor_entry (node_id, node_predecessor_chain_id, sequence_number) VALUES (?, ?, ?)",
entry.getValue(), chainId, entry.getKey()
);
}
}
}
// Insert outbound countries if they exist
if (node.getOutboundCountries() != null) {
for (Integer countryId : node.getOutboundCountries()) {
jdbcTemplate.update(
"INSERT INTO outbound_country_mapping (node_id, country_id) VALUES (?, ?)",
nodeId, countryId
);
}
}
return nodeId;
}
private class NodeMapper implements RowMapper<Node> { private class NodeMapper implements RowMapper<Node> {
@Override @Override

View file

@ -1,7 +1,158 @@
package de.avatic.lcc.repositories.bulk; package de.avatic.lcc.repositories.bulk;
import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.dto.bulk.BulkOperationState;
import de.avatic.lcc.dto.bulk.BulkProcessingType;
import de.avatic.lcc.model.bulk.BulkOperation;
import de.avatic.lcc.util.exception.internalerror.DatabaseException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
@Repository @Repository
public class BulkOperationRepository { public class BulkOperationRepository {
private final JdbcTemplate jdbcTemplate;
public BulkOperationRepository(JdbcTemplate jdbcTemplate) {
this.jdbcTemplate = jdbcTemplate;
}
@Transactional
public Integer insert(BulkOperation operation) {
String sql = """
INSERT INTO bulk_operation (user_id, bulk_file_type, bulk_processing_type, state, file, validity_period_id)
VALUES (?, ?, ?, ?, ?, ?)
""";
GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
var affectedRows = jdbcTemplate.update(connection -> {
PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
ps.setInt(1, operation.getUserId());
ps.setString(2, operation.getFileType().name());
ps.setString(3, operation.getProcessingType().name());
ps.setString(4, operation.getProcessState().name());
ps.setBytes(5, operation.getFile());
if (Objects.isNull(operation.getValidityPeriodId()))
ps.setNull(6, java.sql.Types.INTEGER);
else
ps.setInt(6, operation.getValidityPeriodId());
return ps;
}, keyHolder);
if (affectedRows != 1)
throw new DatabaseException("Unable to schedule bulk operation");
// Return the generated ID
return keyHolder.getKey() != null ? keyHolder.getKey().intValue() : null;
}
@Transactional
public void removeOld() {
String sql = """
DELETE FROM bulk_operation
WHERE created_at < DATE_SUB(NOW(), INTERVAL 7 DAY)
AND state NOT IN ('QUEUED', 'PROCESSING')
""";
jdbcTemplate.update(sql);
}
@Transactional
public void updateState(Integer id, BulkOperationState state) {
String sql = """
UPDATE bulk_operation
SET state = ?
WHERE id = ?
""";
jdbcTemplate.update(sql, state.name(), id);
}
@Transactional
public List<BulkOperation> listByUserId(Integer userId) {
String sql = """
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, created_at
FROM bulk_operation
WHERE user_id = ?
ORDER BY created_at DESC
""";
return jdbcTemplate.query(sql, new BulkOperationRowMapper(true), userId);
}
/**
 * Loads a single operation including its file payload.
 * validity_period_id is included in the projection so the entity can be fully
 * reconstructed: the loaded operation is later re-persisted via update(),
 * which writes validity_period_id back and would otherwise null it out.
 *
 * @param id primary key to look up
 * @return the operation, or empty if no row matches
 */
@Transactional
public Optional<BulkOperation> getOperationById(Integer id) {
    String sql = """
            SELECT id, user_id, bulk_file_type, bulk_processing_type, state, file, validity_period_id, created_at
            FROM bulk_operation
            WHERE id = ?
            """;
    return jdbcTemplate.query(sql, new BulkOperationRowMapper(false), id)
            .stream()
            .findFirst();
}
/**
 * Rewrites the mutable columns of an existing operation.
 * NOTE(review): bulk_processing_type is not updated here — presumably
 * immutable after creation; confirm. created_at is database-managed.
 *
 * @param op the operation whose current field values are persisted
 */
@Transactional
public void update(BulkOperation op) {
    jdbcTemplate.update("""
            UPDATE bulk_operation
            SET user_id = ?, bulk_file_type = ?, state = ?, file = ?, validity_period_id = ?
            WHERE id = ?
            """,
            op.getUserId(),
            op.getFileType().name(),
            op.getProcessState().name(),
            op.getFile(),
            op.getValidityPeriodId(),
            op.getId());
}
/**
 * Maps bulk_operation rows to {@link BulkOperation} entities.
 * Tolerates projections that omit optional columns: the file BLOB can be
 * skipped explicitly, and validity_period_id / created_at are read only when
 * present so the same mapper serves both the listing and the by-id query.
 */
private static class BulkOperationRowMapper implements RowMapper<BulkOperation> {
    // When true the "file" column is not read (listing queries do not select it).
    private final boolean skipFile;
    BulkOperationRowMapper() {
        this(false);
    }
    BulkOperationRowMapper(boolean skipFile) {
        this.skipFile = skipFile;
    }
    @Override
    public BulkOperation mapRow(ResultSet rs, int rowNum) throws SQLException {
        BulkOperation operation = new BulkOperation();
        operation.setId(rs.getInt("id"));
        operation.setUserId(rs.getInt("user_id"));
        operation.setProcessingType(BulkProcessingType.valueOf(rs.getString("bulk_processing_type")));
        operation.setFileType(BulkFileType.valueOf(rs.getString("bulk_file_type")));
        operation.setProcessState(BulkOperationState.valueOf(rs.getString("state")));
        if (!skipFile)
            operation.setFile(rs.getBytes("file"));
        // Restore the validity period when the query selected it; without this,
        // a load-modify-update cycle would silently null the period column.
        if (hasColumn(rs, "validity_period_id")) {
            int periodId = rs.getInt("validity_period_id");
            operation.setValidityPeriodId(rs.wasNull() ? null : periodId);
        }
        // Null-safe: avoid an NPE if created_at is ever absent or NULL.
        var createdAt = rs.getTimestamp("created_at");
        if (createdAt != null)
            operation.setCreatedAt(createdAt.toLocalDateTime());
        return operation;
    }
    // True if the result set's projection contains the named column.
    private static boolean hasColumn(ResultSet rs, String column) throws SQLException {
        try {
            rs.findColumn(column);
            return true;
        } catch (SQLException absent) {
            return false;
        }
    }
}
} }

View file

@ -7,22 +7,25 @@ import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult; import de.avatic.lcc.repositories.pagination.SearchQueryResult;
import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.*;
import java.util.Optional;
@Repository @Repository
public class CountryRepository { public class CountryRepository {
private final JdbcTemplate jdbcTemplate; private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
public CountryRepository(JdbcTemplate jdbcTemplate) { public CountryRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
this.jdbcTemplate = jdbcTemplate; this.jdbcTemplate = jdbcTemplate;
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
} }
@Transactional @Transactional
@ -141,6 +144,19 @@ public class CountryRepository {
return jdbcTemplate.query(query, new CountryMapper()); return jdbcTemplate.query(query, new CountryMapper());
} }
/**
 * Fetches all countries whose iso_code is in the given list.
 * A null or empty input short-circuits to an empty result, since an empty
 * SQL IN (...) clause would be invalid.
 * NOTE(review): assumes each IsoCode element converts to the iso_code column
 * value via the JDBC parameter mapping — confirm against the type definition.
 *
 * @param outboundCountries iso codes to match; may be null or empty
 * @return matching countries, or an empty list
 */
public List<Country> getByIsoCodes(List<IsoCode> outboundCountries) {
    if (outboundCountries == null || outboundCountries.isEmpty()) {
        return Collections.emptyList();
    }
    var parameters = new MapSqlParameterSource("isoCodes", outboundCountries);
    return namedParameterJdbcTemplate.query(
            "SELECT * FROM country WHERE iso_code IN (:isoCodes)",
            parameters,
            new CountryMapper());
}
private static class CountryMapper implements RowMapper<Country> { private static class CountryMapper implements RowMapper<Country> {
@Override @Override

View file

@ -6,7 +6,6 @@ import de.avatic.lcc.model.error.SysErrorTraceItem;
import de.avatic.lcc.model.error.SysErrorType; import de.avatic.lcc.model.error.SysErrorType;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination; import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult; import de.avatic.lcc.repositories.pagination.SearchQueryResult;
import jakarta.validation.constraints.Min;
import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
@ -31,6 +30,38 @@ public class SysErrorRepository {
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate; this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
} }
/**
 * Persists a single error together with its stack-trace items.
 *
 * @param error the error to store; userId, calculationJobId and bulkOperationId may be null
 * @return the generated sys_error id
 */
@Transactional
public Integer insert(SysError error) {
    String errorSql = "INSERT INTO sys_error (user_id, title, code, message, pinia, calculation_job_id, bulk_operation_id, type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)";
    KeyHolder keyHolder = new GeneratedKeyHolder();
    jdbcTemplate.update(connection -> {
        PreparedStatement ps = connection.prepareStatement(errorSql, Statement.RETURN_GENERATED_KEYS);
        // setObject handles the nullable Integer foreign keys (writes SQL NULL for null)
        ps.setObject(1, error.getUserId());
        ps.setString(2, error.getTitle());
        ps.setString(3, error.getCode());
        ps.setString(4, error.getMessage());
        ps.setString(5, error.getPinia());
        ps.setObject(6, error.getCalculationJobId());
        ps.setObject(7, error.getBulkOperationId());
        ps.setString(8, error.getType().name());
        return ps;
    }, keyHolder);
    Integer errorId = Objects.requireNonNull(keyHolder.getKey()).intValue();
    // Trace items live in a child table keyed by the generated error id.
    var trace = error.getTrace();
    if (trace != null && !trace.isEmpty()) {
        insertTraceItems(errorId, trace);
    }
    return errorId;
}
@Transactional @Transactional
public void insert(List<SysError> errors) { public void insert(List<SysError> errors) {
// First insert the sys_error records // First insert the sys_error records
@ -93,13 +124,13 @@ public class SysErrorRepository {
// Build main query with pagination // Build main query with pagination
String sql = """ String sql = """
SELECT e.id, e.user_id, e.title, e.code, e.message, e.pinia, SELECT e.id, e.user_id, e.title, e.code, e.message, e.pinia,
e.calculation_job_id, e.bulk_operation_id, e.type, e.created_at e.calculation_job_id, e.bulk_operation_id, e.type, e.created_at
FROM sys_error e FROM sys_error e
""" + whereClause.toString() + """ """ + whereClause.toString() + """
ORDER BY e.created_at DESC ORDER BY e.created_at DESC
LIMIT :limit OFFSET :offset LIMIT :limit OFFSET :offset
"""; """;
// Add pagination parameters // Add pagination parameters
parameters.addValue("limit", pagination.getLimit()); parameters.addValue("limit", pagination.getLimit());
@ -140,11 +171,11 @@ public class SysErrorRepository {
} }
String traceSql = """ String traceSql = """
SELECT error_id, id, line, file, method, fullPath SELECT error_id, id, line, file, method, fullPath
FROM sys_error_trace_item FROM sys_error_trace_item
WHERE error_id IN (:errorIds) WHERE error_id IN (:errorIds)
ORDER BY error_id, id ORDER BY error_id, id
"""; """;
MapSqlParameterSource traceParameters = new MapSqlParameterSource("errorIds", errorIds); MapSqlParameterSource traceParameters = new MapSqlParameterSource("errorIds", errorIds);

View file

@ -2,31 +2,27 @@ package de.avatic.lcc.service.bulk;
import de.avatic.lcc.dto.bulk.BulkFileType; import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.model.bulk.BulkFileTypes; import de.avatic.lcc.model.bulk.BulkFileTypes;
import de.avatic.lcc.model.bulk.BulkOperation;
import de.avatic.lcc.model.bulk.HiddenTableType; import de.avatic.lcc.model.bulk.HiddenTableType;
import de.avatic.lcc.repositories.rates.ValidityPeriodRepository; import de.avatic.lcc.repositories.rates.ValidityPeriodRepository;
import de.avatic.lcc.service.bulk.helper.HeaderCellStyleProvider; import de.avatic.lcc.service.bulk.helper.HeaderCellStyleProvider;
import de.avatic.lcc.service.excelMapper.*; import de.avatic.lcc.service.excelMapper.*;
import de.avatic.lcc.util.exception.base.InternalErrorException;
import org.apache.poi.ss.usermodel.CellStyle; import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.SheetVisibility; import org.apache.poi.ss.usermodel.SheetVisibility;
import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.InputStreamSource;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStream;
@Service @Service
public class BulkExportService { public class BulkExportService {
private final HeaderCellStyleProvider headerCellStyleProvider; private final HeaderCellStyleProvider headerCellStyleProvider;
private final ContainerRateExcelMapper containerRateExcelMapper; private final ContainerRateExcelMapper containerRateExcelMapper;
private final ValidityPeriodRepository validityPeriodRepository;
private final MatrixRateExcelMapper matrixRateExcelMapper; private final MatrixRateExcelMapper matrixRateExcelMapper;
private final MaterialExcelMapper materialExcelMapper; private final MaterialExcelMapper materialExcelMapper;
private final PackagingExcelMapper packagingExcelMapper; private final PackagingExcelMapper packagingExcelMapper;
@ -35,10 +31,9 @@ public class BulkExportService {
private final HiddenCountryExcelMapper hiddenCountryExcelMapper; private final HiddenCountryExcelMapper hiddenCountryExcelMapper;
private final String sheetPassword; private final String sheetPassword;
public BulkExportService(@Value("${lcc.bulk.sheet_password}") String sheetPassword, HeaderCellStyleProvider headerCellStyleProvider, ContainerRateExcelMapper containerRateExcelMapper, ValidityPeriodRepository validityPeriodRepository, MatrixRateExcelMapper matrixRateExcelMapper, MaterialExcelMapper materialExcelMapper, PackagingExcelMapper packagingExcelMapper, NodeExcelMapper nodeExcelMapper, HiddenNodeExcelMapper hiddenNodeExcelMapper, HiddenCountryExcelMapper hiddenCountryExcelMapper) { public BulkExportService(@Value("${lcc.bulk.sheet_password}") String sheetPassword, HeaderCellStyleProvider headerCellStyleProvider, ContainerRateExcelMapper containerRateExcelMapper, MatrixRateExcelMapper matrixRateExcelMapper, MaterialExcelMapper materialExcelMapper, PackagingExcelMapper packagingExcelMapper, NodeExcelMapper nodeExcelMapper, HiddenNodeExcelMapper hiddenNodeExcelMapper, HiddenCountryExcelMapper hiddenCountryExcelMapper) {
this.headerCellStyleProvider = headerCellStyleProvider; this.headerCellStyleProvider = headerCellStyleProvider;
this.containerRateExcelMapper = containerRateExcelMapper; this.containerRateExcelMapper = containerRateExcelMapper;
this.validityPeriodRepository = validityPeriodRepository;
this.matrixRateExcelMapper = matrixRateExcelMapper; this.matrixRateExcelMapper = matrixRateExcelMapper;
this.materialExcelMapper = materialExcelMapper; this.materialExcelMapper = materialExcelMapper;
this.packagingExcelMapper = packagingExcelMapper; this.packagingExcelMapper = packagingExcelMapper;
@ -48,61 +43,58 @@ public class BulkExportService {
this.sheetPassword = sheetPassword; this.sheetPassword = sheetPassword;
} }
public InputStreamSource generateExport(BulkFileType bulkFileType) throws IOException {
return generateExport(bulkFileType, validityPeriodRepository.getValidPeriodId().orElseThrow(() -> new InternalErrorException("No valid period found that is VALID")));
}
public InputStreamSource generateExport(BulkFileType bulkFileType, Integer periodId) throws IOException { public void processOperation(BulkOperation op) throws IOException {
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
Workbook workbook = new XSSFWorkbook();
Sheet worksheet = workbook.createSheet(BulkFileTypes.valueOf(bulkFileType.name()).getSheetName());
CellStyle style = headerCellStyleProvider.createHeaderCellStyle(workbook); var bulkFileType = op.getFileType();
var periodId = op.getValidityPeriodId();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
//TODO: check if a parallel task is needed. Workbook workbook = new XSSFWorkbook();
Sheet worksheet = workbook.createSheet(BulkFileTypes.valueOf(bulkFileType.name()).getSheetName());
CellStyle style = headerCellStyleProvider.createHeaderCellStyle(workbook);
if (bulkFileType.equals(BulkFileType.COUNTRY_MATRIX) || bulkFileType.equals(BulkFileType.NODE)) { if (bulkFileType.equals(BulkFileType.COUNTRY_MATRIX) || bulkFileType.equals(BulkFileType.NODE)) {
var hiddenCountrySheet = workbook.createSheet(HiddenTableType.COUNTRY_HIDDEN_TABLE.getSheetName()); var hiddenCountrySheet = workbook.createSheet(HiddenTableType.COUNTRY_HIDDEN_TABLE.getSheetName());
hiddenCountryExcelMapper.fillSheet(hiddenCountrySheet, style); hiddenCountryExcelMapper.fillSheet(hiddenCountrySheet, style);
hiddenCountrySheet.protectSheet(sheetPassword); hiddenCountrySheet.protectSheet(sheetPassword);
workbook.setSheetVisibility(workbook.getSheetIndex(hiddenCountrySheet), SheetVisibility.HIDDEN); workbook.setSheetVisibility(workbook.getSheetIndex(hiddenCountrySheet), SheetVisibility.HIDDEN);
} else if (bulkFileType.equals(BulkFileType.CONTAINER_RATE) || bulkFileType.equals(BulkFileType.PACKAGING)) { } else if (bulkFileType.equals(BulkFileType.CONTAINER_RATE) || bulkFileType.equals(BulkFileType.PACKAGING)) {
var hiddenNodeSheet = workbook.createSheet(HiddenTableType.NODE_HIDDEN_TABLE.getSheetName()); var hiddenNodeSheet = workbook.createSheet(HiddenTableType.NODE_HIDDEN_TABLE.getSheetName());
hiddenNodeExcelMapper.fillSheet(hiddenNodeSheet, style, BulkFileType.PACKAGING.equals(bulkFileType)); hiddenNodeExcelMapper.fillSheet(hiddenNodeSheet, style, BulkFileType.PACKAGING.equals(bulkFileType));
hiddenNodeSheet.protectSheet(sheetPassword); hiddenNodeSheet.protectSheet(sheetPassword);
workbook.setSheetVisibility(workbook.getSheetIndex(hiddenNodeSheet), SheetVisibility.HIDDEN); workbook.setSheetVisibility(workbook.getSheetIndex(hiddenNodeSheet), SheetVisibility.HIDDEN);
}
// Create headers based on the bulk file type
switch (bulkFileType) {
case CONTAINER_RATE:
containerRateExcelMapper.fillSheet(worksheet, style, periodId);
containerRateExcelMapper.createConstraints(workbook, worksheet);
break;
case COUNTRY_MATRIX:
matrixRateExcelMapper.fillSheet(worksheet, style, periodId);
matrixRateExcelMapper.createConstraints(workbook, worksheet);
break;
case MATERIAL:
materialExcelMapper.fillSheet(worksheet, style);
materialExcelMapper.createConstraints(worksheet);
break;
case PACKAGING:
packagingExcelMapper.fillSheet(worksheet, style);
packagingExcelMapper.createConstraints(workbook, worksheet);
break;
case NODE:
nodeExcelMapper.fillSheet(worksheet, style);
nodeExcelMapper.createConstraints(workbook, worksheet);
break;
}
// Return the Excel file as an InputStreamSource
workbook.write(outputStream);
return new ByteArrayResource(outputStream.toByteArray());
} catch (IOException e) {
throw new RuntimeException("Failed to generate template", e);
} }
// Create headers based on the bulk file type
switch (bulkFileType) {
case CONTAINER_RATE:
containerRateExcelMapper.fillSheet(worksheet, style, periodId);
containerRateExcelMapper.createConstraints(workbook, worksheet);
break;
case COUNTRY_MATRIX:
matrixRateExcelMapper.fillSheet(worksheet, style, periodId);
matrixRateExcelMapper.createConstraints(workbook, worksheet);
break;
case MATERIAL:
materialExcelMapper.fillSheet(worksheet, style);
materialExcelMapper.createConstraints(worksheet);
break;
case PACKAGING:
packagingExcelMapper.fillSheet(worksheet, style);
packagingExcelMapper.createConstraints(workbook, worksheet);
break;
case NODE:
nodeExcelMapper.fillSheet(worksheet, style);
nodeExcelMapper.createConstraints(workbook, worksheet);
break;
}
// Return the Excel file as an InputStreamSource
workbook.write(outputStream);
op.setFile(outputStream.toByteArray());
} }
} }

View file

@ -1,83 +0,0 @@
package de.avatic.lcc.service.bulk;
import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.dto.bulk.BulkStatusDTO;
import de.avatic.lcc.model.bulk.BulkFileTypes;
import de.avatic.lcc.service.excelMapper.*;
import de.avatic.lcc.util.exception.badrequest.FileFormatNotSupportedException;
import de.avatic.lcc.util.exception.base.BadRequestException;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.InputStream;
import java.util.List;
// NOTE: synchronous upload handling; each supported file type has a dedicated Excel mapper.
@Service
public class BulkFileProcessingService {
    // One mapper per supported bulk file type.
    private final MatrixRateExcelMapper matrixRateExcelMapper;
    private final ContainerRateExcelMapper containerRateExcelMapper;
    private final MaterialExcelMapper materialExcelMapper;
    private final PackagingExcelMapper packagingExcelMapper;
    private final NodeExcelMapper nodeExcelMapper;
    // Delegate for status reporting.
    private final BulkProcessingService bulkStatusService;
    public BulkFileProcessingService(MatrixRateExcelMapper matrixRateExcelMapper, ContainerRateExcelMapper containerRateExcelMapper, MaterialExcelMapper materialExcelMapper, PackagingExcelMapper packagingExcelMapper, NodeExcelMapper nodeExcelMapper, BulkProcessingService bulkStatusService) {
        this.matrixRateExcelMapper = matrixRateExcelMapper;
        this.containerRateExcelMapper = containerRateExcelMapper;
        this.materialExcelMapper = materialExcelMapper;
        this.packagingExcelMapper = packagingExcelMapper;
        this.nodeExcelMapper = nodeExcelMapper;
        this.bulkStatusService = bulkStatusService;
    }
    /**
     * Validates the uploaded file's MIME type, opens the workbook, and extracts
     * the sheet matching the given bulk file type. The extracted rows are
     * currently discarded (placeholder locals); always returns 0.
     * NOTE(review): the XSSFWorkbook is never closed here — resource leak.
     */
    public Integer processFile(BulkFileType type, MultipartFile file) {
        //TODO: launch parallel task
        String contentType = file.getContentType();
        // Only xlsx and legacy xls uploads are accepted.
        if (!"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet".equals(contentType) &&
        !"application/vnd.ms-excel".equals(contentType)) {
            throw new FileFormatNotSupportedException(contentType);
        }
        try (InputStream in = file.getInputStream()) {
            Workbook workbook = new XSSFWorkbook(in);
            // Sheet name is derived from the file type's configured sheet name.
            Sheet sheet = workbook.getSheet(BulkFileTypes.valueOf(type.name()).getSheetName());
            switch (type) {
                case CONTAINER_RATE:
                var containerRates = containerRateExcelMapper.extractSheet(sheet);
                break;
                case COUNTRY_MATRIX:
                var matrixRates = matrixRateExcelMapper.extractSheet(sheet);
                break;
                case MATERIAL:
                var materials = materialExcelMapper.extractSheet(sheet);
                break;
                case PACKAGING:
                var packaging = packagingExcelMapper.extractSheet(sheet);
                break;
                case NODE:
                var nodes = nodeExcelMapper.extractSheet(sheet);
                // check predecessors chains for loops or contradictions
                break;
                default:
            }
        } catch (Exception e) {
            // Any parsing failure is surfaced to the client as a bad request, cause preserved.
            throw new BadRequestException("Unable to read excel sheet", e.getMessage(), e);
        }
        return 0;
    }
    // Delegates status lookup to the processing service.
    public List<BulkStatusDTO> getStatus() {
        return bulkStatusService.getStatus();
    }
}

View file

@ -0,0 +1,105 @@
package de.avatic.lcc.service.bulk;
import de.avatic.lcc.excelmodel.ExcelNode;
import de.avatic.lcc.model.bulk.BulkFileTypes;
import de.avatic.lcc.model.bulk.BulkInstruction;
import de.avatic.lcc.model.bulk.BulkInstructionType;
import de.avatic.lcc.model.bulk.BulkOperation;
import de.avatic.lcc.repositories.NodeRepository;
import de.avatic.lcc.service.excelMapper.*;
import de.avatic.lcc.service.transformer.generic.NodeTransformer;
import de.avatic.lcc.util.exception.internalerror.ExcelValidationError;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.springframework.stereotype.Service;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
 * Imports data from the Excel file stored on a {@link BulkOperation}.
 * Each supported file type is read by its dedicated mapper; currently only
 * NODE rows are applied (update/delete instructions), the other extractions
 * are placeholders.
 */
@Service
public class BulkImportService {
    private final MatrixRateExcelMapper matrixRateExcelMapper;
    private final ContainerRateExcelMapper containerRateExcelMapper;
    private final MaterialExcelMapper materialExcelMapper;
    private final PackagingExcelMapper packagingExcelMapper;
    private final NodeExcelMapper nodeExcelMapper;
    private final NodeRepository nodeRepository;
    private final NodeTransformer nodeTransformer;
    public BulkImportService(MatrixRateExcelMapper matrixRateExcelMapper, ContainerRateExcelMapper containerRateExcelMapper, MaterialExcelMapper materialExcelMapper, PackagingExcelMapper packagingExcelMapper, NodeExcelMapper nodeExcelMapper, NodeRepository nodeRepository, NodeTransformer nodeTransformer) {
        this.matrixRateExcelMapper = matrixRateExcelMapper;
        this.containerRateExcelMapper = containerRateExcelMapper;
        this.materialExcelMapper = materialExcelMapper;
        this.packagingExcelMapper = packagingExcelMapper;
        this.nodeExcelMapper = nodeExcelMapper;
        this.nodeRepository = nodeRepository;
        this.nodeTransformer = nodeTransformer;
    }
    /**
     * Parses the operation's stored workbook and applies the extracted rows.
     *
     * @param op the operation whose file bytes contain the uploaded workbook
     * @throws IOException if the workbook cannot be read
     */
    public void processOperation(BulkOperation op) throws IOException {
        var type = op.getFileType();
        // try-with-resources: the original leaked the XSSFWorkbook (and stream),
        // which holds native/temp resources until GC.
        try (InputStream in = new ByteArrayInputStream(op.getFile());
             Workbook workbook = new XSSFWorkbook(in)) {
            // Sheet name is derived from the file type's configured sheet name.
            Sheet sheet = workbook.getSheet(BulkFileTypes.valueOf(type.name()).getSheetName());
            switch (type) {
                case CONTAINER_RATE:
                    var containerRates = containerRateExcelMapper.extractSheet(sheet); // TODO apply
                    break;
                case COUNTRY_MATRIX:
                    var matrixRates = matrixRateExcelMapper.extractSheet(sheet); // TODO apply
                    break;
                case MATERIAL:
                    var materials = materialExcelMapper.extractSheet(sheet); // TODO apply
                    break;
                case PACKAGING:
                    var packaging = packagingExcelMapper.extractSheet(sheet); // TODO apply
                    break;
                case NODE:
                    var nodeInstructions = nodeExcelMapper.extractSheet(sheet);
                    nodeInstructions.forEach(this::processNodeInstructions);
                    break;
                default:
            }
        }
    }
    // Dispatches a single node instruction to update or delete handling.
    private void processNodeInstructions(BulkInstruction<ExcelNode> instr) {
        BulkInstructionType instrType = instr.getType();
        ExcelNode excelNode = instr.getEntity();
        if (instrType == BulkInstructionType.UPDATE) {
            updateNode(excelNode);
        } else if (instrType == BulkInstructionType.DELETE) {
            deleteNode(excelNode);
        }
    }
    // Soft delete: nodes are marked deprecated rather than removed.
    private void deleteNode(ExcelNode excelNode) {
        nodeRepository.setDeprecatedById(excelNode.getId());
    }
    // Upsert by external mapping id: insert when unknown, otherwise update.
    private void updateNode(ExcelNode excelNode) {
        var node = nodeRepository.getByExternalMappingId(excelNode.getExternalMappingId());
        if (node.isEmpty()) {
            nodeRepository.insert(nodeTransformer.toNodeEntity(excelNode));
        } else {
            nodeRepository.update(nodeTransformer.toNodeEntity(excelNode));
        }
    }
}

View file

@ -0,0 +1,82 @@
package de.avatic.lcc.service.bulk;
import de.avatic.lcc.dto.bulk.BulkOperationState;
import de.avatic.lcc.dto.bulk.BulkProcessingType;
import de.avatic.lcc.model.error.SysError;
import de.avatic.lcc.model.error.SysErrorType;
import de.avatic.lcc.repositories.bulk.BulkOperationRepository;
import de.avatic.lcc.repositories.error.SysErrorRepository;
import de.avatic.lcc.service.transformer.error.SysErrorTransformer;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Arrays;
/**
 * Executes scheduled bulk operations asynchronously on the
 * "bulkProcessingExecutor" pool, recording any failure as a SysError row.
 */
@Service
public class BulkOperationExecutionService {
    private final BulkOperationRepository bulkOperationRepository;
    private final BulkExportService bulkExportService;
    private final BulkImportService bulkImportService;
    private final SysErrorRepository sysErrorRepository;
    private final SysErrorTransformer sysErrorTransformer;
    public BulkOperationExecutionService(BulkOperationRepository bulkOperationRepository, BulkExportService bulkExportService, BulkImportService bulkImportService, SysErrorRepository sysErrorRepository, SysErrorTransformer sysErrorTransformer) {
        this.bulkOperationRepository = bulkOperationRepository;
        this.bulkExportService = bulkExportService;
        this.bulkImportService = bulkImportService;
        this.sysErrorRepository = sysErrorRepository;
        this.sysErrorTransformer = sysErrorTransformer;
    }
    /**
     * Loads the operation, and if it is still SCHEDULED, runs the export or
     * import, then persists the final state (COMPLETED, or EXCEPTION with a
     * SysError record whose trace is built from the thrown exception).
     * Exceptions are swallowed into the error table, so the surrounding
     * transaction still commits the EXCEPTION state.
     * NOTE(review): combining @Transactional with @Async means the PROCESSING
     * state written via updateState likely joins this long-lived transaction
     * and is not visible to other sessions until the whole method commits —
     * confirm whether updateState should run in its own transaction.
     */
    @Transactional
    @Async("bulkProcessingExecutor")
    public void launchExecution(Integer id) {
        var operation = bulkOperationRepository.getOperationById(id);
        if (operation.isPresent()) {
            var op = operation.get();
            // Guard against double execution: only SCHEDULED operations are run.
            if (op.getProcessState() == BulkOperationState.SCHEDULED) {
                bulkOperationRepository.updateState(id, BulkOperationState.PROCESSING);
                try {
                    if (op.getProcessingType() == BulkProcessingType.EXPORT) {
                        bulkExportService.processOperation(op);
                        op.setProcessState(BulkOperationState.COMPLETED);
                    } else {
                        bulkImportService.processOperation(op);
                        op.setProcessState(BulkOperationState.COMPLETED);
                    }
                } catch (Exception e) {
                    // Persist the failure as a BULK-type SysError linked to this operation.
                    op.setProcessState(BulkOperationState.EXCEPTION);
                    var error = new SysError();
                    error.setType(SysErrorType.BULK);
                    error.setCode(e.getClass().getSimpleName());
                    error.setTitle("Bulk Operation Execution " + op.getId() + " failed");
                    error.setMessage(e.getMessage());
                    error.setUserId(op.getUserId());
                    error.setBulkOperationId(op.getId());
                    error.setTrace(Arrays.stream(e.getStackTrace()).map(sysErrorTransformer::toSysErrorTraceItem).toList());
                    sysErrorRepository.insert(error);
                }
                // Persist the final entity state (and, for exports, the generated file).
                bulkOperationRepository.update(op);
            }
        }
    }
}

View file

@ -0,0 +1,93 @@
package de.avatic.lcc.service.bulk;
import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.dto.bulk.BulkOperationDTO;
import de.avatic.lcc.dto.bulk.BulkOperationState;
import de.avatic.lcc.dto.bulk.BulkProcessingType;
import de.avatic.lcc.model.bulk.BulkOperation;
import de.avatic.lcc.repositories.bulk.BulkOperationRepository;
import de.avatic.lcc.repositories.rates.ValidityPeriodRepository;
import de.avatic.lcc.service.transformer.bulk.BulkOperationTransformer;
import de.avatic.lcc.util.exception.badrequest.FileFormatNotSupportedException;
import de.avatic.lcc.util.exception.base.InternalErrorException;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;
/**
 * Entry point for scheduling bulk import/export operations and querying their
 * status. Operations are persisted first, then handed to the async executor.
 */
@Service
public class BulkOperationService {
    private final BulkOperationRepository bulkOperationRepository;
    private final ValidityPeriodRepository validityPeriodRepository;
    private final BulkOperationTransformer bulkOperationTransformer;
    private final BulkOperationExecutionService bulkOperationExecutionService;
    public BulkOperationService(BulkOperationRepository bulkOperationRepository, ValidityPeriodRepository validityPeriodRepository, BulkOperationTransformer bulkOperationTransformer, BulkOperationExecutionService bulkOperationExecutionService) {
        this.bulkOperationRepository = bulkOperationRepository;
        this.validityPeriodRepository = validityPeriodRepository;
        this.bulkOperationTransformer = bulkOperationTransformer;
        this.bulkOperationExecutionService = bulkOperationExecutionService;
    }
    // True for the accepted upload MIME types: xlsx and legacy xls.
    private static boolean isSupportedExcelContentType(String contentType) {
        return "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet".equals(contentType)
                || "application/vnd.ms-excel".equals(contentType);
    }
    /**
     * Validates and stores an uploaded Excel file as a SCHEDULED import
     * operation, then launches asynchronous processing.
     *
     * @param fileType which bulk data type the upload contains
     * @param file     the uploaded workbook
     * @throws FileFormatNotSupportedException if the MIME type is not an Excel type
     */
    public void processFileImport(BulkFileType fileType, MultipartFile file) {
        int userId = 1; //TODO actual user
        String contentType = file.getContentType();
        if (!isSupportedExcelContentType(contentType)) {
            throw new FileFormatNotSupportedException(contentType);
        }
        try {
            BulkOperation op = new BulkOperation();
            op.setFileType(fileType);
            op.setUserId(userId);
            op.setProcessState(BulkOperationState.SCHEDULED);
            op.setProcessingType(BulkProcessingType.IMPORT);
            op.setFile(file.getInputStream().readAllBytes());
            // Imports are not bound to a validity period.
            op.setValidityPeriodId(null);
            var opId = bulkOperationRepository.insert(op);
            bulkOperationExecutionService.launchExecution(opId);
        } catch (IOException e) {
            //TODO throw a nice domain exception; keep the cause chained either way
            throw new RuntimeException("Unable to read uploaded bulk file", e);
        }
    }
    /**
     * Schedules an export operation for the given type and validity period and
     * launches asynchronous processing.
     */
    public void processFileExport(BulkFileType type, Integer validityPeriodId) {
        int userId = 1; //TODO set actual user id
        BulkOperation op = new BulkOperation();
        op.setUserId(userId);
        op.setFileType(type);
        op.setProcessingType(BulkProcessingType.EXPORT);
        op.setProcessState(BulkOperationState.SCHEDULED);
        op.setValidityPeriodId(validityPeriodId);
        var opId = bulkOperationRepository.insert(op);
        bulkOperationExecutionService.launchExecution(opId);
    }
    /** Convenience overload: exports against the currently VALID period. */
    public void processFileExport(BulkFileType type) {
        processFileExport(type, validityPeriodRepository.getValidPeriodId().orElseThrow(() -> new InternalErrorException("No valid period found that is VALID")));
    }
    /** Lists the current user's operations as DTOs (metadata only). */
    public List<BulkOperationDTO> getStatus() {
        int userId = 1; //TODO actual user
        return bulkOperationRepository.listByUserId(userId).stream().map(bulkOperationTransformer::toBulkOperationDTO).toList();
    }
    /**
     * Loads a single operation including its file payload.
     *
     * @throws java.util.NoSuchElementException if no operation with that id exists
     */
    public BulkOperation getBulkOperation(Integer id) {
        return bulkOperationRepository.getOperationById(id).orElseThrow();
    }
}

View file

@ -1,30 +0,0 @@
package de.avatic.lcc.service.bulk;
import de.avatic.lcc.dto.bulk.BulkProcessingType;
import de.avatic.lcc.dto.bulk.BulkStatusDTO;
import de.avatic.lcc.model.bulk.BulkProcess;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.util.List;
import java.util.Queue;
// NOTE(review): placeholder service — the queue is never populated and
// getStatus() is unimplemented (returns null, not an empty list).
@Service
public class BulkProcessingService {
    // Monotonic counter used as a stand-in for a real operation id.
    int processCount = 0;
    // Declared but never initialized or consumed.
    private Queue<BulkProcess> processes;
    // Returns a fresh pseudo-id for each upload; the file itself is discarded.
    public Integer queueUpload(MultipartFile bulkRequest, BulkProcessingType processingType) {
        return processCount++;
    }
    public List<BulkStatusDTO> getStatus() {
        return null; //TODO implement me
    }
}

View file

@ -49,11 +49,9 @@ public class CalculationExecutionService {
private final ContainerCalculationService containerCalculationService; private final ContainerCalculationService containerCalculationService;
private final ShippingFrequencyCalculationService shippingFrequencyCalculationService; private final ShippingFrequencyCalculationService shippingFrequencyCalculationService;
private final CalculationJobRepository calculationJobRepository; private final CalculationJobRepository calculationJobRepository;
private final CalculationJobDestinationRepository calculationJobDestinationRepository;
private final CalculationJobRouteSectionRepository calculationJobRouteSectionRepository;
public CalculationExecutionService(PremiseRepository premiseRepository, DestinationRepository destinationRepository, RouteRepository routeRepository, RouteSectionRepository routeSectionRepository, CustomCostCalculationService customCostCalculationService, RouteSectionCostCalculationService routeSectionCostCalculationService, HandlingCostCalculationService handlingCostCalculationService, InventoryCostCalculationService inventoryCostCalculationService, PropertyRepository propertyRepository, AirfreightCalculationService airfreightCalculationService, PremiseToHuService premiseToHuService, ContainerCalculationService containerCalculationService, ShippingFrequencyCalculationService shippingFrequencyCalculationService, CalculationJobRepository calculationJobRepository, CalculationJobDestinationRepository calculationJobDestinationRepository, CalculationJobRouteSectionRepository calculationJobRouteSectionRepository) { public CalculationExecutionService(PremiseRepository premiseRepository, DestinationRepository destinationRepository, RouteRepository routeRepository, RouteSectionRepository routeSectionRepository, CustomCostCalculationService customCostCalculationService, RouteSectionCostCalculationService routeSectionCostCalculationService, HandlingCostCalculationService handlingCostCalculationService, InventoryCostCalculationService inventoryCostCalculationService, PropertyRepository propertyRepository, AirfreightCalculationService airfreightCalculationService, PremiseToHuService premiseToHuService, ContainerCalculationService containerCalculationService, ShippingFrequencyCalculationService shippingFrequencyCalculationService, CalculationJobRepository calculationJobRepository) {
this.premiseRepository = premiseRepository; this.premiseRepository = premiseRepository;
this.destinationRepository = destinationRepository; this.destinationRepository = destinationRepository;
this.routeRepository = routeRepository; this.routeRepository = routeRepository;
@ -68,8 +66,7 @@ public class CalculationExecutionService {
this.containerCalculationService = containerCalculationService; this.containerCalculationService = containerCalculationService;
this.shippingFrequencyCalculationService = shippingFrequencyCalculationService; this.shippingFrequencyCalculationService = shippingFrequencyCalculationService;
this.calculationJobRepository = calculationJobRepository; this.calculationJobRepository = calculationJobRepository;
this.calculationJobDestinationRepository = calculationJobDestinationRepository;
this.calculationJobRouteSectionRepository = calculationJobRouteSectionRepository;
} }
private static ContainerType getBestContainerType(Map<ContainerType, List<SectionInfo>> sectionResults) { private static ContainerType getBestContainerType(Map<ContainerType, List<SectionInfo>> sectionResults) {
@ -84,7 +81,7 @@ public class CalculationExecutionService {
} }
@Transactional @Transactional
@Async("taskExecutor") @Async("calculationExecutor")
public CompletableFuture<CalculationResult> launchJobCalculation(Integer calculationId) { public CompletableFuture<CalculationResult> launchJobCalculation(Integer calculationId) {
try { try {
return CompletableFuture.completedFuture(new CalculationResult(calculationId, calculateJob(calculationId))); return CompletableFuture.completedFuture(new CalculationResult(calculationId, calculateJob(calculationId)));

View file

@ -5,7 +5,7 @@ import de.avatic.lcc.dto.error.FrontendErrorDTO;
import de.avatic.lcc.repositories.error.SysErrorRepository; import de.avatic.lcc.repositories.error.SysErrorRepository;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination; import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult; import de.avatic.lcc.repositories.pagination.SearchQueryResult;
import de.avatic.lcc.service.transformer.error.SysErrorMapper; import de.avatic.lcc.service.transformer.error.SysErrorTransformer;
import jakarta.validation.constraints.Min; import jakarta.validation.constraints.Min;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@ -17,18 +17,18 @@ public class SysErrorService {
private final SysErrorRepository sysErrorRepository; private final SysErrorRepository sysErrorRepository;
private final SysErrorMapper sysErrorMapper; private final SysErrorTransformer sysErrorTransformer;
public SysErrorService(SysErrorRepository sysErrorRepository, SysErrorMapper sysErrorMapper) { public SysErrorService(SysErrorRepository sysErrorRepository, SysErrorTransformer sysErrorTransformer) {
this.sysErrorRepository = sysErrorRepository; this.sysErrorRepository = sysErrorRepository;
this.sysErrorMapper = sysErrorMapper; this.sysErrorTransformer = sysErrorTransformer;
} }
public void addErrors(List<FrontendErrorDTO> dto) { public void addErrors(List<FrontendErrorDTO> dto) {
sysErrorRepository.insert(dto.stream().map(sysErrorMapper::toSysErrorEntity).toList()); sysErrorRepository.insert(dto.stream().map(sysErrorTransformer::toSysErrorEntity).toList());
} }
public SearchQueryResult<ErrorLogDTO> listErrors(Optional<String> filter, @Min(1) int page, @Min(1) int limit) { public SearchQueryResult<ErrorLogDTO> listErrors(Optional<String> filter, @Min(1) int page, @Min(1) int limit) {
return SearchQueryResult.map(sysErrorRepository.listErrors(filter, new SearchQueryPagination(page, limit)), sysErrorMapper::toSysErrorDto); return SearchQueryResult.map(sysErrorRepository.listErrors(filter, new SearchQueryPagination(page, limit)), sysErrorTransformer::toSysErrorDto);
} }
} }

View file

@ -1,7 +1,7 @@
package de.avatic.lcc.service.excelMapper; package de.avatic.lcc.service.excelMapper;
import de.avatic.lcc.model.bulk.BulkOperation; import de.avatic.lcc.model.bulk.BulkInstruction;
import de.avatic.lcc.model.bulk.BulkOperationType; import de.avatic.lcc.model.bulk.BulkInstructionType;
import de.avatic.lcc.model.bulk.header.MaterialHeader; import de.avatic.lcc.model.bulk.header.MaterialHeader;
import de.avatic.lcc.model.materials.Material; import de.avatic.lcc.model.materials.Material;
import de.avatic.lcc.repositories.MaterialRepository; import de.avatic.lcc.repositories.MaterialRepository;
@ -37,7 +37,7 @@ public class MaterialExcelMapper {
private void mapToRow(Material material, Row row) { private void mapToRow(Material material, Row row) {
row.createCell(MaterialHeader.OPERATION.ordinal()).setCellValue(BulkOperationType.UPDATE.name()); row.createCell(MaterialHeader.OPERATION.ordinal()).setCellValue(BulkInstructionType.UPDATE.name());
row.createCell(MaterialHeader.PART_NUMBER.ordinal()).setCellValue(material.getPartNumber()); row.createCell(MaterialHeader.PART_NUMBER.ordinal()).setCellValue(material.getPartNumber());
row.createCell(MaterialHeader.DESCRIPTION.ordinal()).setCellValue(material.getName()); row.createCell(MaterialHeader.DESCRIPTION.ordinal()).setCellValue(material.getName());
@ -48,13 +48,13 @@ public class MaterialExcelMapper {
constraintGenerator.createLengthConstraint(sheet, MaterialHeader.PART_NUMBER.ordinal(), 0, 12); constraintGenerator.createLengthConstraint(sheet, MaterialHeader.PART_NUMBER.ordinal(), 0, 12);
constraintGenerator.createLengthConstraint(sheet, MaterialHeader.HS_CODE.ordinal(), 0, 11); constraintGenerator.createLengthConstraint(sheet, MaterialHeader.HS_CODE.ordinal(), 0, 11);
constraintGenerator.createLengthConstraint(sheet, MaterialHeader.DESCRIPTION.ordinal(), 1, 500); constraintGenerator.createLengthConstraint(sheet, MaterialHeader.DESCRIPTION.ordinal(), 1, 500);
constraintGenerator.createEnumConstraint(sheet, MaterialHeader.OPERATION.ordinal(), BulkOperationType.class); constraintGenerator.createEnumConstraint(sheet, MaterialHeader.OPERATION.ordinal(), BulkInstructionType.class);
} }
public List<BulkOperation<Material>> extractSheet(Sheet sheet) { public List<BulkInstruction<Material>> extractSheet(Sheet sheet) {
headerGenerator.validateHeader(sheet, MaterialHeader.class); headerGenerator.validateHeader(sheet, MaterialHeader.class);
var materials = new ArrayList<BulkOperation<Material>>(); var materials = new ArrayList<BulkInstruction<Material>>();
sheet.forEach(row -> { sheet.forEach(row -> {
if(row.getRowNum() == 0) return; if(row.getRowNum() == 0) return;
materials.add(mapToEntity(row)); materials.add(mapToEntity(row));
@ -63,13 +63,13 @@ public class MaterialExcelMapper {
} }
private BulkOperation<Material> mapToEntity(Row row) { private BulkInstruction<Material> mapToEntity(Row row) {
Material entity = new Material(); Material entity = new Material();
constraintGenerator.validateLengthConstraint(row, MaterialHeader.PART_NUMBER.ordinal(), 0, 12); constraintGenerator.validateLengthConstraint(row, MaterialHeader.PART_NUMBER.ordinal(), 0, 12);
constraintGenerator.validateLengthConstraint(row, MaterialHeader.HS_CODE.ordinal(), 0, 11); constraintGenerator.validateLengthConstraint(row, MaterialHeader.HS_CODE.ordinal(), 0, 11);
constraintGenerator.validateLengthConstraint(row, MaterialHeader.DESCRIPTION.ordinal(), 1, 500); constraintGenerator.validateLengthConstraint(row, MaterialHeader.DESCRIPTION.ordinal(), 1, 500);
constraintGenerator.validateEnumConstraint(row, MaterialHeader.OPERATION.ordinal(), BulkOperationType.class); constraintGenerator.validateEnumConstraint(row, MaterialHeader.OPERATION.ordinal(), BulkInstructionType.class);
entity.setPartNumber(row.getCell(MaterialHeader.PART_NUMBER.ordinal()).getStringCellValue()); entity.setPartNumber(row.getCell(MaterialHeader.PART_NUMBER.ordinal()).getStringCellValue());
@ -80,7 +80,7 @@ public class MaterialExcelMapper {
if(!validateHsCode(entity.getHsCode())) throw new IllegalArgumentException("Invalid HS Code"); if(!validateHsCode(entity.getHsCode())) throw new IllegalArgumentException("Invalid HS Code");
return new BulkOperation<>(entity,BulkOperationType.valueOf(row.getCell(MaterialHeader.OPERATION.ordinal()).getStringCellValue())); return new BulkInstruction<>(entity, BulkInstructionType.valueOf(row.getCell(MaterialHeader.OPERATION.ordinal()).getStringCellValue()));
} }
private String normalizePartNumber(String partNumber) { private String normalizePartNumber(String partNumber) {

View file

@ -44,7 +44,7 @@ public class NodeExcelMapper {
} }
private void mapToRow(Node node, Row row) { private void mapToRow(Node node, Row row) {
row.createCell(NodeHeader.OPERATION.ordinal()).setCellValue(BulkOperationType.UPDATE.name()); row.createCell(NodeHeader.OPERATION.ordinal()).setCellValue(BulkInstructionType.UPDATE.name());
row.createCell(NodeHeader.MAPPING_ID.ordinal()).setCellValue(node.getExternalMappingId()); row.createCell(NodeHeader.MAPPING_ID.ordinal()).setCellValue(node.getExternalMappingId());
row.createCell(NodeHeader.NAME.ordinal()).setCellValue(node.getName()); row.createCell(NodeHeader.NAME.ordinal()).setCellValue(node.getName());
row.createCell(NodeHeader.ADDRESS.ordinal()).setCellValue(node.getAddress()); row.createCell(NodeHeader.ADDRESS.ordinal()).setCellValue(node.getAddress());
@ -86,14 +86,14 @@ public class NodeExcelMapper {
constraintGenerator.createLengthConstraint(sheet, NodeHeader.ADDRESS.ordinal(), 1, 500); constraintGenerator.createLengthConstraint(sheet, NodeHeader.ADDRESS.ordinal(), 1, 500);
constraintGenerator.createLengthConstraint(sheet, NodeHeader.NAME.ordinal(), 1, 255); constraintGenerator.createLengthConstraint(sheet, NodeHeader.NAME.ordinal(), 1, 255);
constraintGenerator.createEnumConstraint(sheet, NodeHeader.OPERATION.ordinal(), BulkOperationType.class); constraintGenerator.createEnumConstraint(sheet, NodeHeader.OPERATION.ordinal(), BulkInstructionType.class);
} }
public List<BulkOperation<ExcelNode>> extractSheet(Sheet sheet) { public List<BulkInstruction<ExcelNode>> extractSheet(Sheet sheet) {
headerGenerator.validateHeader(sheet, NodeHeader.class); headerGenerator.validateHeader(sheet, NodeHeader.class);
var nodes = new ArrayList<BulkOperation<ExcelNode>>(); var nodes = new ArrayList<BulkInstruction<ExcelNode>>();
sheet.forEach(row -> { sheet.forEach(row -> {
if (row.getRowNum() == 0) return; if (row.getRowNum() == 0) return;
nodes.add(mapToEntity(row)); nodes.add(mapToEntity(row));
@ -101,7 +101,7 @@ public class NodeExcelMapper {
return nodes; return nodes;
} }
private BulkOperation<ExcelNode> mapToEntity(Row row) { private BulkInstruction<ExcelNode> mapToEntity(Row row) {
ExcelNode entity = new ExcelNode(); ExcelNode entity = new ExcelNode();
validateConstraints(row); validateConstraints(row);
@ -119,7 +119,7 @@ public class NodeExcelMapper {
entity.setNodePredecessors(mapChainsFromCell(row.getCell(NodeHeader.PREDECESSOR_NODES.ordinal()).getStringCellValue())); entity.setNodePredecessors(mapChainsFromCell(row.getCell(NodeHeader.PREDECESSOR_NODES.ordinal()).getStringCellValue()));
entity.setOutboundCountries(mapOutboundCountriesFromCell(row.getCell(NodeHeader.OUTBOUND_COUNTRIES.ordinal()).getStringCellValue())); entity.setOutboundCountries(mapOutboundCountriesFromCell(row.getCell(NodeHeader.OUTBOUND_COUNTRIES.ordinal()).getStringCellValue()));
return new BulkOperation<>(entity, BulkOperationType.valueOf(row.getCell(NodeHeader.OPERATION.ordinal()).getStringCellValue())); return new BulkInstruction<>(entity, BulkInstructionType.valueOf(row.getCell(NodeHeader.OPERATION.ordinal()).getStringCellValue()));
} }
@ -140,7 +140,7 @@ public class NodeExcelMapper {
constraintGenerator.validateBooleanConstraint(row, NodeHeader.IS_PREDECESSOR_MANDATORY.ordinal()); constraintGenerator.validateBooleanConstraint(row, NodeHeader.IS_PREDECESSOR_MANDATORY.ordinal());
constraintGenerator.validateLengthConstraint(row, NodeHeader.ADDRESS.ordinal(), 1, 500); constraintGenerator.validateLengthConstraint(row, NodeHeader.ADDRESS.ordinal(), 1, 500);
constraintGenerator.validateLengthConstraint(row, NodeHeader.NAME.ordinal(), 1, 255); constraintGenerator.validateLengthConstraint(row, NodeHeader.NAME.ordinal(), 1, 255);
constraintGenerator.validateEnumConstraint(row, NodeHeader.OPERATION.ordinal(), BulkOperationType.class); constraintGenerator.validateEnumConstraint(row, NodeHeader.OPERATION.ordinal(), BulkInstructionType.class);
} }

View file

@ -61,7 +61,7 @@ public class PackagingExcelMapper {
Optional<PackagingDimension> shu = packagingDimensionRepository.getById(packaging.getShuId()); Optional<PackagingDimension> shu = packagingDimensionRepository.getById(packaging.getShuId());
Optional<PackagingDimension> hu = packagingDimensionRepository.getById(packaging.getShuId()); Optional<PackagingDimension> hu = packagingDimensionRepository.getById(packaging.getShuId());
row.createCell(PackagingHeader.OPERATION.ordinal()).setCellValue(BulkOperationType.UPDATE.name()); row.createCell(PackagingHeader.OPERATION.ordinal()).setCellValue(BulkInstructionType.UPDATE.name());
row.createCell(PackagingHeader.PART_NUMBER.ordinal()).setCellValue(materialRepository.getByIdIncludeDeprecated(packaging.getMaterialId()).orElseThrow().getPartNumber()); row.createCell(PackagingHeader.PART_NUMBER.ordinal()).setCellValue(materialRepository.getByIdIncludeDeprecated(packaging.getMaterialId()).orElseThrow().getPartNumber());
row.createCell(PackagingHeader.SUPPLIER.ordinal()).setCellValue(nodeRepository.getById(packaging.getSupplierId()).orElseThrow().getExternalMappingId()); row.createCell(PackagingHeader.SUPPLIER.ordinal()).setCellValue(nodeRepository.getById(packaging.getSupplierId()).orElseThrow().getExternalMappingId());
@ -123,7 +123,7 @@ public class PackagingExcelMapper {
constraintGenerator.createEnumConstraint(sheet, PackagingHeader.HU_DIMENSION_UNIT.ordinal(), DimensionUnit.class); constraintGenerator.createEnumConstraint(sheet, PackagingHeader.HU_DIMENSION_UNIT.ordinal(), DimensionUnit.class);
constraintGenerator.createEnumConstraint(sheet, PackagingHeader.HU_WEIGHT_UNIT.ordinal(), WeightUnit.class); constraintGenerator.createEnumConstraint(sheet, PackagingHeader.HU_WEIGHT_UNIT.ordinal(), WeightUnit.class);
constraintGenerator.createEnumConstraint(sheet, PackagingHeader.OPERATION.ordinal(), BulkOperationType.class); constraintGenerator.createEnumConstraint(sheet, PackagingHeader.OPERATION.ordinal(), BulkInstructionType.class);
//TODO: check hu dimensions... //TODO: check hu dimensions...
@ -191,7 +191,7 @@ public class PackagingExcelMapper {
constraintGenerator.validateEnumConstraint(row, PackagingHeader.SHU_WEIGHT_UNIT.ordinal(), WeightUnit.class); constraintGenerator.validateEnumConstraint(row, PackagingHeader.SHU_WEIGHT_UNIT.ordinal(), WeightUnit.class);
constraintGenerator.validateEnumConstraint(row, PackagingHeader.HU_DIMENSION_UNIT.ordinal(), DimensionUnit.class); constraintGenerator.validateEnumConstraint(row, PackagingHeader.HU_DIMENSION_UNIT.ordinal(), DimensionUnit.class);
constraintGenerator.validateEnumConstraint(row, PackagingHeader.HU_WEIGHT_UNIT.ordinal(), WeightUnit.class); constraintGenerator.validateEnumConstraint(row, PackagingHeader.HU_WEIGHT_UNIT.ordinal(), WeightUnit.class);
constraintGenerator.validateEnumConstraint(row, PackagingHeader.OPERATION.ordinal(), BulkOperationType.class); constraintGenerator.validateEnumConstraint(row, PackagingHeader.OPERATION.ordinal(), BulkInstructionType.class);
} }
} }

View file

@ -0,0 +1,26 @@
package de.avatic.lcc.service.transformer.bulk;
import de.avatic.lcc.dto.bulk.BulkOperationDTO;
import de.avatic.lcc.model.bulk.BulkOperation;
import org.springframework.stereotype.Service;
@Service
@Service
public class BulkOperationTransformer {

    /**
     * Maps a persisted {@link BulkOperation} entity to its API representation.
     * Copies id, file type, processing type, process state and creation timestamp.
     *
     * @param entity the stored bulk operation
     * @return a freshly populated {@link BulkOperationDTO}
     */
    public BulkOperationDTO toBulkOperationDTO(BulkOperation entity) {
        var dto = new BulkOperationDTO();
        dto.setCreatedAt(entity.getCreatedAt());
        dto.setState(entity.getProcessState());
        dto.setProcessingType(entity.getProcessingType());
        dto.setFileType(entity.getFileType());
        dto.setId(entity.getId());
        return dto;
    }
}

View file

@ -14,7 +14,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@Service @Service
public class SysErrorMapper { public class SysErrorTransformer {
private static final String TRACE_REGEX = "at\\s+(?:async\\s+)?(?:(.+?)\\s+)?\\(([^?]+(?:\\?[^:]*)?):(\\d+):\\d+\\)"; private static final String TRACE_REGEX = "at\\s+(?:async\\s+)?(?:(.+?)\\s+)?\\(([^?]+(?:\\?[^:]*)?):(\\d+):\\d+\\)";
@ -43,7 +43,7 @@ public class SysErrorMapper {
} else { } else {
entity.setType(SysErrorType.BACKEND); entity.setType(SysErrorType.BACKEND);
entity.setTrace(toSysErrorTraceItem(frontendErrorDTO.getError().getTrace())); entity.setTrace(frontendErrorDTO.getError().getTrace().stream().map(this::toSysErrorTraceItem).toList());
} }
return entity; return entity;
@ -74,20 +74,17 @@ public class SysErrorMapper {
return items; return items;
} }
private List<SysErrorTraceItem> toSysErrorTraceItem(List<StackTraceElement> trace) { public SysErrorTraceItem toSysErrorTraceItem(StackTraceElement traceElement) {
List<SysErrorTraceItem> items = new ArrayList<>();
for(var traceElement : trace) {
SysErrorTraceItem item = new SysErrorTraceItem(); SysErrorTraceItem item = new SysErrorTraceItem();
item.setFile(traceElement.getFileName()); item.setFile(traceElement.getFileName());
item.setLine(traceElement.getLineNumber()); item.setLine(traceElement.getLineNumber());
item.setMethod(traceElement.getMethodName()); item.setMethod(traceElement.getMethodName());
item.setFullPath("at " + traceElement.getClassName() + "." + traceElement.getMethodName()); item.setFullPath("at " + traceElement.getClassName() + "." + traceElement.getMethodName());
items.add(item);
}
return items;
return item;
} }

View file

@ -2,12 +2,17 @@ package de.avatic.lcc.service.transformer.generic;
import de.avatic.lcc.dto.generic.NodeDTO; import de.avatic.lcc.dto.generic.NodeDTO;
import de.avatic.lcc.dto.generic.NodeType; import de.avatic.lcc.dto.generic.NodeType;
import de.avatic.lcc.excelmodel.ExcelNode;
import de.avatic.lcc.model.country.Country;
import de.avatic.lcc.model.country.IsoCode;
import de.avatic.lcc.model.nodes.Node; import de.avatic.lcc.model.nodes.Node;
import de.avatic.lcc.model.premises.route.RouteNode; import de.avatic.lcc.model.premises.route.RouteNode;
import de.avatic.lcc.repositories.NodeRepository;
import de.avatic.lcc.repositories.country.CountryRepository; import de.avatic.lcc.repositories.country.CountryRepository;
import de.avatic.lcc.util.exception.internalerror.ExcelValidationError;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.ArrayList; import java.util.*;
@Service @Service
public class NodeTransformer { public class NodeTransformer {
@ -15,11 +20,13 @@ public class NodeTransformer {
private final CountryTransformer countryTransformerService; private final CountryTransformer countryTransformerService;
private final CountryRepository countryRepository; private final CountryRepository countryRepository;
private final LocationTransformer locationTransformer; private final LocationTransformer locationTransformer;
private final NodeRepository nodeRepository;
public NodeTransformer(CountryTransformer countryTransformerService, CountryRepository countryRepository, LocationTransformer locationTransformer) { public NodeTransformer(CountryTransformer countryTransformerService, CountryRepository countryRepository, LocationTransformer locationTransformer, NodeRepository nodeRepository) {
this.countryTransformerService = countryTransformerService; this.countryTransformerService = countryTransformerService;
this.countryRepository = countryRepository; this.countryRepository = countryRepository;
this.locationTransformer = locationTransformer; this.locationTransformer = locationTransformer;
this.nodeRepository = nodeRepository;
} }
public NodeDTO toNodeDTO(Node entity) { public NodeDTO toNodeDTO(Node entity) {
@ -64,4 +71,93 @@ public class NodeTransformer {
return dto; return dto;
} }
/**
 * Converts an Excel-imported row ({@link ExcelNode}) into a persistable {@link Node}.
 * Validates the node's own country, its outbound-country list and its predecessor
 * chains before returning; the entity is always created non-deprecated.
 *
 * @param excelNode the parsed spreadsheet row
 * @return a populated {@code Node} entity ready for persistence
 * @throws ExcelValidationError if the country code is unknown, an outbound country
 *         is unknown, a predecessor id cannot be resolved, a predecessor chain
 *         contains the node itself, or a chain contains duplicate nodes
 */
public Node toNodeEntity(ExcelNode excelNode) {
    Node entity = new Node();
    // Resolve the node's own country by ISO code; emptiness is checked further down.
    var countryId = countryRepository.getByIsoCode(excelNode.getCountryId());
    // NOTE(review): IsoCode.valueOf throws IllegalArgumentException for names not in
    // the enum, bypassing the friendlier ExcelValidationError below — confirm the
    // Excel layer guarantees valid enum names, or catch and rewrap here.
    var isoCodes = excelNode.getOutboundCountries().stream().map(IsoCode::valueOf).toList();
    var outboundCountry = countryRepository.getByIsoCodes(isoCodes);
    // Any requested outbound ISO code with no matching country row is unknown.
    var unknownCountries = isoCodes.stream().filter(c -> outboundCountry.stream().noneMatch(o -> o.getIsoCode().equals(c))).toList();
    if (!unknownCountries.isEmpty()) {
        throw new ExcelValidationError("Outbound country list contains unknown country code:" + unknownCountries);
    }
    if (countryId.isEmpty()) {
        throw new ExcelValidationError("Country with iso code " + excelNode.getCountryId() + " does not exist.");
    }
    // Straight field copy from the spreadsheet model onto the entity.
    entity.setId(excelNode.getId());
    entity.setName(excelNode.getName());
    entity.setAddress(excelNode.getAddress());
    entity.setCountryId(countryId.get().getId());
    entity.setGeoLat(excelNode.getGeoLat());
    entity.setGeoLng(excelNode.getGeoLng());
    entity.setSource(excelNode.getSource());
    entity.setIntermediate(excelNode.getIntermediate());
    entity.setDestination(excelNode.getDestination());
    entity.setPredecessorRequired(excelNode.getPredecessorRequired());
    // Imported nodes are always active; deprecation happens elsewhere.
    entity.setDeprecated(false);
    entity.setOutboundCountries(outboundCountry.stream().map(Country::getId).toList());
    // Translate external mapping ids to internal node ids, then sanity-check the chains.
    entity.setNodePredecessors(convertPredecessors(excelNode.getNodePredecessors()));
    checkPredecessors(excelNode, entity.getNodePredecessors());
    return entity;
}
/**
 * Resolves predecessor chains given as external mapping ids into maps of
 * {@code position -> internal node id} (positions start at 1).
 *
 * @param mappingIdLists one list of external mapping ids per predecessor chain
 * @return one ordered position-to-node-id map per input chain
 * @throws ExcelValidationError if any mapping id has no matching node
 */
private List<Map<Integer, Integer>> convertPredecessors(List<List<String>> mappingIdLists) {
    // Bulk-resolve every distinct external id with a single repository call.
    var distinctExternalIds = mappingIdLists.stream().flatMap(List::stream).distinct().toList();
    var idLookup = nodeRepository.getIdsByExternalMappingIds(distinctExternalIds);
    var result = new ArrayList<Map<Integer, Integer>>();
    for (var chain : mappingIdLists) {
        var ordered = new HashMap<Integer, Integer>();
        int position = 1;
        for (var externalId : chain) {
            var internalId = idLookup.get(externalId);
            if (internalId == null) {
                throw new ExcelValidationError("Predecessor list contains unknown node id: " + externalId + ".");
            }
            ordered.put(position, internalId);
            position++;
        }
        result.add(ordered);
    }
    return result;
}
/**
 * Validates resolved predecessor chains: a node may not list itself as a
 * predecessor, and a chain may not contain the same node twice (a loop).
 *
 * @param excelNode        the source row, used for its id and error messages
 * @param predecessorLists resolved chains as {@code position -> node id} maps
 * @throws ExcelValidationError on self-reference or duplicates within a chain
 */
private void checkPredecessors(ExcelNode excelNode, List<Map<Integer, Integer>> predecessorLists) {
    for (var chain : predecessorLists) {
        if (chain.containsValue(excelNode.getId())) {
            throw new ExcelValidationError("Predecessor list of node " + excelNode.getExternalMappingId() + " contains itself.");
        }
        var repeated = findDuplicates(chain.values());
        if (!repeated.isEmpty()) {
            throw new ExcelValidationError(String.format("Predecessor list of node '%s' contains loop caused by: %s", excelNode.getExternalMappingId(), repeated));
        }
    }
}
/**
 * Returns the set of elements that occur more than once in the given collection.
 * Duplicates are detected by {@code equals}/{@code hashCode}; each duplicate
 * element appears exactly once in the result regardless of how often it repeats.
 *
 * @param <T>        element type
 * @param collection the elements to scan
 * @return the distinct elements occurring at least twice (empty if none)
 */
public static <T> Set<T> findDuplicates(Collection<T> collection) {
    // Count occurrences, then keep every element seen more than once.
    Map<T, Integer> occurrences = new HashMap<>();
    for (T element : collection) {
        occurrences.merge(element, 1, Integer::sum);
    }
    Set<T> duplicates = new HashSet<>();
    for (var entry : occurrences.entrySet()) {
        if (entry.getValue() > 1) {
            duplicates.add(entry.getKey());
        }
    }
    return duplicates;
}
} }

View file

@ -567,16 +567,20 @@ CREATE TABLE IF NOT EXISTS calculation_job_route_section
CREATE TABLE IF NOT EXISTS bulk_operation CREATE TABLE IF NOT EXISTS bulk_operation
( (
id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
user_id INT NOT NULL, user_id INT NOT NULL,
bulk_file_type CHAR(32) NOT NULL, bulk_file_type CHAR(32) NOT NULL,
state CHAR(10) NOT NULL, bulk_processing_type CHAR(32) NOT NULL,
file LONGBLOB NOT NULL, state CHAR(10) NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, file LONGBLOB DEFAULT NULL,
validity_period_id INT DEFAULT NULL,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES sys_user (id), FOREIGN KEY (user_id) REFERENCES sys_user (id),
FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
CONSTRAINT chk_bulk_file_type CHECK (bulk_file_type IN CONSTRAINT chk_bulk_file_type CHECK (bulk_file_type IN
('CONTAINER_RATE', 'COUNTRY_MATRIX', 'MATERIAL', 'PACKAGING', 'NODE')), ('CONTAINER_RATE', 'COUNTRY_MATRIX', 'MATERIAL', 'PACKAGING', 'NODE')),
CONSTRAINT chk_bulk_operation_state CHECK (state IN ('SCHEDULED', 'PROCESSING', 'COMPLETED', 'EXCEPTION')) CONSTRAINT chk_bulk_operation_state CHECK (state IN ('SCHEDULED', 'PROCESSING', 'COMPLETED', 'EXCEPTION')),
CONSTRAINT chk_bulk_processing_type CHECK (bulk_processing_type IN ('IMPORT', 'EXPORT'))
); );
CREATE TABLE IF NOT EXISTS sys_error CREATE TABLE IF NOT EXISTS sys_error
@ -603,7 +607,7 @@ CREATE TABLE IF NOT EXISTS sys_error_trace_item
( (
id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
error_id INT NOT NULL, error_id INT NOT NULL,
line INT UNSIGNED NOT NULL, line INT,
file VARCHAR(255) NOT NULL, file VARCHAR(255) NOT NULL,
method VARCHAR(255) NOT NULL, method VARCHAR(255) NOT NULL,
fullPath VARCHAR(512) NOT NULL, fullPath VARCHAR(512) NOT NULL,