Compare commits: main...feature/sy (44 commits)
| SHA1 |
|---|
| f5683dc5c3 |
| b674b8f477 |
| c727bbccc2 |
| adf3666430 |
| b389480cc8 |
| 00dc7e9843 |
| 26986b1131 |
| b708af177f |
| f86e2fb1d8 |
| ae83b0845c |
| 9dca1e8abb |
| 8794a8a193 |
| 15854e1076 |
| 8e428af4d2 |
| 21d00b8756 |
| 96715562e6 |
| 8d08fedbc4 |
| a381ca7ef8 |
| ffc08ebff6 |
| 52116be1c3 |
| 5c8165c60e |
| a5fd03cc68 |
| 3f8453f93b |
| 1a5a00e111 |
| 861c5e7bbc |
| 6fc0839320 |
| 919c9d0499 |
| c25f00bb01 |
| 8e6cc8cf07 |
| 5fb025e4b3 |
| e53f865210 |
| b1d46c1057 |
| 1baf3111aa |
| cd411d8b01 |
| 48ce77dad3 |
| 5b2018c9e0 |
| 28ee19d654 |
| 0d4fb1f04f |
| 5866f8edc8 |
| eff5d26ea3 |
| 1084c5b1cd |
| 29675f9ff4 |
| 10a8cfa72b |
| 417221eca8 |
141 changed files with 63559 additions and 1192 deletions
.claude/settings.local.json — new file, 21 lines

@@ -0,0 +1,21 @@
{
  "permissions": {
    "allow": [
      "Bash(tree:*)",
      "Bash(xargs:*)",
      "Bash(mvn compile:*)",
      "Bash(mvn test-compile:*)",
      "Bash(find:*)",
      "Bash(mvn test:*)",
      "Bash(tee:*)",
      "Bash(export TESTCONTAINERS_RYUK_DISABLED=true)",
      "Bash(echo:*)",
      "Bash(pgrep:*)",
      "Bash(pkill:*)",
      "Bash(ls:*)",
      "Bash(sleep 120 echo \"=== Screenshots generated so far ===\" ls -la target/screenshots/case_*.png)",
      "Bash(wc:*)",
      "Bash(export DOCKER_HOST=unix:///run/user/1000/podman/podman.sock)"
    ]
  }
}
.gitea/workflows/test.yml — new file, 74 lines

@@ -0,0 +1,74 @@
name: Tests

on:
  push:
    branches: [main, dev]
  pull_request:
    branches: [main]

env:
  ALLURE_SERVER: "http://10.80.0.6:5050"
  ALLURE_PROJECT: "lcc"

jobs:
  test:
    runs-on: ubuntu-latest
    container:
      image: catthehacker/ubuntu:act-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Java 23
        uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '23'
          cache: 'maven'

      - name: Run Tests
        run: mvn verify -B --no-transfer-progress
        env:
          TESTCONTAINERS_RYUK_DISABLED: "true"

      - name: Prepare Allure Results
        if: always()
        run: |
          cat > target/allure-results/executor.json << EOF
          {
            "name": "Gitea Actions",
            "type": "gitea",
            "buildName": "#${{ gitea.run_number }}",
            "buildOrder": ${{ gitea.run_number }},
            "buildUrl": "${{ gitea.server_url }}/${{ gitea.repository }}/actions/runs/${{ gitea.run_id }}"
          }
          EOF

      - name: Upload to Allure
        if: always()
        run: |
          # Create the project if it does not exist yet
          curl -s -o /dev/null \
            -u admin:${{ secrets.ALLURE_PASSWORD }} \
            -X POST "${ALLURE_SERVER}/allure-docker-service/projects" \
            -H "Content-Type: application/json" \
            -d '{"id": "'${ALLURE_PROJECT}'"}' || true

          # Clean up old results
          curl -s \
            -u admin:${{ secrets.ALLURE_PASSWORD }} \
            "${ALLURE_SERVER}/allure-docker-service/clean-results?project_id=${ALLURE_PROJECT}"

          # Upload results
          for f in target/allure-results/*; do
            [ -f "$f" ] && curl -s \
              -u admin:${{ secrets.ALLURE_PASSWORD }} \
              -X POST "${ALLURE_SERVER}/allure-docker-service/send-results?project_id=${ALLURE_PROJECT}" \
              -F "results[]=@$f"
          done

          # Generate the report
          curl -s \
            -u admin:${{ secrets.ALLURE_PASSWORD }} \
            "${ALLURE_SERVER}/allure-docker-service/generate-report?project_id=${ALLURE_PROJECT}"
.gitignore (vendored) — 1 addition

@@ -14,6 +14,7 @@ target/
 .sts4-cache
 .env.example
 /.env
+/.env.*

 ### IntelliJ IDEA ###
 .idea
CLAUDE.md — new file, 585 lines

@@ -0,0 +1,585 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Project Overview

LCC (Logistic Cost Calculator) is a Spring Boot 3.5.9 backend API for calculating complex logistics costs across supply chain networks. It handles materials, packaging, transportation rates, route planning, and multi-component cost calculations including customs duties, handling, inventory, and risk assessment.

**Database Support:** The application supports both **MySQL 8.0** and **MSSQL Server 2022** through a database abstraction layer (`SqlDialectProvider`), allowing deployment flexibility across different database platforms.

## Build & Run Commands

```bash
# Build the project
mvn clean install

# Run the application (default: MySQL)
mvn spring-boot:run

# Run with MSSQL
mvn spring-boot:run -Dspring.profiles.active=mssql

# Run all tests on MySQL
mvn test -Dspring.profiles.active=test,mysql

# Run all tests on MSSQL
mvn test -Dspring.profiles.active=test,mssql

# Run repository integration tests on both databases
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mysql
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mssql

# Run a specific test class
mvn test -Dtest=NodeControllerIntegrationTest

# Run a specific test method
mvn test -Dtest=NodeControllerIntegrationTest#shouldReturnListOfNodesWithDefaultPagination

# Skip tests during build
mvn clean install -DskipTests

# Generate JAXB classes from WSDL (EU taxation service)
mvn jaxb:generate
```

## Development Environment (Distrobox)

**IMPORTANT:** This project runs inside a **Distrobox** container. This affects how TestContainers and Podman work.

### TestContainers with Distrobox + Podman

TestContainers needs access to the **host's Podman socket**, not the one inside the Distrobox. The configuration is handled via `~/.testcontainers.properties`:

```properties
docker.host=unix:///run/host/run/user/1000/podman/podman.sock
ryuk.disabled=true
```

### Troubleshooting TestContainers / Podman Issues

If tests fail with "Could not find a valid Docker environment":

1. **Check if Podman works on the host:**
   ```bash
   distrobox-host-exec podman info
   ```

2. **If you see cgroup or UID/GID errors, run migration on the host:**
   ```bash
   distrobox-host-exec podman system migrate
   ```

3. **Restart podman socket on host if needed:**
   ```bash
   distrobox-host-exec systemctl --user restart podman.socket
   ```

4. **Verify the host socket is accessible from Distrobox:**
   ```bash
   ls -la /run/host/run/user/1000/podman/podman.sock
   ```

5. **Test container execution via host:**
   ```bash
   distrobox-host-exec podman run --rm hello-world
   ```

### Key Paths

| Path | Description |
|------|-------------|
| `/run/host/run/user/1000/podman/podman.sock` | Host's Podman socket (accessible from Distrobox) |
| `~/.testcontainers.properties` | TestContainers configuration file |

## Architecture

### Layered Architecture

```
Controllers → DTOs → Services → Transformers → Repositories → SqlDialectProvider → Database (MySQL/MSSQL)
```

### Package Structure (`de.avatic.lcc`)

- **controller/** - REST endpoints organized by domain (calculation, configuration, bulk, users, report)
- **service/access/** - Business logic for domain entities (PremisesService, MaterialService, NodeService, etc.)
- **service/calculation/** - Logistics cost calculation orchestration and step services
- **service/calculation/execution/steps/** - Individual calculation components (airfreight, handling, inventory, customs, etc.)
- **service/bulk/** - Excel-based bulk import/export operations
- **service/api/** - External API integrations (Azure Maps geocoding, EU taxation)
- **service/transformer/** - Entity-to-DTO mapping
- **repositories/** - JDBC-based data access (not JPA) with custom RowMappers
- **database/dialect/** - Database abstraction layer (SqlDialectProvider, MySQLDialectProvider, MSSQLDialectProvider)
- **model/db/** - Database entity classes
- **dto/** - Data transfer objects for API contracts

### Key Design Decisions

- **JDBC over JPA**: Uses `JdbcTemplate` and `NamedParameterJdbcTemplate` for complex queries
- **SqlDialectProvider abstraction**: Database-agnostic SQL through dialect-specific implementations (MySQL/MSSQL)
- **Transformer layer**: Explicit DTO mapping keeps entities separate from API contracts
- **Calculation chain**: Cost calculations broken into fine-grained services in `execution/steps/`
- **Profile-based configuration**: Spring profiles for environment-specific database selection

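The named-parameter style from the first bullet is not shown anywhere else in this file, so here is a minimal sketch; the table and column names are illustrative assumptions, not taken from the actual schema.

```java
import java.util.List;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

// Minimal sketch of NamedParameterJdbcTemplate usage; the country_code column
// and the query itself are illustrative assumptions, not taken from the schema.
public class NodeNameQuerySketch {

    private final NamedParameterJdbcTemplate jdbc;

    public NodeNameQuerySketch(NamedParameterJdbcTemplate jdbc) {
        this.jdbc = jdbc;
    }

    public List<String> findNodeNamesByCountry(String countryCode) {
        String sql = "SELECT name FROM node WHERE country_code = :country ORDER BY name";
        return jdbc.queryForList(sql, new MapSqlParameterSource("country", countryCode), String.class);
    }
}
```
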
### Core Calculation Flow

```
CalculationExecutionService.launchJobCalculation()
  → ContainerCalculationService (container type selection: FEU/TEU/HC/TRUCK)
  → RouteSectionCostCalculationService (per-section costs)
    → AirfreightCalculationService
    → HandlingCostCalculationService
    → InventoryCostCalculationService
    → CustomCostCalculationService (tariff/duties)
```

### Authorization Model

Role-based access control via `@PreAuthorize` annotations:

- SUPER, CALCULATION, MATERIAL, FREIGHT, PACKAGING, BASIC

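A minimal sketch of how such a role check typically looks on a controller method; the endpoint, return type, and exact SpEL expressions are assumptions — only the `@PreAuthorize` mechanism and the role names come from this section.

```java
import java.util.List;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical controller: mapping, return type and role expressions are assumptions;
// real controllers delegate to the service layer (e.g. MaterialService).
@RestController
@RequestMapping("/api/materials")
public class MaterialReadControllerSketch {

    @GetMapping
    @PreAuthorize("hasAnyRole('SUPER', 'MATERIAL', 'BASIC')")
    public List<String> listMaterialNumbers() {
        return List.of();
    }
}
```
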
## Testing

### Test Architecture

**Integration Test Base Class:**

All repository integration tests extend `AbstractRepositoryIntegrationTest`, which provides:

- `JdbcTemplate` for test data setup
- `SqlDialectProvider` for database-agnostic SQL
- Helper methods: `isMysql()`, `isMssql()`, `executeRawSql()`
- Automatic TestContainers setup via `@Testcontainers`
- Transaction isolation via `@Transactional`

**TestContainers Setup:**

```java
@SpringBootTest(classes = {RepositoryTestConfig.class})
@Testcontainers
@Import(DatabaseTestConfiguration.class)
@Transactional
public abstract class AbstractRepositoryIntegrationTest {
    @Autowired
    protected JdbcTemplate jdbcTemplate;

    @Autowired
    protected SqlDialectProvider dialectProvider;

    protected boolean isMysql() {
        return getDatabaseProfile().contains("mysql");
    }

    protected void executeRawSql(String sql, Object... params) {
        jdbcTemplate.update(sql, params);
    }
}
```

**DatabaseTestConfiguration:**

- MySQL: `MySQLContainer` with `mysql:8.0` image
- MSSQL: `MSSQLServerContainer` with `mcr.microsoft.com/mssql/server:2022-latest` image
- Profile-based activation via `@Profile("mysql")` and `@Profile("mssql")`

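A minimal sketch of what such a profile-switched configuration can look like; the images and profiles come from the bullets above, while the use of `@ServiceConnection` (rather than, say, `@DynamicPropertySource`) is an assumption.

```java
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Profile;
import org.testcontainers.containers.MSSQLServerContainer;
import org.testcontainers.containers.MySQLContainer;
import org.testcontainers.utility.DockerImageName;

// Sketch only: the real DatabaseTestConfiguration may wire the datasource differently.
@TestConfiguration(proxyBeanMethods = false)
class DatabaseTestConfigurationSketch {

    @Bean
    @Profile("mysql")
    @ServiceConnection
    MySQLContainer<?> mysqlContainer() {
        return new MySQLContainer<>(DockerImageName.parse("mysql:8.0"));
    }

    @Bean
    @Profile("mssql")
    @ServiceConnection
    MSSQLServerContainer<?> mssqlContainer() {
        return new MSSQLServerContainer<>(DockerImageName.parse("mcr.microsoft.com/mssql/server:2022-latest"))
                .acceptLicense();
    }
}
```
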
### Database-Agnostic Test Patterns

**Pattern 1: Boolean literals in test data**
```java
String sql = String.format(
    "INSERT INTO node (name, is_active) VALUES (?, %s)",
    dialectProvider.getBooleanTrue());
```

**Pattern 2: Auto-increment ID retrieval**
```java
executeRawSql("INSERT INTO table (name) VALUES (?)", name);
String selectSql = isMysql() ? "SELECT LAST_INSERT_ID()" : "SELECT CAST(@@IDENTITY AS INT)";
return jdbcTemplate.queryForObject(selectSql, Integer.class);
```

**Pattern 3: Date functions**
```java
String dateFunc = isMysql() ? "NOW()" : "GETDATE()";
String sql = String.format("INSERT INTO table (created_at) VALUES (%s)", dateFunc);
```

### Running Tests

**Run all tests on MySQL:**
```bash
mvn test -Dspring.profiles.active=test,mysql
```

**Run all tests on MSSQL:**
```bash
mvn test -Dspring.profiles.active=test,mssql
```

**Run specific repository tests:**
```bash
mvn test -Dtest=CalculationJobRepositoryIntegrationTest -Dspring.profiles.active=test,mysql
mvn test -Dtest=CalculationJobRepositoryIntegrationTest -Dspring.profiles.active=test,mssql
```

**Run all repository integration tests on both databases:**
```bash
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mysql
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mssql
```

### Test Coverage

**Current Status (as of Phase 6 completion):**

- **365 tests** passing on both MySQL and MSSQL (100% success rate)
- **28 repository integration test classes** covering:
  - Calculation repositories (CalculationJobRepository, CalculationJobDestinationRepository, CalculationJobRouteSectionRepository)
  - Configuration repositories (NodeRepository, MaterialRepository, PackagingRepository, CountryRepository)
  - Rate repositories (ContainerRateRepository, MatrixRateRepository)
  - Property repositories (PropertyRepository, CountryPropertyRepository, PackagingPropertiesRepository)
  - User repositories (UserRepository, GroupRepository)
  - Bulk operation repositories (BulkOperationRepository)
  - And 14 additional repositories

**Test Data:**

- `@Sql` annotations for controller integration tests from `src/test/resources/master_data/`
- Repository tests use inline SQL with `executeRawSql()` for database-agnostic test data setup
- Test data cleanup in `@BeforeEach` respects foreign key constraints

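A minimal sketch of the `@Sql`-based setup; only the mechanism and the `master_data` location come from this document, the test class and assertion are placeholders.

```java
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.jdbc.Sql;

// Hypothetical test class; a real controller integration test would call the
// endpoint under test and assert on the rows imported by the script below.
@SpringBootTest
@Sql(scripts = "/master_data/users.sql")
class UserEndpointMasterDataSketchTest {

    @Test
    void masterDataIsAvailable() {
        // placeholder assertion omitted
    }
}
```
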
## Database

### Multi-Database Support

The application supports both **MySQL 8.0** and **MSSQL Server 2022** through the `SqlDialectProvider` abstraction layer.

**Database selection via Spring profiles:**

- `mysql` - MySQL 8.0 (default)
- `mssql` - Microsoft SQL Server 2022

**Environment variables:**
```bash
export SPRING_PROFILES_ACTIVE=mysql  # or mssql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=your_user
export DB_PASSWORD=your_password
```

### SqlDialectProvider Pattern

Database-specific SQL syntax is abstracted through `de.avatic.lcc.database.dialect.SqlDialectProvider`:

- **MySQLDialectProvider** - MySQL-specific SQL (LIMIT/OFFSET, NOW(), ON DUPLICATE KEY UPDATE, FOR UPDATE SKIP LOCKED)
- **MSSQLDialectProvider** - MSSQL-specific SQL (OFFSET/FETCH, GETDATE(), MERGE, WITH (UPDLOCK, READPAST))

**Key dialect differences:**

| Feature | MySQL | MSSQL |
|---------|-------|-------|
| Pagination | `LIMIT ? OFFSET ?` | `OFFSET ? ROWS FETCH NEXT ? ROWS ONLY` |
| Current timestamp | `NOW()` | `GETDATE()` |
| Date subtraction | `DATE_SUB(NOW(), INTERVAL 3 DAY)` | `DATEADD(DAY, -3, GETDATE())` |
| Boolean literals | `TRUE`, `FALSE` | `1`, `0` |
| Auto-increment | `AUTO_INCREMENT` | `IDENTITY(1,1)` |
| Upsert | `ON DUPLICATE KEY UPDATE` | `MERGE` statement |
| Insert ignore | `INSERT IGNORE` | `IF NOT EXISTS ... INSERT` |
| Skip locked rows | `FOR UPDATE SKIP LOCKED` | `WITH (UPDLOCK, READPAST)` |
| Last insert ID | `LAST_INSERT_ID()` | `CAST(@@IDENTITY AS INT)` |

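Pieced together from the calls used in the examples in this file, the provider interface looks roughly like the following sketch; the method names and signatures match the usages shown here, but the real interface may declare more.

```java
import java.util.List;

// Reconstructed from the usages shown in this document - not the actual source.
public interface SqlDialectProvider {

    /** Boolean literal, e.g. "TRUE" on MySQL vs. "1" on MSSQL. */
    String getBooleanTrue();

    /** "LIMIT ? OFFSET ?" vs. "OFFSET ? ROWS FETCH NEXT ? ROWS ONLY". */
    String buildPaginationClause(int limit, int offset);

    /** Bind values in the order the pagination clause expects them. */
    Object[] getPaginationParameters(int limit, int offset);

    /** "SELECT LAST_INSERT_ID()" vs. "SELECT CAST(@@IDENTITY AS INT)". */
    String getLastInsertIdQuery();

    /** ON DUPLICATE KEY UPDATE vs. MERGE, built from the given column lists. */
    String buildUpsertStatement(String tableName, List<String> uniqueColumns,
                                List<String> insertColumns, List<String> updateColumns);
}
```
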
**Repository usage example:**
```java
@Repository
public class ExampleRepository {
    private final JdbcTemplate jdbcTemplate;
    private final SqlDialectProvider dialectProvider;

    public ExampleRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
        this.jdbcTemplate = jdbcTemplate;
        this.dialectProvider = dialectProvider;
    }

    public List<Entity> list(int limit, int offset) {
        String sql = "SELECT * FROM table ORDER BY id " +
            dialectProvider.buildPaginationClause(limit, offset);
        Object[] params = dialectProvider.getPaginationParameters(limit, offset);
        return jdbcTemplate.query(sql, params, rowMapper);
    }
}
```

### Flyway Migrations

Database-specific migrations are organized by database type:

```
src/main/resources/db/migration/
├── mysql/
│   ├── V1__Create_schema.sql
│   ├── V2__Property_Set_Period.sql
│   └── V3-V12 (additional migrations)
└── mssql/
    ├── V1__Create_schema.sql
    ├── V2__Property_Set_Period.sql
    └── V3-V12 (MSSQL-specific conversions)
```

**Migration naming:** `V{N}__{Description}.sql`

**Key schema differences:**

- MySQL uses `AUTO_INCREMENT`, MSSQL uses `IDENTITY(1,1)`
- MySQL supports `TIMESTAMP ... ON UPDATE CURRENT_TIMESTAMP`, MSSQL requires triggers
- MySQL `BOOLEAN` maps to MSSQL `BIT`
- Check constraints syntax differs (BETWEEN vs >= AND <=)

### Key Tables

Core entities:

- **premiss**, **premiss_sink**, **premiss_route** - Supply chain scenarios and routing
- **calculation_job**, **calculation_job_destination**, **calculation_job_route_section** - Calculation workflow
- **node** - Suppliers, destinations, intermediate locations
- **material**, **packaging** - Product and packaging master data
- **container_rate**, **country_matrix_rate** - Transportation rates
- **property_set**, **property** - Versioned configuration properties

## Important Database Considerations

### Concurrency Control

**Calculation Job Locking:**

The `CalculationJobRepository.fetchAndLockNextJob()` method uses database-specific row-level locking to prevent concurrent job processing:

- **MySQL**: `FOR UPDATE SKIP LOCKED` - Skips locked rows and returns next available job
- **MSSQL**: `WITH (UPDLOCK, READPAST)` - Similar semantics but different syntax

Both implementations ensure that multiple job processors can run concurrently without conflicts.

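A sketch of the idea: the locking hints come from the bullets above, while the `calculation_job` column names and status handling are assumptions — the real `fetchAndLockNextJob()` query may differ.

```java
import java.util.Optional;
import org.springframework.jdbc.core.JdbcTemplate;

// Sketch of dialect-specific row locking; status = 'PENDING' is an assumption.
// Must run inside the job-processing transaction for the lock to be held.
public class JobLockingSketch {

    private final JdbcTemplate jdbcTemplate;
    private final boolean mysql;

    public JobLockingSketch(JdbcTemplate jdbcTemplate, boolean mysql) {
        this.jdbcTemplate = jdbcTemplate;
        this.mysql = mysql;
    }

    public Optional<Integer> fetchNextJobId() {
        // MySQL: lock the row and skip rows already locked by other processors.
        String mysqlSql = "SELECT id FROM calculation_job WHERE status = 'PENDING' "
                + "ORDER BY id LIMIT 1 FOR UPDATE SKIP LOCKED";
        // MSSQL: UPDLOCK takes an update lock, READPAST skips locked rows.
        String mssqlSql = "SELECT TOP 1 id FROM calculation_job WITH (UPDLOCK, READPAST) "
                + "WHERE status = 'PENDING' ORDER BY id";
        return jdbcTemplate.query(mysql ? mysqlSql : mssqlSql, rs ->
                rs.next() ? Optional.of(rs.getInt("id")) : Optional.<Integer>empty());
    }
}
```
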
### Transaction Isolation

- Default isolation level: READ_COMMITTED
- Repository tests use `@Transactional` for automatic rollback
- Critical operations (job locking, rate updates) use pessimistic locking

### Schema Conversion Gotchas

When adding new Flyway migrations, be aware of these differences:

**Auto-increment columns:**
```sql
-- MySQL
id INT AUTO_INCREMENT PRIMARY KEY

-- MSSQL
id INT IDENTITY(1,1) PRIMARY KEY
```

**Timestamp with auto-update:**
```sql
-- MySQL
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP

-- MSSQL (requires trigger)
updated_at DATETIME2 DEFAULT GETDATE()
-- Plus CREATE TRIGGER for ON UPDATE behavior
```

**Boolean values:**
```sql
-- MySQL
is_active BOOLEAN DEFAULT TRUE

-- MSSQL
is_active BIT DEFAULT 1
```

**Check constraints:**
```sql
-- MySQL
CHECK (latitude BETWEEN -90 AND 90)

-- MSSQL
CHECK (latitude >= -90 AND latitude <= 90)
```

### Performance Considerations

- Both databases use similar execution plans for most queries
- Indexes are defined identically in both migration sets
- MSSQL may benefit from additional statistics maintenance for complex joins
- Performance regression < 5% observed in comparative testing

## External Integrations

- **Azure AD**: OAuth2/OIDC authentication
- **Azure Maps**: Geocoding and route distance calculations (GeoApiService, DistanceApiService)
- **EU Taxation API**: TARIC nomenclature lookup for customs duties (EUTaxationApiService)

## Configuration

### Profile-Based Database Configuration

The application uses Spring profiles for database selection:

**application-mysql.properties:**
```properties
spring.profiles.active=mysql
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.url=jdbc:mysql://${DB_HOST:localhost}:3306/${DB_DATABASE}
spring.datasource.username=${DB_USER}
spring.datasource.password=${DB_PASSWORD}

spring.flyway.enabled=true
spring.flyway.locations=classpath:db/migration/mysql
spring.flyway.baseline-on-migrate=true
```

**application-mssql.properties:**
```properties
spring.profiles.active=mssql
spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver
spring.datasource.url=jdbc:sqlserver://${DB_HOST:localhost}:1433;databaseName=${DB_DATABASE};encrypt=true;trustServerCertificate=true
spring.datasource.username=${DB_USER}
spring.datasource.password=${DB_PASSWORD}

spring.flyway.enabled=true
spring.flyway.locations=classpath:db/migration/mssql
spring.flyway.baseline-on-migrate=true
```

**Environment Variables:**
```bash
# MySQL setup
export SPRING_PROFILES_ACTIVE=mysql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=root
export DB_PASSWORD=your_password

# MSSQL setup
export SPRING_PROFILES_ACTIVE=mssql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=sa
export DB_PASSWORD=YourStrong!Passw0rd
```

### Application Properties

Key properties in `application.properties`:

- `lcc.auth.identify.by` - User identification method (workday)
- `calculation.job.processor.*` - Async calculation job settings
- Flyway enabled by default; migrations run on startup

**Database-specific bean activation:**

- `@Profile("mysql")` - Activates MySQLDialectProvider
- `@Profile("mssql")` - Activates MSSQLDialectProvider

## Quick Reference

### Switching Databases

**Switch from MySQL to MSSQL:**
```bash
# Update environment
export SPRING_PROFILES_ACTIVE=mssql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=sa
export DB_PASSWORD=YourStrong!Passw0rd

# Run application
mvn spring-boot:run
```

**Switch back to MySQL:**
```bash
export SPRING_PROFILES_ACTIVE=mysql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=root
export DB_PASSWORD=your_password

mvn spring-boot:run
```

### Running Migrations

Migrations run automatically on application startup when Flyway is enabled.

**Manual migration with Flyway CLI:**
```bash
# MySQL
flyway -url=jdbc:mysql://localhost:3306/lcc -user=root -password=pass -locations=filesystem:src/main/resources/db/migration/mysql migrate

# MSSQL
flyway -url=jdbc:sqlserver://localhost:1433;databaseName=lcc -user=sa -password=pass -locations=filesystem:src/main/resources/db/migration/mssql migrate
```

### Testing Checklist

When modifying repositories or adding new database-dependent code:

1. **Run unit tests** (if applicable)
   ```bash
   mvn test -Dtest=MySQLDialectProviderTest
   mvn test -Dtest=MSSQLDialectProviderTest
   ```

2. **Run repository integration tests on MySQL**
   ```bash
   mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mysql
   ```

3. **Run repository integration tests on MSSQL**
   ```bash
   mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mssql
   ```

4. **Run full test suite on both databases**
   ```bash
   mvn test -Dspring.profiles.active=test,mysql
   mvn test -Dspring.profiles.active=test,mssql
   ```

### Common Repository Patterns

**Pattern 1: Constructor injection with SqlDialectProvider**
```java
@Repository
public class ExampleRepository {
    private final JdbcTemplate jdbcTemplate;
    private final SqlDialectProvider dialectProvider;

    public ExampleRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
        this.jdbcTemplate = jdbcTemplate;
        this.dialectProvider = dialectProvider;
    }
}
```

**Pattern 2: Pagination queries**
```java
public List<Entity> list(int limit, int offset) {
    String sql = "SELECT * FROM table WHERE condition ORDER BY id " +
        dialectProvider.buildPaginationClause(limit, offset);
    Object[] params = ArrayUtils.addAll(
        new Object[]{conditionValue},
        dialectProvider.getPaginationParameters(limit, offset)
    );
    return jdbcTemplate.query(sql, params, rowMapper);
}
```

**Pattern 3: Insert with ID retrieval**
```java
public Integer create(Entity entity) {
    String sql = "INSERT INTO table (name, is_active) VALUES (?, ?)";
    jdbcTemplate.update(sql, entity.getName(), entity.isActive());

    String idSql = dialectProvider.getLastInsertIdQuery();
    return jdbcTemplate.queryForObject(idSql, Integer.class);
}
```

**Pattern 4: Upsert operations**
```java
public void upsert(Entity entity) {
    String sql = dialectProvider.buildUpsertStatement(
        "table_name",
        List.of("unique_col1", "unique_col2"),          // unique columns
        List.of("unique_col1", "unique_col2", "value"), // insert columns
        List.of("value")                                // update columns
    );
    jdbcTemplate.update(sql, entity.getCol1(), entity.getCol2(), entity.getValue());
}
```
db.sh — new executable file, 131 lines

@@ -0,0 +1,131 @@
#!/bin/bash
# db.sh - Manage database containers

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

usage() {
    echo "Usage: $0 <mysql|mssql> [--clean] [--users] [--down]"
    echo ""
    echo "Options:"
    echo "  mysql|mssql   Which database to start"
    echo "  --clean       Delete volumes and start fresh"
    echo "  --users       Only import test users (database must be running)"
    echo "  --down        Stop the database container"
    exit 1
}

# Parse parameters
DB=""
CLEAN=false
USERS_ONLY=false
DOWN_ONLY=false

for arg in "$@"; do
    case $arg in
        mysql|mssql)
            DB=$arg
            ;;
        --clean)
            CLEAN=true
            ;;
        --users)
            USERS_ONLY=true
            ;;
        --down)
            DOWN_ONLY=true
            ;;
        *)
            usage
            ;;
    esac
done

[ -z "$DB" ] && usage

# Stop container only
if [ "$DOWN_ONLY" = true ]; then
    if [ "$DB" = "mysql" ]; then
        echo "==> Stopping MySQL..."
        podman-compose down 2>/dev/null || true
    elif [ "$DB" = "mssql" ]; then
        echo "==> Stopping MSSQL..."
        podman-compose --profile mssql down 2>/dev/null || true
    fi
    echo "==> Done!"
    exit 0
fi

# Import users only
if [ "$USERS_ONLY" = true ]; then
    if [ "$DB" = "mysql" ]; then
        echo "==> Importing users into MySQL..."
        DB_USER=$(grep SPRING_DATASOURCE_USERNAME .env | cut -d= -f2)
        DB_PASS=$(grep SPRING_DATASOURCE_PASSWORD .env | cut -d= -f2)
        podman exec -i lcc-mysql-local mysql -u"${DB_USER}" -p"${DB_PASS}" lcc \
            < src/test/resources/master_data/users.sql
        echo "==> Users imported!"
    elif [ "$DB" = "mssql" ]; then
        echo "==> Importing users into MSSQL..."
        DB_PASS=$(grep DB_ROOT_PASSWORD .env.mssql | cut -d= -f2)
        podman exec -e "SQLCMDPASSWORD=${DB_PASS}" lcc-mssql-local /opt/mssql-tools18/bin/sqlcmd \
            -S localhost -U sa -d lcc -C \
            -i /dev/stdin < src/test/resources/master_data/users_mssql.sql
        echo "==> Users imported!"
    fi
    exit 0
fi

echo "==> Stopping all DB containers..."
podman-compose --profile mssql down 2>/dev/null || true

if [ "$CLEAN" = true ]; then
    echo "==> Deleting volumes..."
    podman volume rm lcc_tool_mysql-data-local 2>/dev/null || true
    podman volume rm lcc_tool_mssql-data-local 2>/dev/null || true
fi

echo "==> Linking .env -> .env.$DB"
rm -f .env
ln -s .env.$DB .env

# Check if volume exists (for init decision)
VOLUME_EXISTS=false
if [ "$DB" = "mysql" ]; then
    podman volume exists lcc_tool_mysql-data-local 2>/dev/null && VOLUME_EXISTS=true
elif [ "$DB" = "mssql" ]; then
    podman volume exists lcc_tool_mssql-data-local 2>/dev/null && VOLUME_EXISTS=true
fi

echo "==> Starting $DB..."
if [ "$DB" = "mysql" ]; then
    podman-compose up -d mysql

    echo "==> Waiting for MySQL..."
    until podman exec lcc-mysql-local mysqladmin ping -h localhost --silent 2>/dev/null; do
        sleep 2
    done
    echo "==> MySQL is ready!"

elif [ "$DB" = "mssql" ]; then
    podman-compose --profile mssql up -d mssql

    echo "==> Waiting for MSSQL..."
    until [ "$(podman inspect -f '{{.State.Health.Status}}' lcc-mssql-local 2>/dev/null)" = "healthy" ]; do
        sleep 2
    done
    echo "==> MSSQL is ready!"

    if [ "$VOLUME_EXISTS" = false ]; then
        echo "==> New volume detected, creating database..."
        DB_PASS=$(grep DB_ROOT_PASSWORD .env | cut -d= -f2)
        podman exec lcc-mssql-local /opt/mssql-tools18/bin/sqlcmd \
            -S localhost -U sa -P "${DB_PASS}" -C \
            -Q "IF NOT EXISTS (SELECT * FROM sys.databases WHERE name = 'lcc') CREATE DATABASE lcc"
        echo "==> Database 'lcc' created!"
    fi
fi

echo "==> Done! .env points to .env.$DB"
(compose file — file header not captured in this extract)

@@ -2,6 +2,8 @@ services:
   mysql:
     image: mysql:8.4
     container_name: lcc-mysql-local
+    env_file:
+      - .env.mysql
     environment:
       MYSQL_ROOT_PASSWORD: ${DB_ROOT_PASSWORD}
       MYSQL_DATABASE: lcc

@@ -20,6 +22,30 @@ services:
       retries: 5
     restart: unless-stopped

+  # MSSQL database (optional - only needed for MSSQL tests)
+  mssql:
+    image: mcr.microsoft.com/mssql/server:2022-latest
+    container_name: lcc-mssql-local
+    environment:
+      ACCEPT_EULA: "Y"
+      MSSQL_SA_PASSWORD: ${DB_ROOT_PASSWORD}
+      MSSQL_PID: "Developer"
+    volumes:
+      - mssql-data-local:/var/opt/mssql
+    ports:
+      - "1433:1433"
+    networks:
+      - lcc-network-local
+    healthcheck:
+      test: /opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P "$${MSSQL_SA_PASSWORD}" -Q "SELECT 1" -C || exit 1
+      interval: 10s
+      timeout: 5s
+      retries: 10
+      start_period: 30s
+    restart: unless-stopped
+    profiles:
+      - mssql  # starts only with: docker-compose --profile mssql up
+
   lcc-app:
     #image: git.avatic.de/avatic/lcc:latest
     # Or for building locally:

@@ -29,7 +55,7 @@ services:
       mysql:
         condition: service_healthy
     env_file:
-      - .env
+      - .env.mysql
     environment:
       # Override the datasource URL for the Docker network
       SPRING_DATASOURCE_URL: jdbc:mysql://mysql:3306/lcc

@@ -44,6 +70,7 @@ services:

 volumes:
   mysql-data-local:
+  mssql-data-local:

 networks:
   lcc-network-local:
mvnw (vendored) — file mode changed: Normal file → Executable file (0 line changes)
pom.xml — 80 lines changed

@@ -31,8 +31,17 @@
         <spring-cloud-azure.version>5.24.1</spring-cloud-azure.version>
         <mockito.version>5.20.0</mockito.version>
         <flyway.version>11.18.0</flyway.version>
+        <surefire.excludedGroups>analysis</surefire.excludedGroups>
+        <aspectj.version>1.9.21</aspectj.version>
     </properties>
     <dependencies>
+        <!-- Allure -->
+        <dependency>
+            <groupId>io.qameta.allure</groupId>
+            <artifactId>allure-junit5</artifactId>
+            <version>2.29.0</version>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-jdbc</artifactId>

@@ -90,6 +99,12 @@
             <artifactId>mysql-connector-j</artifactId>
             <scope>runtime</scope>
         </dependency>
+        <dependency>
+            <groupId>com.microsoft.sqlserver</groupId>
+            <artifactId>mssql-jdbc</artifactId>
+            <version>12.6.1.jre11</version>
+            <scope>runtime</scope>
+        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-test</artifactId>

@@ -178,6 +193,10 @@
             <groupId>org.flywaydb</groupId>
             <artifactId>flyway-mysql</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.flywaydb</groupId>
+            <artifactId>flyway-sqlserver</artifactId>
+        </dependency>

         <dependency>
             <groupId>org.glassfish.jaxb</groupId>

@@ -195,6 +214,52 @@
             <version>3.2.3</version>
         </dependency>
+
+        <!-- TestContainers for multi-database integration testing -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-testcontainers</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>testcontainers</artifactId>
+            <version>1.19.7</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>mysql</artifactId>
+            <version>1.19.7</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>mssqlserver</artifactId>
+            <version>1.19.7</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>junit-jupiter</artifactId>
+            <version>1.19.7</version>
+            <scope>test</scope>
+        </dependency>
+
+        <!-- Playwright for E2E testing -->
+        <dependency>
+            <groupId>com.microsoft.playwright</groupId>
+            <artifactId>playwright</artifactId>
+            <version>1.48.0</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.aspectj</groupId>
+            <artifactId>aspectjweaver</artifactId>
+            <version>1.9.21</version>
+            <scope>test</scope>
+        </dependency>

     </dependencies>
     <dependencyManagement>
         <dependencies>

@@ -210,6 +275,7 @@

     <build>
         <plugins>
+
             <plugin>
                 <groupId>org.codehaus.mojo</groupId>
                 <artifactId>versions-maven-plugin</artifactId>

@@ -235,15 +301,27 @@
                 </configuration>
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-surefire-plugin</artifactId>
                 <version>3.5.4</version>
                 <configuration>
                     <argLine>
                         -javaagent:${settings.localRepository}/org/mockito/mockito-core/${mockito.version}/mockito-core-${mockito.version}.jar
+                        -javaagent:${settings.localRepository}/org/aspectj/aspectjweaver/${aspectj.version}/aspectjweaver-${aspectj.version}.jar
                     </argLine>
+                    <systemPropertyVariables>
+                        <allure.results.directory>${project.build.directory}/allure-results</allure.results.directory>
+                    </systemPropertyVariables>
+                    <!-- Exclude analysis tests by default -->
+                    <excludedGroups>${surefire.excludedGroups}</excludedGroups>
                 </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.aspectj</groupId>
+                        <artifactId>aspectjweaver</artifactId>
+                        <version>1.9.21</version>
+                    </dependency>
+                </dependencies>
             </plugin>
             <plugin>
                 <groupId>org.springframework.boot</groupId>
src/frontend/package-lock.json (generated) — 118 lines changed

@@ -10,7 +10,6 @@
     "dependencies": {
       "@phosphor-icons/vue": "^2.2.1",
       "@vueuse/core": "^13.6.0",
-      "azure-maps-control": "^3.6.1",
       "chart.js": "^4.5.0",
       "leaflet": "^1.9.4",
       "loglevel": "^1.9.2",

@@ -43,27 +42,6 @@
         "node": ">=6.0.0"
       }
     },
-    "node_modules/@azure/msal-browser": {
-      "version": "2.39.0",
-      "resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-2.39.0.tgz",
-      "integrity": "sha512-kks/n2AJzKUk+DBqZhiD+7zeQGBl+WpSOQYzWy6hff3bU0ZrYFqr4keFLlzB5VKuKZog0X59/FGHb1RPBDZLVg==",
-      "license": "MIT",
-      "dependencies": {
-        "@azure/msal-common": "13.3.3"
-      },
-      "engines": {
-        "node": ">=0.8.0"
-      }
-    },
-    "node_modules/@azure/msal-common": {
-      "version": "13.3.3",
-      "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-13.3.3.tgz",
-      "integrity": "sha512-n278DdCXKeiWhLwhEL7/u9HRMyzhUXLefeajiknf6AmEedoiOiv2r5aRJ7LXdT3NGPyubkdIbthaJlVtmuEqvA==",
-      "license": "MIT",
-      "engines": {
-        "node": ">=0.8.0"
-      }
-    },
     "node_modules/@babel/code-frame": {
       "version": "7.27.1",
       "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",

@@ -95,7 +73,6 @@
       "integrity": "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@ampproject/remapping": "^2.2.0",
         "@babel/code-frame": "^7.27.1",

@@ -980,46 +957,6 @@
       "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==",
       "license": "MIT"
     },
-    "node_modules/@mapbox/jsonlint-lines-primitives": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/@mapbox/jsonlint-lines-primitives/-/jsonlint-lines-primitives-2.0.2.tgz",
-      "integrity": "sha512-rY0o9A5ECsTQRVhv7tL/OyDpGAoUB4tTvLiW1DSzQGq4bvTPhNw1VpSNjDJc5GFZ2XuyOtSWSVN05qOtcD71qQ==",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
-    "node_modules/@mapbox/mapbox-gl-supported": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@mapbox/mapbox-gl-supported/-/mapbox-gl-supported-2.0.1.tgz",
-      "integrity": "sha512-HP6XvfNIzfoMVfyGjBckjiAOQK9WfX0ywdLubuPMPv+Vqf5fj0uCbgBQYpiqcWZT6cbyyRnTSXDheT1ugvF6UQ==",
-      "license": "BSD-3-Clause"
-    },
-    "node_modules/@mapbox/unitbezier": {
-      "version": "0.0.1",
-      "resolved": "https://registry.npmjs.org/@mapbox/unitbezier/-/unitbezier-0.0.1.tgz",
-      "integrity": "sha512-nMkuDXFv60aBr9soUG5q+GvZYL+2KZHVvsqFCzqnkGEf46U2fvmytHaEVc1/YZbiLn8X+eR3QzX1+dwDO1lxlw==",
-      "license": "BSD-2-Clause"
-    },
-    "node_modules/@maplibre/maplibre-gl-style-spec": {
-      "version": "20.4.0",
-      "resolved": "https://registry.npmjs.org/@maplibre/maplibre-gl-style-spec/-/maplibre-gl-style-spec-20.4.0.tgz",
-      "integrity": "sha512-AzBy3095fTFPjDjmWpR2w6HVRAZJ6hQZUCwk5Plz6EyfnfuQW1odeW5i2Ai47Y6TBA2hQnC+azscjBSALpaWgw==",
-      "license": "ISC",
-      "dependencies": {
-        "@mapbox/jsonlint-lines-primitives": "~2.0.2",
-        "@mapbox/unitbezier": "^0.0.1",
-        "json-stringify-pretty-compact": "^4.0.0",
-        "minimist": "^1.2.8",
-        "quickselect": "^2.0.0",
-        "rw": "^1.3.3",
-        "tinyqueue": "^3.0.0"
-      },
-      "bin": {
-        "gl-style-format": "dist/gl-style-format.mjs",
-        "gl-style-migrate": "dist/gl-style-migrate.mjs",
-        "gl-style-validate": "dist/gl-style-validate.mjs"
-      }
-    },
     "node_modules/@phosphor-icons/vue": {
       "version": "2.2.1",
       "resolved": "https://registry.npmjs.org/@phosphor-icons/vue/-/vue-2.2.1.tgz",

@@ -1345,12 +1282,6 @@
       "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
       "license": "MIT"
     },
-    "node_modules/@types/geojson": {
-      "version": "7946.0.16",
-      "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz",
-      "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==",
-      "license": "MIT"
-    },
     "node_modules/@types/web-bluetooth": {
       "version": "0.0.21",
       "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.21.tgz",

@@ -1696,18 +1627,6 @@
         "url": "https://github.com/sponsors/jonschlinkert"
       }
     },
-    "node_modules/azure-maps-control": {
-      "version": "3.6.1",
-      "resolved": "https://registry.npmjs.org/azure-maps-control/-/azure-maps-control-3.6.1.tgz",
-      "integrity": "sha512-EqJ96GOjUcCG9XizUbyqDu92x3KKT9C9AwRL3hmPicQjn00ql7em6RbBqJYO4nvIoH53DG6MOITj9t/zv1mQYg==",
-      "license": "SEE LICENSE.TXT",
-      "dependencies": {
-        "@azure/msal-browser": "^2.32.1",
-        "@mapbox/mapbox-gl-supported": "^2.0.1",
-        "@maplibre/maplibre-gl-style-spec": "^20.0.0",
-        "@types/geojson": "^7946.0.14"
-      }
-    },
     "node_modules/binary-extensions": {
       "version": "2.3.0",
       "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",

@@ -1761,7 +1680,6 @@
        }
      ],
      "license": "MIT",
-     "peer": true,
      "dependencies": {
        "caniuse-lite": "^1.0.30001737",
        "electron-to-chromium": "^1.5.211",

@@ -1817,7 +1735,6 @@
      "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.0.tgz",
      "integrity": "sha512-aYeC/jDgSEx8SHWZvANYMioYMZ2KX02W6f6uVfyteuCGcadDLcYVHdfdygsTQkQ4TKn5lghoojAsPj5pu0SnvQ==",
      "license": "MIT",
-     "peer": true,
      "dependencies": {
        "@kurkle/color": "^0.3.0"
      },

@@ -2371,12 +2288,6 @@
        "node": ">=6"
      }
    },
-    "node_modules/json-stringify-pretty-compact": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/json-stringify-pretty-compact/-/json-stringify-pretty-compact-4.0.0.tgz",
-      "integrity": "sha512-3CNZ2DnrpByG9Nqj6Xo8vqbjT4F6N+tb4Gb28ESAZjYZ5yqvmc56J+/kuIwkaAMOyblTQhUW7PxMkUb8Q36N3Q==",
-      "license": "MIT"
-    },
    "node_modules/json5": {
      "version": "2.2.3",
      "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",

@@ -2447,15 +2358,6 @@
        "@jridgewell/sourcemap-codec": "^1.5.5"
      }
    },
-    "node_modules/minimist": {
-      "version": "1.2.8",
-      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
-      "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
-      "license": "MIT",
-      "funding": {
-        "url": "https://github.com/sponsors/ljharb"
-      }
-    },
    "node_modules/mitt": {
      "version": "3.0.1",
      "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz",

@@ -2700,12 +2602,6 @@
        "url": "https://github.com/sponsors/sindresorhus"
      }
    },
-    "node_modules/quickselect": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/quickselect/-/quickselect-2.0.0.tgz",
-      "integrity": "sha512-RKJ22hX8mHe3Y6wH/N3wCM6BWtjaxIyyUIkpHOvfFnxdI4yD4tBXEBKSbriGujF6jnSVkJrffuo6vxACiSSxIw==",
-      "license": "ISC"
-    },
    "node_modules/readdirp": {
      "version": "3.6.0",
      "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",

@@ -2789,12 +2685,6 @@
        "url": "https://github.com/sponsors/sindresorhus"
      }
    },
-    "node_modules/rw": {
-      "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz",
-      "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==",
-      "license": "BSD-3-Clause"
-    },
    "node_modules/semver": {
      "version": "6.3.1",
      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",

@@ -2915,12 +2805,6 @@
        "url": "https://github.com/sponsors/SuperchupuDev"
      }
    },
-    "node_modules/tinyqueue": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/tinyqueue/-/tinyqueue-3.0.0.tgz",
-      "integrity": "sha512-gRa9gwYU3ECmQYv3lslts5hxuIa90veaEcxDYuu3QGOIAEM2mOZkVHp48ANJuu1CURtRdHKUBY5Lm1tHV+sD4g==",
-      "license": "ISC"
-    },
    "node_modules/to-regex-range": {
      "version": "5.0.1",
      "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",

@@ -3018,7 +2902,6 @@
      "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.4.tgz",
      "integrity": "sha512-X5QFK4SGynAeeIt+A7ZWnApdUyHYm+pzv/8/A57LqSGcI88U6R6ipOs3uCesdc6yl7nl+zNO0t8LmqAdXcQihw==",
      "license": "MIT",
-     "peer": true,
      "dependencies": {
        "esbuild": "^0.25.0",
        "fdir": "^6.5.0",

@@ -3250,7 +3133,6 @@
      "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.21.tgz",
      "integrity": "sha512-xxf9rum9KtOdwdRkiApWL+9hZEMWE90FHh8yS1+KJAiWYh+iGWV1FquPjoO9VUHQ+VIhsCXNNyZ5Sf4++RVZBA==",
      "license": "MIT",
-     "peer": true,
      "dependencies": {
        "@vue/compiler-dom": "3.5.21",
        "@vue/compiler-sfc": "3.5.21",
(Vue component — file header not captured in this extract)

@@ -86,7 +86,7 @@ export default {
   flex-direction: column;
   gap: 1.6rem;
   width: min(80vw, 180rem);
-  height: min(80vh, 120rem);
+  height: min(90vh, 120rem);
   min-height: 0;
 }
@@ -107,6 +107,27 @@
   <modal :z-index="2000" :state="modalShow">
     <div class="modal-content-container">
       <h3 class="sub-header">{{ modalTitle }}</h3>
+
+      <!-- Part Number Chips -->
+      <div v-if="shouldShowPartNumbers" class="parts-selection-container">
+        <div class="parts-chips">
+          <basic-badge
+            v-for="partNumber in selectedPartNumbers.slice(0, 5)"
+            :key="partNumber"
+            variant="primary"
+            size="compact"
+            class="part-chip"
+          >
+            {{ partNumber }}
+          </basic-badge>
+          <span v-if="selectedPartNumbers.length > 5" class="parts-ellipsis">...</span>
+        </div>
+        <div v-if="partNumberCountText" class="parts-count">
+          {{ partNumberCountText }}
+        </div>
+      </div>
+      <!-- END: Part Number Chips -->
+
       <component
         :is="modalComponentType"
         ref="modalComponent"
@@ -176,6 +197,7 @@ import Modal from "@/components/UI/Modal.vue";
 import PriceEdit from "@/components/layout/edit/PriceEdit.vue";
 import MaterialEdit from "@/components/layout/edit/MaterialEdit.vue";
 import PackagingEdit from "@/components/layout/edit/PackagingEdit.vue";
+import BasicBadge from "@/components/UI/BasicBadge.vue";
 
 import {useNotificationStore} from "@/store/notification.js";
 import {useDestinationEditStore} from "@/store/destinationEdit.js";
@@ -211,7 +233,8 @@ export default {
     CalculationListItem,
     Checkbox,
     BulkEditRow,
-    BasicButton
+    BasicButton,
+    BasicBadge
   },
   data() {
     return {
@@ -286,6 +309,55 @@ export default {
         return "Please wait. Prepare calculation ..."
 
       return this.processingMessage;
+    },
+
+    /**
+     * Extracts unique part numbers from the selected premises
+     * @returns {Array<string>} Sorted array of unique part numbers
+     */
+    selectedPartNumbers() {
+      // Guard: no editIds or not relevant
+      if (!this.editIds || this.editIds.length === 0) {
+        return [];
+      }
+
+      // Only shown for material/price/packaging modals
+      const relevantTypes = ['material', 'price', 'packaging'];
+      if (!relevantTypes.includes(this.modalType)) {
+        return [];
+      }
+
+      try {
+        // Extract the part numbers
+        const partNumbers = this.editIds
+          .map(id => {
+            const premise = this.premiseEditStore.getById(id);
+            return premise?.material?.part_number;
+          })
+          .filter(partNumber => partNumber != null && partNumber !== '');
+
+        // Remove duplicates and sort
+        return [...new Set(partNumbers)].sort();
+
+      } catch (error) {
+        logger.log('Error extracting part numbers:', error);
+        return [];
+      }
+    },
+
+    /**
+     * Checks whether the part numbers should be displayed
+     */
+    shouldShowPartNumbers() {
+      return this.selectedPartNumbers.length > 0;
+    },
+
+    /**
+     * Count text for large selections (> 5 parts)
+     */
+    partNumberCountText() {
+      const count = this.selectedPartNumbers.length;
+      return count > 5 ? `${count} part numbers` : null;
     }
   },
   watch: {
@@ -630,6 +702,38 @@ export default {
   margin-bottom: 1.6rem;
 }
+
+/* Part Number Chips Styling */
+.parts-selection-container {
+  display: flex;
+  flex-direction: column;
+  gap: 0.4rem;
+  margin-bottom: 1.6rem;
+  padding-bottom: 1.6rem;
+  border-bottom: 0.1rem solid rgba(107, 134, 156, 0.1);
+}
+
+.parts-chips {
+  display: flex;
+  flex-wrap: wrap;
+  gap: 0.6rem;
+}
+
+.part-chip {
+  flex-shrink: 0;
+}
+
+.parts-ellipsis {
+  font-size: 1.4rem;
+  color: #6B869C;
+  align-self: center;
+  padding: 0 0.4rem;
+}
+
+.parts-count {
+  font-size: 1.2rem;
+  color: #9CA3AF;
+}
 
 /* Global style for copy-mode cursor */
 .edit-calculation-container.has-selection :deep(.edit-calculation-list-header-cell--copyable:hover) {
   cursor: url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz48c3ZnIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDEyOC41MSAxMzQuMDUiPjxkZWZzPjxzdHlsZT4uY3tmaWxsOm5vbmU7fS5jLC5ke3N0cm9rZTojMDEwMTAxO3N0cm9rZS1saW5lY2FwOnJvdW5kO3N0cm9rZS1saW5lam9pbjpyb3VuZDtzdHJva2Utd2lkdGg6NXB4O30uZHtmaWxsOiNmZmY7fTwvc3R5bGU+PC9kZWZzPjxnIGlkPSJhIj48cGF0aCBjbGFzcz0iYyIgZD0ibTU0Ljg5LDExMi41MWgtMi4yNGMtMS4yNCwwLTIuMjQtMS0yLjI0LTIuMjR2LTIuMjQiLz48bGluZSBjbGFzcz0iYyIgeDE9IjcwLjU3IiB5MT0iNzYuNjciIHgyPSI2My44NSIgeTI9Ijc2LjY3Ii8+PGxpbmUgY2xhc3M9ImMiIHgxPSI3MC41NyIgeTE9IjExMi41MSIgeDI9IjY2LjA5IiB5Mj0iMTEyLjUxIi8+PGxpbmUgY2xhc3M9ImMiIHgxPSI4Ni4yNSIgeTE9Ijk5LjA3IiB4Mj0iODYuMjUiIHkyPSI5Mi4zNSIvPjxsaW5lIGNsYXNzPSJjIiB4MT0iNTAuNDEiIHkxPSI5Ni44MyIgeDI9IjUwLjQxIiB5Mj0iOTIuMzUiLz48cGF0aCBjbGFzcz0iYyIgZD0ibTgxLjc3LDExMi41MWgyLjI0YzEuMjQsMCwyLjI0LTEsMi4yNC0yLjI0di0yLjI0Ii8+PHBhdGggY2xhc3M9ImMiIGQ9Im04MS43Nyw3Ni42N2gyLjI0YzEuMjQsMCwyLjI0LDEsMi4yNCwyLjI0djIuMjQiLz48cGF0aCBjbGFzcz0iYyIgZD0ibTU0Ljg5LDc2LjY3aC0yLjI0Yy0xLjI0LDAtMi4yNCwxLTIuMjQsMi4yNHYyLjI0Ii8+PHBhdGggY2xhc3M9ImMiIGQ9Im04Ni4yNSw5OS4wN2gxMS4yYzEuMjQsMCwyLjI0LTEsMi4yNC0yLjI0di0zMS4zNmMwLTEuMjQtMS0yLjI0LTIuMjQtMi4yNGgtMzEuMzZjLTEuMjQsMC0yLjI0LDEtMi4yNCwyLjI0djExLjIiLz48L2c+PGcgaWQ9ImIiPjxwYXRoIGNsYXNzPSJkIiBkPSJtNDQuMDgsNDQuMDdsMzIuOTQtOS4yYzEuNjktLjUyLDIuNjQtMi4zMSwyLjEyLTQtLjMtLjk4LTEuMDUtMS43NS0yLjAxLTIuMDlMNi43MywyLjY3Yy0xLjY3LS41Ny0zLjQ5LjMzLTQuMDYsMi0uMjMuNjYtLjIzLDEuMzgsMCwyLjA1bDI2LjExLDcwLjRjLjU4LDEuNjcsMi40LDIuNTYsNC4wNywxLjk4Ljk3LS4zMywxLjcxLTEuMTEsMi4wMS0yLjA5bDkuMjItMzIuOTRaIi8+PC9nPjwvc3ZnPg==") 12 12, pointer;
@@ -35,6 +35,7 @@ export default defineConfig({
     },
   },
   server: {
+    host: true,
     proxy: {
       '/api': {
         target: 'http://localhost:8080',
@@ -81,9 +81,15 @@ public class PremiseController {
     @GetMapping({"/search", "/search/"})
     @PreAuthorize("hasAnyRole('SUPER', 'CALCULATION')")
     public ResponseEntity<PremiseSearchResultDTO> findMaterialsAndSuppliers(@RequestParam String search) {
+        log.info("Search request received with query: '{}' (length: {})", search, search != null ? search.length() : 0);
+
         try {
-            return ResponseEntity.ok(premiseSearchStringAnalyzerService.findMaterialAndSuppliers(search));
+            var result = premiseSearchStringAnalyzerService.findMaterialAndSuppliers(search);
+            log.info("Search result: {} materials, {} suppliers, {} user suppliers",
+                    result.getMaterials() != null ? result.getMaterials().size() : 0,
+                    result.getSupplier() != null ? result.getSupplier().size() : 0,
+                    result.getUserSupplier() != null ? result.getUserSupplier().size() : 0);
+            return ResponseEntity.ok(result);
         } catch (Exception e) {
             throw new BadRequestException("Bad string encoding", "Unable to decode request", e);
         }
@@ -0,0 +1,454 @@ new file: de.avatic.lcc.database.dialect.MSSQLDialectProvider
package de.avatic.lcc.database.dialect;

import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Microsoft SQL Server-specific implementation of {@link SqlDialectProvider}.
 *
 * <p>This provider generates SQL syntax compatible with SQL Server 2017+.
 * It is automatically activated when the "mssql" Spring profile is active.</p>
 *
 * @author LCC Team
 * @since 1.0
 */
@Component
@Profile("mssql")
public class MSSQLDialectProvider implements SqlDialectProvider {

    @Override
    public String getDialectName() {
        return "Microsoft SQL Server";
    }

    @Override
    public String getDriverClassName() {
        return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
    }

    // ========== Pagination ==========

    /**
     * Builds MSSQL pagination clause using OFFSET/FETCH.
     *
     * <p>MSSQL syntax: {@code OFFSET ? ROWS FETCH NEXT ? ROWS ONLY}</p>
     *
     * @param limit  maximum number of rows to return
     * @param offset number of rows to skip
     * @return MSSQL pagination clause
     */
    @Override
    public String buildPaginationClause(int limit, int offset) {
        return "OFFSET ? ROWS FETCH NEXT ? ROWS ONLY";
    }

    /**
     * Returns pagination parameters for MSSQL in correct order: [offset, limit].
     *
     * <p>Note: MSSQL requires OFFSET first, then FETCH NEXT (opposite of MySQL).</p>
     *
     * @param limit  maximum number of rows
     * @param offset number of rows to skip
     * @return array with [offset, limit] (reversed compared to MySQL)
     */
    @Override
    public Object[] getPaginationParameters(int limit, int offset) {
        return new Object[]{offset, limit}; // MSSQL: offset first, then limit
    }

    /**
     * Returns the maximum LIMIT value for MSSQL.
     *
     * <p>MSSQL INT max value: {@code 2147483647}</p>
     *
     * @return "2147483647"
     */
    @Override
    public String getMaxLimitValue() {
        return "2147483647"; // INT max value in MSSQL
    }

    // ========== Upsert/Insert Ignore ==========

    /**
     * Builds MSSQL MERGE statement for upsert operations.
     *
     * <p>MSSQL uses MERGE instead of MySQL's ON DUPLICATE KEY UPDATE.</p>
     *
     * <p>Example generated SQL:</p>
     * <pre>
     * MERGE INTO table AS target
     * USING (SELECT ? AS col1, ? AS col2) AS source
     * ON target.key1 = source.key1 AND target.key2 = source.key2
     * WHEN MATCHED THEN
     *   UPDATE SET target.col3 = source.col3
     * WHEN NOT MATCHED THEN
     *   INSERT (col1, col2, col3) VALUES (source.col1, source.col2, source.col3);
     * </pre>
     *
     * @param tableName     target table name
     * @param uniqueColumns columns that define uniqueness (for ON clause)
     * @param insertColumns all columns to insert
     * @param updateColumns columns to update on match
     * @return MSSQL MERGE statement
     */
    @Override
    public String buildUpsertStatement(
            String tableName,
            List<String> uniqueColumns,
            List<String> insertColumns,
            List<String> updateColumns
    ) {
        if (tableName == null || uniqueColumns.isEmpty() || insertColumns.isEmpty()) {
            throw new IllegalArgumentException("tableName, uniqueColumns, and insertColumns must not be empty");
        }

        // Build source column list with placeholders
        String sourceColumns = insertColumns.stream()
                .map(col -> "? AS " + col)
                .collect(Collectors.joining(", "));

        // Build ON clause matching unique columns
        String onClause = uniqueColumns.stream()
                .map(col -> "target." + col + " = source." + col)
                .collect(Collectors.joining(" AND "));

        // Build UPDATE SET clause (only if updateColumns is not empty)
        String updateClause = "";
        if (updateColumns != null && !updateColumns.isEmpty()) {
            updateClause = "WHEN MATCHED THEN UPDATE SET " +
                    updateColumns.stream()
                            .map(col -> "target." + col + " = source." + col)
                            .collect(Collectors.joining(", ")) + " ";
        }

        // Build INSERT clause
        String insertColumnList = String.join(", ", insertColumns);
        String insertValueList = insertColumns.stream()
                .map(col -> "source." + col)
                .collect(Collectors.joining(", "));

        return String.format(
                "MERGE INTO %s AS target " +
                "USING (SELECT %s) AS source " +
                "ON %s " +
                "%s" + // UPDATE clause (may be empty)
                "WHEN NOT MATCHED THEN " +
                "INSERT (%s) VALUES (%s);",
                tableName,
                sourceColumns,
                onClause,
                updateClause,
                insertColumnList,
                insertValueList
        );
    }

    @Override
    public String buildInsertIgnoreStatement(
            String tableName,
            List<String> columns,
            List<String> uniqueColumns
    ) {
        String columnList = String.join(", ", columns);
        String placeholders = columns.stream().map(c -> "?").collect(Collectors.joining(", "));
        String uniqueCondition = uniqueColumns.stream()
                .map(c -> String.format("target.%s = source.%s", c, c))
                .collect(Collectors.joining(" AND "));
        String sourceColumns = columns.stream()
                .map(c -> String.format("source.%s", c))
                .collect(Collectors.joining(", "));

        return String.format(
                "MERGE INTO %s AS target " +
                "USING (SELECT %s) AS source (%s) " +
                "ON %s " +
                "WHEN NOT MATCHED THEN INSERT (%s) VALUES (%s);",
                tableName,
                placeholders,
                columnList,
                uniqueCondition,
                columnList,
                sourceColumns
        );
    }

    // ========== Locking Strategies ==========

    /**
     * Builds MSSQL SELECT with UPDLOCK and READPAST hints (equivalent to MySQL SKIP LOCKED).
     *
     * <p>MSSQL syntax: {@code SELECT ... FROM table WITH (UPDLOCK, READPAST)}</p>
     *
     * <p>The WITH hint must be placed after the table name in FROM clause.</p>
     *
     * @param selectStatement base SELECT statement
     * @return SELECT statement with UPDLOCK, READPAST hints
     */
    @Override
    public String buildSelectForUpdateSkipLocked(String selectStatement) {
        // Insert WITH (UPDLOCK, READPAST) after the first table name in FROM clause
        // This is a simplified approach - assumes "FROM tablename" pattern
        return selectStatement.replaceFirst(
                "FROM\\s+(\\w+)",
                "FROM $1 WITH (UPDLOCK, READPAST)"
        );
    }

    /**
     * Builds MSSQL SELECT with UPDLOCK hint (standard pessimistic locking).
     *
     * <p>MSSQL syntax: {@code SELECT ... FROM table WITH (UPDLOCK, ROWLOCK)}</p>
     *
     * @param selectStatement base SELECT statement
     * @return SELECT statement with UPDLOCK hint
     */
    @Override
    public String buildSelectForUpdate(String selectStatement) {
        return selectStatement.replaceFirst(
                "FROM\\s+(\\w+)",
                "FROM $1 WITH (UPDLOCK, ROWLOCK)"
        );
    }

    // ========== Date/Time Functions ==========

    /**
     * Returns MSSQL current timestamp function: {@code GETDATE()}.
     *
     * @return {@code GETDATE()}
     */
    @Override
    public String getCurrentTimestamp() {
        return "GETDATE()";
    }

    /**
     * Builds MSSQL date subtraction using DATEADD with negative value.
     *
     * <p>MSSQL syntax: {@code DATEADD(DAY, -?, GETDATE())}</p>
     *
     * @param baseDate base date expression (or null to use GETDATE())
     * @param value    placeholder for subtraction amount
     * @param unit     time unit (DAY, HOUR, MINUTE, etc.)
     * @return MSSQL DATEADD expression with negative value
     */
    @Override
    public String buildDateSubtraction(String baseDate, String value, DateUnit unit) {
        String base = (baseDate != null && !baseDate.isEmpty()) ? baseDate : "GETDATE()";
        // MSSQL uses DATEADD with negative value for subtraction
        return String.format("DATEADD(%s, -%s, %s)", unit.name(), value, base);
    }

    /**
     * Builds MSSQL date addition using DATEADD.
     *
     * <p>MSSQL syntax: {@code DATEADD(DAY, ?, GETDATE())}</p>
     *
     * @param baseDate base date expression (or null to use GETDATE())
     * @param value    placeholder for addition amount
     * @param unit     time unit (DAY, HOUR, MINUTE, etc.)
     * @return MSSQL DATEADD expression
     */
    @Override
    public String buildDateAddition(String baseDate, String value, DateUnit unit) {
        String base = (baseDate != null && !baseDate.isEmpty()) ? baseDate : "GETDATE()";
        return String.format("DATEADD(%s, %s, %s)", unit.name(), value, base);
    }

    /**
     * Extracts date part from datetime expression using CAST.
     *
     * <p>MSSQL syntax: {@code CAST(column AS DATE)}</p>
     *
     * @param columnOrExpression column name or expression
     * @return MSSQL CAST expression
     */
    @Override
    public String extractDate(String columnOrExpression) {
        return String.format("CAST(%s AS DATE)", columnOrExpression);
    }

    // ========== Auto-increment Reset ==========

    /**
     * Resets IDENTITY counter for a table using DBCC CHECKIDENT.
     *
     * <p>MSSQL syntax: {@code DBCC CHECKIDENT ('table', RESEED, 0)}</p>
     *
     * @param tableName table to reset IDENTITY counter
     * @return MSSQL DBCC CHECKIDENT statement
     */
    @Override
    public String buildAutoIncrementReset(String tableName) {
        return String.format("DBCC CHECKIDENT ('%s', RESEED, 0)", tableName);
    }

    // ========== Geospatial Distance Calculation ==========

    /**
     * Builds Haversine distance formula for MSSQL.
     *
     * <p>MSSQL supports the same trigonometric functions as MySQL (SIN, COS, ACOS, RADIANS),
     * so the formula is identical. Calculates great-circle distance in kilometers.</p>
     *
     * <p>Formula:</p>
     * <pre>
     * 6371 * ACOS(
     *   COS(RADIANS(lat1)) * COS(RADIANS(lat2)) * COS(RADIANS(lng2) - RADIANS(lng1)) +
     *   SIN(RADIANS(lat1)) * SIN(RADIANS(lat2))
     * )
     * </pre>
     *
     * @param lat1 first latitude column/expression
     * @param lng1 first longitude column/expression
     * @param lat2 second latitude column/expression
     * @param lng2 second longitude column/expression
     * @return Haversine distance expression in kilometers
     */
    @Override
    public String buildHaversineDistance(String lat1, String lng1, String lat2, String lng2) {
        return String.format(
                "6371 * ACOS(" +
                "COS(RADIANS(%s)) * COS(RADIANS(%s)) * " +
                "COS(RADIANS(%s) - RADIANS(%s)) + " +
                "SIN(RADIANS(%s)) * SIN(RADIANS(%s))" +
                ")",
                lat1, lat2, lng2, lng1, lat1, lat2
        );
    }

    // ========== String/Type Functions ==========

    /**
     * Builds string concatenation using CONCAT function (SQL Server 2012+).
     *
     * <p>MSSQL syntax: {@code CONCAT(a, b, c)}</p>
     *
     * @param expressions expressions to concatenate
     * @return MSSQL CONCAT expression
     */
    @Override
    public String buildConcat(String... expressions) {
        if (expressions == null || expressions.length == 0) {
            return "''";
        }
        return "CONCAT(" + String.join(", ", expressions) + ")";
    }

    /**
     * Casts expression to string type.
     *
     * <p>MSSQL syntax: {@code CAST(expression AS VARCHAR(MAX))}</p>
     *
     * @param expression expression to cast to string
     * @return MSSQL CAST expression
     */
    @Override
    public String castToString(String expression) {
        return String.format("CAST(%s AS VARCHAR(MAX))", expression);
    }

    // ========== RETURNING Clause Support ==========

    /**
     * MSSQL supports RETURNING clause via OUTPUT INSERTED.
     *
     * @return true
     */
    @Override
    public boolean supportsReturningClause() {
        return true;
    }

    /**
     * Builds MSSQL OUTPUT clause for INSERT statements.
     *
     * <p>MSSQL syntax: {@code OUTPUT INSERTED.column1, INSERTED.column2}</p>
     *
     * @param columns columns to return from inserted row
     * @return MSSQL OUTPUT INSERTED clause
     */
    @Override
    public String buildReturningClause(String... columns) {
        if (columns == null || columns.length == 0) {
            throw new IllegalArgumentException("At least one column must be specified");
        }
        String columnList = Arrays.stream(columns)
                .map(col -> "INSERTED." + col)
                .collect(Collectors.joining(", "));
        return "OUTPUT " + columnList;
    }

    /**
     * Returns MSSQL IDENTITY definition for auto-increment columns.
     *
     * <p>MSSQL syntax: {@code IDENTITY(1,1)}</p>
     *
     * @return {@code IDENTITY(1,1)}
     */
    @Override
    public String getAutoIncrementDefinition() {
        return "IDENTITY(1,1)";
    }

    /**
     * Returns MSSQL timestamp column definition.
     *
     * <p>MSSQL uses DATETIME2 with DEFAULT constraint.
     * Note: MSSQL doesn't support ON UPDATE CURRENT_TIMESTAMP like MySQL,
     * so updates must be handled via triggers or application logic.</p>
     *
     * @return DATETIME2 column definition
     */
    @Override
    public String getTimestampDefinition() {
        return "DATETIME2 DEFAULT GETDATE()";
    }

    // ========== Boolean Literals ==========

    /**
     * Returns MSSQL boolean TRUE literal as numeric 1.
     *
     * <p>MSSQL BIT type uses 1 for TRUE.</p>
     *
     * @return "1"
     */
    @Override
    public String getBooleanTrue() {
        return "1";
    }

    /**
     * Returns MSSQL boolean FALSE literal as numeric 0.
     *
     * <p>MSSQL BIT type uses 0 for FALSE.</p>
     *
     * @return "0"
     */
    @Override
    public String getBooleanFalse() {
        return "0";
    }

    // ========== Identifier Escaping ==========

    /**
     * Escapes identifier with square brackets for MSSQL reserved words.
     *
     * <p>MSSQL uses square brackets to escape reserved words like 'file', 'user', 'order'.</p>
     *
     * @param identifier column or table name to escape
     * @return escaped identifier with square brackets
     */
    @Override
    public String escapeIdentifier(String identifier) {
        // MSSQL uses square brackets for escaping reserved words
        return "[" + identifier + "]";
    }
}
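For orientation, a minimal sketch of what the MERGE-based upsert above emits for a hypothetical table (the table and column names here are invented for illustration and are not taken from the schema):

// Illustrative only; "premise_price" and its columns are assumed names.
import java.util.List;

class UpsertSketch {
    public static void main(String[] args) {
        SqlDialectProvider dialect = new MSSQLDialectProvider();
        String sql = dialect.buildUpsertStatement(
                "premise_price",                              // assumed table
                List.of("premise_id", "valid_from"),          // assumed unique key
                List.of("premise_id", "valid_from", "price"), // columns to insert
                List.of("price")                              // columns to update on match
        );
        // Expected shape (placeholders are bound in insertColumns order):
        // MERGE INTO premise_price AS target
        //   USING (SELECT ? AS premise_id, ? AS valid_from, ? AS price) AS source
        //   ON target.premise_id = source.premise_id AND target.valid_from = source.valid_from
        //   WHEN MATCHED THEN UPDATE SET target.price = source.price
        //   WHEN NOT MATCHED THEN INSERT (premise_id, valid_from, price)
        //     VALUES (source.premise_id, source.valid_from, source.price);
        System.out.println(sql);
    }
}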
@@ -0,0 +1,205 @@ new file: de.avatic.lcc.database.dialect.MySQLDialectProvider
package de.avatic.lcc.database.dialect;

import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

/**
 * MySQL-specific implementation of {@link SqlDialectProvider}.
 *
 * <p>This provider generates SQL syntax compatible with MySQL 8.0+.
 * It is the default dialect and is active whenever the "mssql" Spring profile is not active.</p>
 *
 * @author LCC Team
 * @since 1.0
 */
@Component
@Profile("!mssql")
public class MySQLDialectProvider implements SqlDialectProvider {

    @Override
    public String getDialectName() {
        return "MySQL";
    }

    @Override
    public String getDriverClassName() {
        return "com.mysql.cj.jdbc.Driver";
    }

    // ========== Pagination ==========

    @Override
    public String buildPaginationClause(int limit, int offset) {
        return "LIMIT ? OFFSET ?";
    }

    @Override
    public Object[] getPaginationParameters(int limit, int offset) {
        return new Object[]{limit, offset};
    }

    // ========== Upsert Operations ==========

    @Override
    public String buildUpsertStatement(
            String tableName,
            List<String> uniqueColumns,
            List<String> insertColumns,
            List<String> updateColumns
    ) {
        // INSERT INTO table (col1, col2, ...) VALUES (?, ?, ...)
        String insertPart = String.format(
                "INSERT INTO %s (%s) VALUES (%s)",
                tableName,
                String.join(", ", insertColumns),
                insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "))
        );

        // ON DUPLICATE KEY UPDATE col1 = VALUES(col1), col2 = VALUES(col2), ...
        String updatePart = updateColumns.stream()
                .map(col -> col + " = VALUES(" + col + ")")
                .collect(Collectors.joining(", "));

        return insertPart + " ON DUPLICATE KEY UPDATE " + updatePart;
    }

    @Override
    public String buildInsertIgnoreStatement(
            String tableName,
            List<String> columns,
            List<String> uniqueColumns
    ) {
        return String.format(
                "INSERT IGNORE INTO %s (%s) VALUES (%s)",
                tableName,
                String.join(", ", columns),
                columns.stream().map(c -> "?").collect(Collectors.joining(", "))
        );
    }

    // ========== Locking Strategies ==========

    @Override
    public String buildSelectForUpdateSkipLocked(String selectStatement) {
        return selectStatement + " FOR UPDATE SKIP LOCKED";
    }

    @Override
    public String buildSelectForUpdate(String selectStatement) {
        return selectStatement + " FOR UPDATE";
    }

    // ========== Date/Time Functions ==========

    @Override
    public String getCurrentTimestamp() {
        return "NOW()";
    }

    @Override
    public String buildDateSubtraction(String baseDate, String value, DateUnit unit) {
        String base = baseDate != null ? baseDate : "NOW()";
        return String.format("DATE_SUB(%s, INTERVAL %s %s)", base, value, unit.name());
    }

    @Override
    public String buildDateAddition(String baseDate, String value, DateUnit unit) {
        String base = baseDate != null ? baseDate : "NOW()";
        return String.format("DATE_ADD(%s, INTERVAL %s %s)", base, value, unit.name());
    }

    @Override
    public String extractDate(String columnOrExpression) {
        return "DATE(" + columnOrExpression + ")";
    }

    // ========== Auto-increment Reset ==========

    @Override
    public String buildAutoIncrementReset(String tableName) {
        return String.format("ALTER TABLE %s AUTO_INCREMENT = 1", tableName);
    }

    // ========== Geospatial Distance Calculation ==========

    @Override
    public String buildHaversineDistance(String lat1, String lng1, String lat2, String lng2) {
        // Haversine formula: 6371 km (Earth radius) * acos(...)
        // Formula: d = 2R * arcsin(sqrt(sin²((lat2-lat1)/2) + cos(lat1)*cos(lat2)*sin²((lon2-lon1)/2)))
        // Simplified: R * acos(cos(lat1)*cos(lat2)*cos(lng2-lng1) + sin(lat1)*sin(lat2))
        // Returns distance in KILOMETERS
        return String.format(
                "6371 * ACOS(COS(RADIANS(%s)) * COS(RADIANS(%s)) * " +
                "COS(RADIANS(%s) - RADIANS(%s)) + SIN(RADIANS(%s)) * SIN(RADIANS(%s)))",
                lat1, lat2, lng2, lng1, lat1, lat2
        );
    }

    // ========== String/Type Functions ==========

    @Override
    public String buildConcat(String... expressions) {
        return "CONCAT(" + String.join(", ", expressions) + ")";
    }

    @Override
    public String castToString(String expression) {
        return "CAST(" + expression + " AS CHAR)";
    }

    // ========== Bulk Operations ==========

    @Override
    public String getMaxLimitValue() {
        // MySQL BIGINT UNSIGNED max value
        return "18446744073709551615";
    }

    @Override
    public boolean supportsReturningClause() {
        return false;
    }

    @Override
    public String buildReturningClause(String... columns) {
        throw new UnsupportedOperationException(
                "MySQL does not support RETURNING clause. Use LAST_INSERT_ID() or GeneratedKeyHolder instead."
        );
    }

    // ========== Schema/DDL ==========

    @Override
    public String getAutoIncrementDefinition() {
        return "INT NOT NULL AUTO_INCREMENT";
    }

    @Override
    public String getTimestampDefinition() {
        return "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP";
    }

    // ========== Boolean Literals ==========

    @Override
    public String getBooleanTrue() {
        return "TRUE";
    }

    @Override
    public String getBooleanFalse() {
        return "FALSE";
    }

    // ========== Identifier Escaping ==========

    @Override
    public String escapeIdentifier(String identifier) {
        // MySQL uses backticks for escaping reserved words
        return "`" + identifier + "`";
    }
}
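The pagination clause and the parameter order differ between the two providers, so repository code has to take both from the same dialect instance. A minimal sketch of that pattern, with an assumed query against the material table:

// Sketch only: shows why clause text and parameter order must come from the same provider.
import java.util.List;

class PaginationSketch {
    static String pageQuery(SqlDialectProvider dialect, int limit, int offset) {
        String sql = "SELECT id, name FROM material ORDER BY id "
                + dialect.buildPaginationClause(limit, offset);
        Object[] params = dialect.getPaginationParameters(limit, offset);
        // MySQL:  "... LIMIT ? OFFSET ?"                      with params [limit, offset]
        // MSSQL:  "... OFFSET ? ROWS FETCH NEXT ? ROWS ONLY"  with params [offset, limit]
        return sql + "  -- params: " + List.of(params);
    }
}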
@@ -0,0 +1,403 @@ new file: de.avatic.lcc.database.dialect.SqlDialectProvider
package de.avatic.lcc.database.dialect;

import java.util.List;

/**
 * Provides database-specific SQL syntax for different RDBMS implementations.
 * Supports MySQL and Microsoft SQL Server with identical semantic behavior.
 *
 * <p>This interface abstracts database-specific SQL patterns to enable multi-database support
 * in the LCC application. Each dialect provider implements the SQL syntax specific to
 * its target database while maintaining consistent semantics across all implementations.</p>
 *
 * @author LCC Team
 * @since 1.0
 */
public interface SqlDialectProvider {

    // ========== Metadata ==========

    /**
     * Returns the dialect name (e.g., "MySQL", "MSSQL").
     *
     * @return the name of the database dialect
     */
    String getDialectName();

    /**
     * Returns the JDBC driver class name for this dialect.
     *
     * @return the fully qualified JDBC driver class name
     */
    String getDriverClassName();

    // ========== Pagination ==========

    /**
     * Generates the pagination clause for limiting and offsetting query results.
     *
     * <p>Examples:</p>
     * <ul>
     *   <li>MySQL: {@code LIMIT ? OFFSET ?}</li>
     *   <li>MSSQL: {@code OFFSET ? ROWS FETCH NEXT ? ROWS ONLY}</li>
     * </ul>
     *
     * <p><b>Note:</b> MSSQL requires an ORDER BY clause before OFFSET/FETCH.</p>
     *
     * @param limit  maximum number of rows to return
     * @param offset number of rows to skip
     * @return SQL clause for pagination (without parameter values)
     */
    String buildPaginationClause(int limit, int offset);

    /**
     * Returns parameter values in the correct order for the pagination clause.
     *
     * <p>Parameter order varies by database:</p>
     * <ul>
     *   <li>MySQL: {@code [limit, offset]}</li>
     *   <li>MSSQL: {@code [offset, limit]}</li>
     * </ul>
     *
     * @param limit  maximum number of rows to return
     * @param offset number of rows to skip
     * @return array of parameters in database-specific order
     */
    Object[] getPaginationParameters(int limit, int offset);

    // ========== Upsert Operations ==========

    /**
     * Builds an UPSERT (INSERT or UPDATE) statement.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code INSERT ... ON DUPLICATE KEY UPDATE ...}</li>
     *   <li>MSSQL: {@code MERGE ... WHEN MATCHED THEN UPDATE WHEN NOT MATCHED THEN INSERT ...}</li>
     * </ul>
     *
     * @param tableName     target table name
     * @param uniqueColumns columns that define uniqueness (for matching existing rows)
     * @param insertColumns all columns to insert in a new row
     * @param updateColumns columns to update if row exists
     * @return complete UPSERT SQL statement with placeholders
     */
    String buildUpsertStatement(
            String tableName,
            List<String> uniqueColumns,
            List<String> insertColumns,
            List<String> updateColumns
    );

    /**
     * Builds an INSERT IGNORE statement that inserts only if the row does not exist.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code INSERT IGNORE INTO ...}</li>
     *   <li>MSSQL: {@code MERGE ... WHEN NOT MATCHED THEN INSERT ...}</li>
     * </ul>
     *
     * @param tableName     target table name
     * @param columns       columns to insert
     * @param uniqueColumns columns that define uniqueness (for existence check)
     * @return INSERT IGNORE statement with placeholders
     */
    String buildInsertIgnoreStatement(
            String tableName,
            List<String> columns,
            List<String> uniqueColumns
    );

    // ========== Locking Strategies ==========

    /**
     * Builds SELECT FOR UPDATE with skip locked capability for pessimistic locking.
     *
     * <p>This is critical for {@code CalculationJobRepository} concurrent job processing.</p>
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code SELECT ... FOR UPDATE SKIP LOCKED}</li>
     *   <li>MSSQL: {@code SELECT ... WITH (UPDLOCK, READPAST)}</li>
     * </ul>
     *
     * @param selectStatement base SELECT statement (without locking clause)
     * @return complete statement with pessimistic locking that skips locked rows
     */
    String buildSelectForUpdateSkipLocked(String selectStatement);

    /**
     * Builds standard SELECT FOR UPDATE for pessimistic locking (waits for locks).
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code SELECT ... FOR UPDATE}</li>
     *   <li>MSSQL: {@code SELECT ... WITH (UPDLOCK, ROWLOCK)}</li>
     * </ul>
     *
     * @param selectStatement base SELECT statement (without locking clause)
     * @return complete statement with pessimistic locking
     */
    String buildSelectForUpdate(String selectStatement);

    // ========== Date/Time Functions ==========

    /**
     * Returns the SQL function for getting the current timestamp.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code NOW()}</li>
     *   <li>MSSQL: {@code GETDATE()}</li>
     * </ul>
     *
     * @return SQL function for current timestamp
     */
    String getCurrentTimestamp();

    /**
     * Builds a date subtraction expression.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code DATE_SUB(NOW(), INTERVAL ? DAY)}</li>
     *   <li>MSSQL: {@code DATEADD(DAY, -?, GETDATE())}</li>
     * </ul>
     *
     * @param baseDate base date expression (or null to use current timestamp)
     * @param value    placeholder for number of time units to subtract (e.g., "?")
     * @param unit     time unit (DAY, HOUR, MINUTE, etc.)
     * @return date subtraction expression
     */
    String buildDateSubtraction(String baseDate, String value, DateUnit unit);

    /**
     * Builds a date addition expression.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code DATE_ADD(NOW(), INTERVAL ? DAY)}</li>
     *   <li>MSSQL: {@code DATEADD(DAY, ?, GETDATE())}</li>
     * </ul>
     *
     * @param baseDate base date expression (or null to use current timestamp)
     * @param value    placeholder for number of time units to add (e.g., "?")
     * @param unit     time unit (DAY, HOUR, MINUTE, etc.)
     * @return date addition expression
     */
    String buildDateAddition(String baseDate, String value, DateUnit unit);

    /**
     * Extracts the date part from a datetime expression (ignoring time component).
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code DATE(column)}</li>
     *   <li>MSSQL: {@code CAST(column AS DATE)}</li>
     * </ul>
     *
     * @param columnOrExpression column name or expression to extract date from
     * @return expression that extracts date component
     */
    String extractDate(String columnOrExpression);

    // ========== Auto-increment Reset ==========

    /**
     * Resets the auto-increment counter for a table (primarily used in tests).
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code ALTER TABLE table AUTO_INCREMENT = 1}</li>
     *   <li>MSSQL: {@code DBCC CHECKIDENT ('table', RESEED, 0)}</li>
     * </ul>
     *
     * @param tableName table to reset auto-increment counter
     * @return SQL statement to reset auto-increment
     */
    String buildAutoIncrementReset(String tableName);

    // ========== Geospatial Distance Calculation ==========

    /**
     * Builds a Haversine distance calculation expression.
     *
     * <p>Used in {@code NodeRepository} for finding nearby nodes based on geographic coordinates.
     * Calculates the great-circle distance between two points on Earth's surface.</p>
     *
     * <p>Both MySQL and MSSQL support trigonometric functions (SIN, COS, ACOS, RADIANS),
     * so the implementation is similar across databases.</p>
     *
     * @param lat1 first latitude column or expression
     * @param lng1 first longitude column or expression
     * @param lat2 second latitude column or expression
     * @param lng2 second longitude column or expression
     * @return expression calculating distance in kilometers
     */
    String buildHaversineDistance(String lat1, String lng1, String lat2, String lng2);

    // ========== String/Type Functions ==========

    /**
     * Builds a string concatenation expression.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code CONCAT(a, b, c)}</li>
     *   <li>MSSQL: {@code CONCAT(a, b, c)} (SQL Server 2012+) or {@code a + b + c}</li>
     * </ul>
     *
     * @param expressions expressions to concatenate
     * @return concatenation expression
     */
    String buildConcat(String... expressions);

    /**
     * Converts an expression to string type.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code CAST(x AS CHAR)}</li>
     *   <li>MSSQL: {@code CAST(x AS VARCHAR(MAX))}</li>
     * </ul>
     *
     * @param expression expression to convert to string
     * @return cast-to-string expression
     */
    String castToString(String expression);

    // ========== Bulk Operations ==========

    /**
     * Returns the maximum safe value for LIMIT clause.
     *
     * <p>Used for workarounds in queries that need to skip LIMIT but still use OFFSET.</p>
     * <ul>
     *   <li>MySQL: {@code 18446744073709551615} (BIGINT UNSIGNED max)</li>
     *   <li>MSSQL: {@code 2147483647} (INT max)</li>
     * </ul>
     *
     * @return maximum limit value as string
     */
    String getMaxLimitValue();

    /**
     * Checks if the dialect supports RETURNING clause for INSERT statements.
     *
     * <ul>
     *   <li>MySQL: {@code false} (use LAST_INSERT_ID())</li>
     *   <li>MSSQL: {@code true} (supports OUTPUT INSERTED.id)</li>
     * </ul>
     *
     * @return true if RETURNING clause is supported
     */
    boolean supportsReturningClause();

    /**
     * Builds a RETURNING clause for INSERT statement.
     *
     * <p>MSSQL example: {@code OUTPUT INSERTED.id}</p>
     *
     * @param columns columns to return
     * @return RETURNING clause
     * @throws UnsupportedOperationException if dialect does not support RETURNING
     */
    String buildReturningClause(String... columns);

    // ========== Schema/DDL ==========

    /**
     * Returns the auto-increment column definition for schema creation.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code INT NOT NULL AUTO_INCREMENT}</li>
     *   <li>MSSQL: {@code INT NOT NULL IDENTITY(1,1)}</li>
     * </ul>
     *
     * @return auto-increment column definition
     */
    String getAutoIncrementDefinition();

    /**
     * Returns the timestamp column definition with automatic update capability.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP}</li>
     *   <li>MSSQL: {@code DATETIME2 NOT NULL DEFAULT GETDATE()} (requires trigger for ON UPDATE)</li>
     * </ul>
     *
     * <p><b>Note:</b> For MSSQL, triggers must be created separately to handle ON UPDATE behavior.</p>
     *
     * @return timestamp column definition
     */
    String getTimestampDefinition();

    // ========== Boolean Literals ==========

    /**
     * Returns the SQL literal for boolean TRUE value.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code TRUE}</li>
     *   <li>MSSQL: {@code 1}</li>
     * </ul>
     *
     * @return SQL literal for true
     */
    String getBooleanTrue();

    /**
     * Returns the SQL literal for boolean FALSE value.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code FALSE}</li>
     *   <li>MSSQL: {@code 0}</li>
     * </ul>
     *
     * @return SQL literal for false
     */
    String getBooleanFalse();

    // ========== Identifier Escaping ==========

    /**
     * Escapes a column or table identifier if it conflicts with reserved words.
     *
     * <p>Database-specific implementations:</p>
     * <ul>
     *   <li>MySQL: {@code `identifier`}</li>
     *   <li>MSSQL: {@code [identifier]}</li>
     * </ul>
     *
     * <p>Used for reserved words like "file", "user", "order", etc.</p>
     *
     * @param identifier column or table name to escape
     * @return escaped identifier
     */
    String escapeIdentifier(String identifier);

    // ========== Helper Enums ==========

    /**
     * Time units for date arithmetic operations.
     */
    enum DateUnit {
        /** Year unit */
        YEAR,
        /** Month unit */
        MONTH,
        /** Day unit */
        DAY,
        /** Hour unit */
        HOUR,
        /** Minute unit */
        MINUTE,
        /** Second unit */
        SECOND
    }
}
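The locking helpers are documented as being for concurrent job processing in CalculationJobRepository, which is not part of this excerpt. A hypothetical sketch of how a repository could use them, assuming a calculation_job table with id and status columns:

// Hypothetical sketch; the real CalculationJobRepository is not shown in this changeset.
import java.util.List;
import org.springframework.jdbc.core.JdbcTemplate;

class JobClaimSketch {
    private final JdbcTemplate jdbcTemplate;
    private final SqlDialectProvider dialect;

    JobClaimSketch(JdbcTemplate jdbcTemplate, SqlDialectProvider dialect) {
        this.jdbcTemplate = jdbcTemplate;
        this.dialect = dialect;
    }

    // Must run inside a transaction, otherwise the row locks are released immediately.
    List<Integer> claimPendingJobIds() {
        // MySQL: "SELECT id FROM calculation_job WHERE status = 'PENDING' FOR UPDATE SKIP LOCKED"
        // MSSQL: "SELECT id FROM calculation_job WITH (UPDLOCK, READPAST) WHERE status = 'PENDING'"
        String sql = dialect.buildSelectForUpdateSkipLocked(
                "SELECT id FROM calculation_job WHERE status = 'PENDING'");
        return jdbcTemplate.queryForList(sql, Integer.class);
    }
}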
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories;
 
+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.materials.Material;
 import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
 import de.avatic.lcc.repositories.pagination.SearchQueryResult;
@@ -18,19 +19,21 @@ import java.util.stream.Collectors;
 public class MaterialRepository {
 
     JdbcTemplate jdbcTemplate;
+    SqlDialectProvider dialectProvider;
 
     @Autowired
-    public MaterialRepository(JdbcTemplate jdbcTemplate) {
+    public MaterialRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }
 
-    private static String buildCountQuery(String filter, boolean excludeDeprecated) {
+    private String buildCountQuery(String filter, boolean excludeDeprecated) {
         StringBuilder queryBuilder = new StringBuilder("""
                 SELECT count(*)
                 FROM material WHERE 1=1""");
 
         if (excludeDeprecated) {
-            queryBuilder.append(" AND is_deprecated = FALSE");
+            queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
         }
         if (filter != null) {
             queryBuilder.append(" AND (name LIKE ? OR part_number LIKE ?) ");
@@ -39,18 +42,19 @@ public class MaterialRepository {
         return queryBuilder.toString();
     }
 
-    private static String buildQuery(String filter, boolean excludeDeprecated) {
+    private String buildQuery(String filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
         StringBuilder queryBuilder = new StringBuilder("""
                 SELECT id, name, part_number, normalized_part_number, hs_code, is_deprecated
                 FROM material WHERE 1=1""");
 
         if (excludeDeprecated) {
-            queryBuilder.append(" AND is_deprecated = FALSE");
+            queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
         }
         if (filter != null) {
             queryBuilder.append(" AND (name LIKE ? OR part_number LIKE ? ) ");
         }
-        queryBuilder.append(" ORDER BY normalized_part_number LIMIT ? OFFSET ?");
+        queryBuilder.append(" ORDER BY normalized_part_number ");
+        queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
         return queryBuilder.toString();
     }
@@ -95,20 +99,22 @@ public class MaterialRepository {
 
     @Transactional
     public Optional<Integer> setDeprecatedById(Integer id) {
-        String query = "UPDATE material SET is_deprecated = TRUE WHERE id = ?";
+        String query = "UPDATE material SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
         return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
     }
 
     @Transactional
     public SearchQueryResult<Material> listMaterials(Optional<String> filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
 
-        String query = buildQuery(filter.orElse(null), excludeDeprecated);
+        String query = buildQuery(filter.orElse(null), excludeDeprecated, pagination);
+
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
 
         var materials = filter.isPresent() ?
                 jdbcTemplate.query(query, new MaterialMapper(),
-                        filter.get() + "%", filter.get() + "%", pagination.getLimit(), pagination.getOffset()) :
+                        filter.get() + "%", filter.get() + "%", paginationParams[0], paginationParams[1]) :
                 jdbcTemplate.query(query, new MaterialMapper(),
-                        pagination.getLimit(), pagination.getOffset());
+                        paginationParams[0], paginationParams[1]);
 
         String countQuery = buildCountQuery(filter.orElse(null), excludeDeprecated);
@@ -134,7 +140,7 @@ public class MaterialRepository {
 
     @Transactional
     public Optional<Material> getById(Integer id) {
-        String query = "SELECT * FROM material WHERE id = ? AND is_deprecated = FALSE";
+        String query = "SELECT * FROM material WHERE id = ? AND is_deprecated = " + dialectProvider.getBooleanFalse();
 
         var material = jdbcTemplate.query(query, new MaterialMapper(), id);
@@ -146,7 +152,7 @@ public class MaterialRepository {
 
     @Transactional
     public void deleteById(Integer id) {
-        String deleteQuery = "UPDATE material SET is_deprecated = TRUE WHERE id = ?";
+        String deleteQuery = "UPDATE material SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
         jdbcTemplate.update(deleteQuery, id);
     }
@@ -210,9 +216,9 @@ public class MaterialRepository {
                 .map(id -> "?")
                 .collect(Collectors.joining(","));
 
-        String sql = "UPDATE material SET is_deprecated = TRUE WHERE id IN ("+placeholders+")";
+        String sql = "UPDATE material SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id IN ("+placeholders+")";
 
-        jdbcTemplate.update(sql, ids);
+        jdbcTemplate.update(sql, ids.toArray());
     }
@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories;
|
package de.avatic.lcc.repositories;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.dto.generic.NodeType;
|
import de.avatic.lcc.dto.generic.NodeType;
|
||||||
import de.avatic.lcc.model.db.ValidityTuple;
|
import de.avatic.lcc.model.db.ValidityTuple;
|
||||||
import de.avatic.lcc.model.db.nodes.Node;
|
import de.avatic.lcc.model.db.nodes.Node;
|
||||||
|
|
@ -27,10 +28,12 @@ public class NodeRepository {
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public NodeRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
|
public NodeRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@ -102,11 +105,13 @@ public class NodeRepository {
|
||||||
List<Node> entities = null;
|
List<Node> entities = null;
|
||||||
Integer totalCount = 0;
|
Integer totalCount = 0;
|
||||||
|
|
||||||
|
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
|
||||||
|
|
||||||
if (filter == null) {
|
if (filter == null) {
|
||||||
entities = jdbcTemplate.query(query, new NodeMapper(), pagination.getLimit(), pagination.getOffset());
|
entities = jdbcTemplate.query(query, new NodeMapper(), paginationParams[0], paginationParams[1]);
|
||||||
totalCount = jdbcTemplate.queryForObject(countQuery, Integer.class);
|
totalCount = jdbcTemplate.queryForObject(countQuery, Integer.class);
|
||||||
} else {
|
} else {
|
||||||
entities = jdbcTemplate.query(query, new NodeMapper(), "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", pagination.getLimit(), pagination.getOffset());
|
entities = jdbcTemplate.query(query, new NodeMapper(), "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", paginationParams[0], paginationParams[1]);
|
||||||
totalCount = jdbcTemplate.queryForObject(countQuery, Integer.class, "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", "%" + filter + "%");
|
totalCount = jdbcTemplate.queryForObject(countQuery, Integer.class, "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", "%" + filter + "%");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -122,7 +127,7 @@ public class NodeRepository {
|
||||||
WHERE 1=1""");
|
WHERE 1=1""");
|
||||||
|
|
||||||
if (excludeDeprecated) {
|
if (excludeDeprecated) {
|
||||||
queryBuilder.append(" AND node.is_deprecated = FALSE");
|
queryBuilder.append(" AND node.is_deprecated = ").append(dialectProvider.getBooleanFalse());
|
||||||
}
|
}
|
||||||
if (filter != null) {
|
if (filter != null) {
|
||||||
queryBuilder.append(" AND (node.name LIKE ? OR node.external_mapping_id LIKE ? OR node.address LIKE ? OR country.iso_code LIKE ?)");
|
queryBuilder.append(" AND (node.name LIKE ? OR node.external_mapping_id LIKE ? OR node.address LIKE ? OR country.iso_code LIKE ?)");
|
||||||
|
|
@ -140,21 +145,22 @@ public class NodeRepository {
|
||||||
""");
|
""");
|
||||||
|
|
||||||
if (excludeDeprecated) {
|
if (excludeDeprecated) {
|
||||||
queryBuilder.append(" AND node.is_deprecated = FALSE");
|
queryBuilder.append(" AND node.is_deprecated = ").append(dialectProvider.getBooleanFalse());
|
||||||
}
|
}
|
||||||
if (filter != null) {
|
if (filter != null) {
|
||||||
queryBuilder.append(" AND (node.name LIKE ? OR node.external_mapping_id LIKE ? OR node.address LIKE ? OR country.iso_code LIKE ?)");
|
queryBuilder.append(" AND (node.name LIKE ? OR node.external_mapping_id LIKE ? OR node.address LIKE ? OR country.iso_code LIKE ?)");
|
||||||
}
|
}
|
||||||
queryBuilder.append(" ORDER BY node.id LIMIT ? OFFSET ?");
|
queryBuilder.append(" ORDER BY node.id ");
|
||||||
|
queryBuilder.append(dialectProvider.buildPaginationClause(searchQueryPagination.getLimit(), searchQueryPagination.getOffset()));
|
||||||
return queryBuilder.toString();
|
return queryBuilder.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public Optional<Integer> setDeprecatedById(Integer id) {
|
public Optional<Integer> setDeprecatedById(Integer id) {
|
||||||
String query = "UPDATE node SET is_deprecated = TRUE WHERE id = ?";
|
String query = "UPDATE node SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
|
||||||
|
|
||||||
// Mark all linked RouteNodes as outdated
|
// Mark all linked RouteNodes as outdated
|
||||||
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = TRUE WHERE node_id = ?", id);
|
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = " + dialectProvider.getBooleanTrue() + " WHERE node_id = ?", id);
|
||||||
|
|
||||||
|
|
||||||
return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
|
return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
|
||||||
|
|
@ -169,7 +175,7 @@ public class NodeRepository {
|
||||||
if(node.isUserNode())
|
if(node.isUserNode())
|
||||||
throw new DatabaseException("Cannot update user node in node repository.");
|
throw new DatabaseException("Cannot update user node in node repository.");
|
||||||
|
|
||||||
String updateNodeSql = """
|
String updateNodeSql = String.format("""
|
||||||
UPDATE node SET
|
UPDATE node SET
|
||||||
country_id = ?,
|
country_id = ?,
|
||||||
name = ?,
|
name = ?,
|
||||||
|
|
@ -182,9 +188,9 @@ public class NodeRepository {
|
||||||
geo_lat = ?,
|
geo_lat = ?,
|
||||||
geo_lng = ?,
|
geo_lng = ?,
|
||||||
is_deprecated = ?,
|
is_deprecated = ?,
|
||||||
updated_at = CURRENT_TIMESTAMP
|
updated_at = %s
|
||||||
WHERE id = ?
|
WHERE id = ?
|
||||||
""";
|
""", dialectProvider.getCurrentTimestamp());
|
||||||
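getCurrentTimestamp() in the UPDATE node statement above is the least invasive of these helpers, since both target engines understand the ANSI CURRENT_TIMESTAMP. A sketch of a plausible pair (assumed, not taken from the repository):

```java
// Hypothetical sketch; CURRENT_TIMESTAMP is portable, so the method mostly documents intent.
class MySqlNow { public String getCurrentTimestamp() { return "CURRENT_TIMESTAMP"; } }
class MsSqlNow { public String getCurrentTimestamp() { return "GETDATE()"; } } // CURRENT_TIMESTAMP works here too
```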
|
|
||||||
int rowsUpdated = jdbcTemplate.update(updateNodeSql,
|
int rowsUpdated = jdbcTemplate.update(updateNodeSql,
|
||||||
node.getCountryId(),
|
node.getCountryId(),
|
||||||
|
|
@ -255,7 +261,7 @@ public class NodeRepository {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Mark all linked RouteNodes as outdated
|
// Mark all linked RouteNodes as outdated
|
||||||
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = TRUE WHERE node_id = ?", node.getId());
|
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = " + dialectProvider.getBooleanTrue() + " WHERE node_id = ?", node.getId());
|
||||||
|
|
||||||
// Mark all distance matrix entries as stale
|
// Mark all distance matrix entries as stale
|
||||||
jdbcTemplate.update("UPDATE distance_matrix SET state = 'STALE' WHERE ((from_node_id = ?) OR (to_node_id = ?))", node.getId(), node.getId());
|
jdbcTemplate.update("UPDATE distance_matrix SET state = 'STALE' WHERE ((from_node_id = ?) OR (to_node_id = ?))", node.getId(), node.getId());
|
||||||
|
|
@ -288,11 +294,11 @@ public class NodeRepository {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (nodeType.equals(NodeType.SOURCE)) {
|
if (nodeType.equals(NodeType.SOURCE)) {
|
||||||
queryBuilder.append("is_source = true");
|
queryBuilder.append("is_source = ").append(dialectProvider.getBooleanTrue());
|
||||||
} else if (nodeType.equals(NodeType.DESTINATION)) {
|
} else if (nodeType.equals(NodeType.DESTINATION)) {
|
||||||
queryBuilder.append("is_destination = true");
|
queryBuilder.append("is_destination = ").append(dialectProvider.getBooleanTrue());
|
||||||
} else if (nodeType.equals(NodeType.INTERMEDIATE)) {
|
} else if (nodeType.equals(NodeType.INTERMEDIATE)) {
|
||||||
queryBuilder.append("is_intermediate = true");
|
queryBuilder.append("is_intermediate = ").append(dialectProvider.getBooleanTrue());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -303,11 +309,15 @@ public class NodeRepository {
|
||||||
} else {
|
} else {
|
||||||
queryBuilder.append(" AND ");
|
queryBuilder.append(" AND ");
|
||||||
}
|
}
|
||||||
queryBuilder.append("is_deprecated = false");
|
queryBuilder.append("is_deprecated = ").append(dialectProvider.getBooleanFalse());
|
||||||
}
|
}
|
||||||
|
|
||||||
queryBuilder.append(" LIMIT ?");
|
// MSSQL requires ORDER BY before OFFSET
|
||||||
parameters.add(limit);
|
queryBuilder.append(" ORDER BY id ");
|
||||||
|
queryBuilder.append(dialectProvider.buildPaginationClause(limit, 0));
|
||||||
|
Object[] paginationParams = dialectProvider.getPaginationParameters(limit, 0);
|
||||||
|
parameters.add(paginationParams[0]);
|
||||||
|
parameters.add(paginationParams[1]);
|
||||||
|
|
||||||
return jdbcTemplate.query(queryBuilder.toString(), new NodeMapper(), parameters.toArray());
|
return jdbcTemplate.query(queryBuilder.toString(), new NodeMapper(), parameters.toArray());
|
||||||
}
|
}
|
||||||
|
|
@ -315,7 +325,7 @@ public class NodeRepository {
|
||||||
public List<Node> listAllNodes(boolean onlySources) {
|
public List<Node> listAllNodes(boolean onlySources) {
|
||||||
StringBuilder queryBuilder = new StringBuilder("SELECT * FROM node");
|
StringBuilder queryBuilder = new StringBuilder("SELECT * FROM node");
|
||||||
if (onlySources) {
|
if (onlySources) {
|
||||||
queryBuilder.append(" WHERE is_source = true");
|
queryBuilder.append(" WHERE is_source = ").append(dialectProvider.getBooleanTrue());
|
||||||
}
|
}
|
||||||
queryBuilder.append(" ORDER BY id");
|
queryBuilder.append(" ORDER BY id");
|
||||||
|
|
||||||
|
|
@ -393,40 +403,35 @@ public class NodeRepository {
|
||||||
@Transactional
|
@Transactional
|
||||||
public List<Node> getByDistance(Node node, Integer regionRadius) {
|
public List<Node> getByDistance(Node node, Integer regionRadius) {
|
||||||
|
|
||||||
if(node.isUserNode()) {
|
String haversineFormula = dialectProvider.buildHaversineDistance("geo_lat", "geo_lng", "?", "?");
|
||||||
String query = """
|
|
||||||
SELECT * FROM node
|
|
||||||
WHERE is_deprecated = FALSE AND
|
|
||||||
(
|
|
||||||
6371 * acos(
|
|
||||||
cos(radians(?)) *
|
|
||||||
cos(radians(geo_lat)) *
|
|
||||||
cos(radians(geo_lng) - radians(?)) +
|
|
||||||
sin(radians(?)) *
|
|
||||||
sin(radians(geo_lat))
|
|
||||||
)
|
|
||||||
) <= ?
|
|
||||||
""";
|
|
||||||
|
|
||||||
return jdbcTemplate.query(query, new NodeMapper(), node.getGeoLat(), node.getGeoLng(), node.getGeoLat(), regionRadius);
|
if(node.isUserNode()) {
|
||||||
|
String query = String.format("""
|
||||||
|
SELECT * FROM node
|
||||||
|
WHERE is_deprecated = %s AND
|
||||||
|
(%s) <= ?
|
||||||
|
""", dialectProvider.getBooleanFalse(), haversineFormula);
|
||||||
|
|
||||||
|
return jdbcTemplate.query(query, new NodeMapper(),
|
||||||
|
node.getGeoLat(), // for COS(RADIANS(?))
|
||||||
|
node.getGeoLng(), // for COS(RADIANS(?) - RADIANS(geo_lng))
|
||||||
|
node.getGeoLat(), // for SIN(RADIANS(?))
|
||||||
|
regionRadius); // for <= ?
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
String query = """
|
String query = String.format("""
|
||||||
SELECT * FROM node
|
SELECT * FROM node
|
||||||
WHERE is_deprecated = FALSE AND id != ? AND
|
WHERE is_deprecated = %s AND id != ? AND
|
||||||
(
|
(%s) <= ?
|
||||||
6371 * acos(
|
""", dialectProvider.getBooleanFalse(), haversineFormula);
|
||||||
cos(radians(?)) *
|
|
||||||
cos(radians(geo_lat)) *
|
|
||||||
cos(radians(geo_lng) - radians(?)) +
|
|
||||||
sin(radians(?)) *
|
|
||||||
sin(radians(geo_lat))
|
|
||||||
)
|
|
||||||
) <= ?
|
|
||||||
""";
|
|
||||||
|
|
||||||
return jdbcTemplate.query(query, new NodeMapper(), node.getId(), node.getGeoLat(), node.getGeoLng(), node.getGeoLat(), regionRadius);
|
return jdbcTemplate.query(query, new NodeMapper(),
|
||||||
|
node.getId(), // for id != ?
|
||||||
|
node.getGeoLat(), // for COS(RADIANS(?))
|
||||||
|
node.getGeoLng(), // for COS(RADIANS(?) - RADIANS(geo_lng))
|
||||||
|
node.getGeoLat(), // for SIN(RADIANS(?))
|
||||||
|
regionRadius); // for <= ?
|
||||||
}
|
}
|
||||||
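getByDistance above replaces the inline 6371 * acos(...) radius filter with dialectProvider.buildHaversineDistance("geo_lat", "geo_lng", "?", "?"), so the latitude placeholder is spliced in twice and the bind order (lat, lng, lat) stays the same as before. Strictly speaking the expression is the spherical law of cosines rather than a haversine, but it reproduces what the old inline SQL computed. A sketch of a rendering that would work on both MySQL and SQL Server (both expose ACOS, COS, SIN and RADIANS), assuming this is roughly what the provider emits:

```java
// Hypothetical sketch: renders the same great-circle distance (in km) the old inline SQL computed.
class GreatCircleSql {
    public String buildHaversineDistance(String latColumn, String lngColumn, String latParam, String lngParam) {
        // 6371 is the mean Earth radius in kilometres; latParam appears twice, matching the bind order lat, lng, lat.
        return "6371 * ACOS("
                + "COS(RADIANS(" + latParam + ")) * COS(RADIANS(" + latColumn + ")) * "
                + "COS(RADIANS(" + lngColumn + ") - RADIANS(" + lngParam + ")) + "
                + "SIN(RADIANS(" + latParam + ")) * SIN(RADIANS(" + latColumn + "))"
                + ")";
    }
}
```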
|
|
||||||
|
|
||||||
|
|
@ -441,12 +446,12 @@ public class NodeRepository {
|
||||||
* Returns an empty list if no outbound nodes are found.
|
* Returns an empty list if no outbound nodes are found.
|
||||||
*/
|
*/
|
||||||
public List<Node> getAllOutboundFor(Integer countryId) {
|
public List<Node> getAllOutboundFor(Integer countryId) {
|
||||||
String query = """
|
String query = String.format("""
|
||||||
SELECT node.*
|
SELECT node.*
|
||||||
FROM node
|
FROM node
|
||||||
LEFT JOIN outbound_country_mapping ON outbound_country_mapping.node_id = node.id
|
LEFT JOIN outbound_country_mapping ON outbound_country_mapping.node_id = node.id
|
||||||
WHERE node.is_deprecated = FALSE AND (outbound_country_mapping.country_id = ? OR (node.is_intermediate = TRUE AND node.country_id = ?))
|
WHERE node.is_deprecated = %s AND (outbound_country_mapping.country_id = ? OR (node.is_intermediate = %s AND node.country_id = ?))
|
||||||
""";
|
""", dialectProvider.getBooleanFalse(), dialectProvider.getBooleanTrue());
|
||||||
|
|
||||||
return jdbcTemplate.query(query, new NodeMapper(), countryId, countryId);
|
return jdbcTemplate.query(query, new NodeMapper(), countryId, countryId);
|
||||||
}
|
}
|
||||||
|
|
@ -472,7 +477,7 @@ public class NodeRepository {
|
||||||
|
|
||||||
public Optional<Node> getByDestinationId(Integer id) {
|
public Optional<Node> getByDestinationId(Integer id) {
|
||||||
|
|
||||||
String query = "SELECT node.* FROM node INNER JOIN premise_destination WHERE node.id = premise_destination.destination_node_id AND premise_destination.id = ?";
|
String query = "SELECT node.* FROM node INNER JOIN premise_destination ON node.id = premise_destination.destination_node_id WHERE premise_destination.id = ?";
|
||||||
|
|
||||||
var node = jdbcTemplate.query(query, new NodeMapper(), id);
|
var node = jdbcTemplate.query(query, new NodeMapper(), id);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
package de.avatic.lcc.repositories;
|
package de.avatic.lcc.repositories;
|
||||||
|
|
||||||
import de.avatic.lcc.service.api.EUTaxationApiService;
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
import org.springframework.stereotype.Repository;
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
|
|
@ -10,19 +10,24 @@ import java.util.List;
|
||||||
public class NomenclatureRepository {
|
public class NomenclatureRepository {
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
private final EUTaxationApiService eUTaxationApiService;
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public NomenclatureRepository(JdbcTemplate jdbcTemplate, EUTaxationApiService eUTaxationApiService) {
|
public NomenclatureRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
this.eUTaxationApiService = eUTaxationApiService;
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<String> searchHsCode(String search) {
|
public List<String> searchHsCode(String search) {
|
||||||
String sql = """
|
String concatExpression = dialectProvider.buildConcat("?", "'%'");
|
||||||
SELECT hs_code FROM nomenclature WHERE hs_code LIKE CONCAT(?, '%') LIMIT 10
|
String sql = String.format(
|
||||||
""";
|
"SELECT hs_code FROM nomenclature WHERE hs_code LIKE %s ORDER BY hs_code %s",
|
||||||
|
concatExpression,
|
||||||
|
dialectProvider.buildPaginationClause(10, 0)
|
||||||
|
);
|
||||||
|
|
||||||
return jdbcTemplate.queryForList (sql, String.class, search);
|
Object[] paginationParams = dialectProvider.getPaginationParameters(10, 0);
|
||||||
|
|
||||||
|
return jdbcTemplate.queryForList(sql, String.class, search, paginationParams[0], paginationParams[1]);
|
||||||
}
|
}
|
||||||
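buildConcat("?", "'%'") in searchHsCode turns the former LIKE CONCAT(?, '%') prefix match into a dialect-neutral expression. A sketch of the two obvious renderings (assumptions, not repository code):

```java
// Hypothetical sketch of the string concatenation helper used for the LIKE prefix match.
class MySqlConcat {
    public String buildConcat(String left, String right) {
        return "CONCAT(" + left + ", " + right + ")";     // hs_code LIKE CONCAT(?, '%')
    }
}

class MsSqlConcat {
    public String buildConcat(String left, String right) {
        return "(" + left + " + " + right + ")";          // hs_code LIKE (? + '%'); CONCAT() also exists since SQL Server 2012
    }
}
```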
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories.bulk;
|
package de.avatic.lcc.repositories.bulk;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.dto.bulk.BulkFileType;
|
import de.avatic.lcc.dto.bulk.BulkFileType;
|
||||||
import de.avatic.lcc.dto.bulk.BulkOperationState;
|
import de.avatic.lcc.dto.bulk.BulkOperationState;
|
||||||
import de.avatic.lcc.dto.bulk.BulkProcessingType;
|
import de.avatic.lcc.dto.bulk.BulkProcessingType;
|
||||||
|
|
@ -24,9 +25,11 @@ public class BulkOperationRepository {
|
||||||
|
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public BulkOperationRepository(JdbcTemplate jdbcTemplate) {
|
public BulkOperationRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@ -34,10 +37,10 @@ public class BulkOperationRepository {
|
||||||
|
|
||||||
removeOld(operation.getUserId());
|
removeOld(operation.getUserId());
|
||||||
|
|
||||||
String sql = """
|
String sql = String.format("""
|
||||||
INSERT INTO bulk_operation (user_id, bulk_file_type, bulk_processing_type, state, file, validity_period_id)
|
INSERT INTO bulk_operation (user_id, bulk_file_type, bulk_processing_type, state, %s, validity_period_id)
|
||||||
VALUES (?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?)
|
||||||
""";
|
""", dialectProvider.escapeIdentifier("file"));
|
||||||
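escapeIdentifier("file") quotes the file column of bulk_operation (and later of sys_error_trace_item), presumably because FILE is a reserved word on SQL Server. A sketch of the quoting each dialect would likely use; only the method name is taken from the diff:

```java
// Hypothetical sketch of identifier quoting for reserved column names such as "file".
class MySqlIdentifiers {
    public String escapeIdentifier(String identifier) { return "`" + identifier + "`"; }   // backticks
}

class MsSqlIdentifiers {
    public String escapeIdentifier(String identifier) { return "[" + identifier + "]"; }   // square brackets
}
```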
|
|
||||||
GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
|
GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
|
||||||
|
|
||||||
|
|
@ -66,43 +69,49 @@ public class BulkOperationRepository {
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public void removeOld(Integer userId) {
|
public void removeOld(Integer userId) {
|
||||||
// First, update sys_error records to set bulk_operation_id to NULL
|
// First, fetch the IDs of the 10 newest operations to keep
|
||||||
// for bulk operations that will be deleted (all but the 10 newest for the current user)
|
// (MySQL doesn't support LIMIT in IN/NOT IN subqueries)
|
||||||
String updateErrorsSql = """
|
String fetchNewestSql = "SELECT id FROM bulk_operation WHERE user_id = ? AND state NOT IN ('SCHEDULED', 'PROCESSING') ORDER BY created_at DESC " +
|
||||||
|
dialectProvider.buildPaginationClause(10, 0);
|
||||||
|
Object[] paginationParams = dialectProvider.getPaginationParameters(10, 0);
|
||||||
|
Object[] fetchParams = new Object[]{userId, paginationParams[0], paginationParams[1]};
|
||||||
|
|
||||||
|
List<Integer> newestIds = jdbcTemplate.queryForList(fetchNewestSql, Integer.class, fetchParams);
|
||||||
|
|
||||||
|
// The fetch above is capped at 10 rows, so fewer than 10 results means there is nothing older to delete
|
||||||
|
if (newestIds.size() < 10) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build comma-separated list of IDs to keep
|
||||||
|
String idsToKeep = newestIds.stream()
|
||||||
|
.map(String::valueOf)
|
||||||
|
.reduce((a, b) -> a + "," + b)
|
||||||
|
.orElse("0");
|
||||||
|
|
||||||
|
// Update sys_error records to set bulk_operation_id to NULL for operations that will be deleted
|
||||||
|
String updateErrorsSql = String.format("""
|
||||||
UPDATE sys_error
|
UPDATE sys_error
|
||||||
SET bulk_operation_id = NULL
|
SET bulk_operation_id = NULL
|
||||||
WHERE bulk_operation_id IN (
|
WHERE bulk_operation_id IN (
|
||||||
SELECT id FROM (
|
SELECT id FROM bulk_operation
|
||||||
SELECT id
|
|
||||||
FROM bulk_operation
|
|
||||||
WHERE user_id = ?
|
WHERE user_id = ?
|
||||||
AND state NOT IN ('SCHEDULED', 'PROCESSING')
|
AND state NOT IN ('SCHEDULED', 'PROCESSING')
|
||||||
ORDER BY created_at DESC
|
AND id NOT IN (%s)
|
||||||
LIMIT 18446744073709551615 OFFSET 10
|
|
||||||
) AS old_operations
|
|
||||||
)
|
)
|
||||||
""";
|
""", idsToKeep);
|
||||||
|
|
||||||
jdbcTemplate.update(updateErrorsSql, userId);
|
jdbcTemplate.update(updateErrorsSql, userId);
|
||||||
|
|
||||||
// Then delete the old bulk_operation entries (keeping only the 10 newest for the current user)
|
// Delete the old bulk_operation entries (keeping only the 10 newest for the current user)
|
||||||
String deleteBulkSql = """
|
String deleteBulkSql = String.format("""
|
||||||
DELETE FROM bulk_operation
|
DELETE FROM bulk_operation
|
||||||
WHERE user_id = ?
|
WHERE user_id = ?
|
||||||
AND state NOT IN ('SCHEDULED', 'PROCESSING')
|
AND state NOT IN ('SCHEDULED', 'PROCESSING')
|
||||||
AND id NOT IN (
|
AND id NOT IN (%s)
|
||||||
SELECT id FROM (
|
""", idsToKeep);
|
||||||
SELECT id
|
|
||||||
FROM bulk_operation
|
|
||||||
WHERE user_id = ?
|
|
||||||
AND state NOT IN ('SCHEDULED', 'PROCESSING')
|
|
||||||
ORDER BY created_at DESC
|
|
||||||
LIMIT 10
|
|
||||||
) AS newest_operations
|
|
||||||
)
|
|
||||||
""";
|
|
||||||
|
|
||||||
jdbcTemplate.update(deleteBulkSql, userId, userId);
|
jdbcTemplate.update(deleteBulkSql, userId);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@ -121,33 +130,44 @@ public class BulkOperationRepository {
|
||||||
|
|
||||||
cleanupTimeouts(userId);
|
cleanupTimeouts(userId);
|
||||||
|
|
||||||
String sql = """
|
String baseQuery = """
|
||||||
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, created_at, validity_period_id
|
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, created_at, validity_period_id
|
||||||
FROM bulk_operation
|
FROM bulk_operation
|
||||||
WHERE user_id = ?
|
WHERE user_id = ?
|
||||||
|
ORDER BY created_at DESC
|
||||||
ORDER BY created_at DESC LIMIT 10
|
|
||||||
""";
|
""";
|
||||||
|
|
||||||
return jdbcTemplate.query(sql, new BulkOperationRowMapper(true), userId);
|
String sql = baseQuery + dialectProvider.buildPaginationClause(10, 0);
|
||||||
|
Object[] paginationParams = dialectProvider.getPaginationParameters(10, 0);
|
||||||
|
|
||||||
|
// Combine userId with pagination params
|
||||||
|
Object[] allParams = new Object[]{userId, paginationParams[0], paginationParams[1]};
|
||||||
|
|
||||||
|
return jdbcTemplate.query(sql, new BulkOperationRowMapper(true), allParams);
|
||||||
}
|
}
|
||||||
|
|
||||||
private void cleanupTimeouts(Integer userId) {
|
private void cleanupTimeouts(Integer userId) {
|
||||||
|
|
||||||
String sql = """
|
// Build date subtraction expression (60 minutes ago)
|
||||||
UPDATE bulk_operation SET state = 'EXCEPTION' WHERE user_id = ? AND (state = 'PROCESSING' OR state = 'SCHEDULED') AND created_at < NOW() - INTERVAL 60 MINUTE
|
String dateCondition = dialectProvider.buildDateSubtraction(null, "60", SqlDialectProvider.DateUnit.MINUTE);
|
||||||
""";
|
|
||||||
|
String sql = String.format("""
|
||||||
|
UPDATE bulk_operation SET state = 'EXCEPTION'
|
||||||
|
WHERE user_id = ?
|
||||||
|
AND (state = 'PROCESSING' OR state = 'SCHEDULED')
|
||||||
|
AND created_at < %s
|
||||||
|
""", dateCondition);
|
||||||
|
|
||||||
jdbcTemplate.update(sql, userId);
|
jdbcTemplate.update(sql, userId);
|
||||||
}
|
}
|
||||||
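cleanupTimeouts builds its "60 minutes ago" cutoff through buildDateSubtraction(null, "60", SqlDialectProvider.DateUnit.MINUTE); the same helper appears later with DateUnit.DAY in getFailedJobByUserId. Interpreting the null base expression as "from the current time", a sketch might look like this (the DateUnit values beyond MINUTE and DAY are assumptions):

```java
// Hypothetical sketch; the real SqlDialectProvider.DateUnit enum is only referenced, never shown, in this diff.
enum DateUnit { MINUTE, HOUR, DAY }

class MySqlDates {
    public String buildDateSubtraction(String baseExpr, String amount, DateUnit unit) {
        String base = (baseExpr != null) ? baseExpr : "NOW()";
        return base + " - INTERVAL " + amount + " " + unit.name();            // NOW() - INTERVAL 60 MINUTE
    }
}

class MsSqlDates {
    public String buildDateSubtraction(String baseExpr, String amount, DateUnit unit) {
        String base = (baseExpr != null) ? baseExpr : "GETDATE()";
        return "DATEADD(" + unit.name() + ", -" + amount + ", " + base + ")"; // DATEADD(MINUTE, -60, GETDATE())
    }
}
```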
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public Optional<BulkOperation> getOperationById(Integer id) {
|
public Optional<BulkOperation> getOperationById(Integer id) {
|
||||||
String sql = """
|
String sql = String.format("""
|
||||||
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, file, created_at, validity_period_id
|
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, %s, created_at, validity_period_id
|
||||||
FROM bulk_operation
|
FROM bulk_operation
|
||||||
WHERE id = ?
|
WHERE id = ?
|
||||||
""";
|
""", dialectProvider.escapeIdentifier("file"));
|
||||||
|
|
||||||
List<BulkOperation> results = jdbcTemplate.query(sql, new BulkOperationRowMapper(false), id);
|
List<BulkOperation> results = jdbcTemplate.query(sql, new BulkOperationRowMapper(false), id);
|
||||||
|
|
||||||
|
|
@ -156,11 +176,11 @@ public class BulkOperationRepository {
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public void update(BulkOperation op) {
|
public void update(BulkOperation op) {
|
||||||
String sql = """
|
String sql = String.format("""
|
||||||
UPDATE bulk_operation
|
UPDATE bulk_operation
|
||||||
SET user_id = ?, bulk_file_type = ?, state = ?, file = ?, validity_period_id = ?
|
SET user_id = ?, bulk_file_type = ?, state = ?, %s = ?, validity_period_id = ?
|
||||||
WHERE id = ?
|
WHERE id = ?
|
||||||
""";
|
""", dialectProvider.escapeIdentifier("file"));
|
||||||
|
|
||||||
jdbcTemplate.update(sql,
|
jdbcTemplate.update(sql,
|
||||||
op.getUserId(),
|
op.getUserId(),
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories.calculation;
|
package de.avatic.lcc.repositories.calculation;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.model.db.calculations.CalculationJob;
|
import de.avatic.lcc.model.db.calculations.CalculationJob;
|
||||||
import de.avatic.lcc.model.db.calculations.CalculationJobPriority;
|
import de.avatic.lcc.model.db.calculations.CalculationJobPriority;
|
||||||
import de.avatic.lcc.model.db.calculations.CalculationJobState;
|
import de.avatic.lcc.model.db.calculations.CalculationJobState;
|
||||||
|
|
@ -18,9 +19,11 @@ import java.util.Optional;
|
||||||
@Repository
|
@Repository
|
||||||
public class CalculationJobRepository {
|
public class CalculationJobRepository {
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public CalculationJobRepository(JdbcTemplate jdbcTemplate) {
|
public CalculationJobRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@ -63,7 +66,8 @@ public class CalculationJobRepository {
|
||||||
*/
|
*/
|
||||||
@Transactional
|
@Transactional
|
||||||
public Optional<CalculationJob> fetchAndLockNextJob() {
|
public Optional<CalculationJob> fetchAndLockNextJob() {
|
||||||
String sql = """
|
// Build base query with ORDER BY (required for OFFSET/FETCH in MSSQL)
|
||||||
|
String baseQuery = """
|
||||||
SELECT * FROM calculation_job
|
SELECT * FROM calculation_job
|
||||||
WHERE (job_state = 'CREATED')
|
WHERE (job_state = 'CREATED')
|
||||||
OR (job_state = 'EXCEPTION' AND retries < 3)
|
OR (job_state = 'EXCEPTION' AND retries < 3)
|
||||||
|
|
@ -75,11 +79,18 @@ public class CalculationJobRepository {
|
||||||
WHEN job_state = 'EXCEPTION' THEN 4
|
WHEN job_state = 'EXCEPTION' THEN 4
|
||||||
END,
|
END,
|
||||||
calculation_date
|
calculation_date
|
||||||
LIMIT 1
|
|
||||||
FOR UPDATE SKIP LOCKED
|
|
||||||
""";
|
""";
|
||||||
|
|
||||||
var jobs = jdbcTemplate.query(sql, new CalculationJobMapper());
|
// Add pagination (LIMIT 1 OFFSET 0)
|
||||||
|
String paginatedQuery = baseQuery + " " + dialectProvider.buildPaginationClause(1, 0);
|
||||||
|
|
||||||
|
// Add pessimistic locking with skip locked
|
||||||
|
String sql = dialectProvider.buildSelectForUpdateSkipLocked(paginatedQuery);
|
||||||
|
|
||||||
|
// Get pagination parameters in correct order for the database
|
||||||
|
Object[] params = dialectProvider.getPaginationParameters(1, 0);
|
||||||
|
|
||||||
|
var jobs = jdbcTemplate.query(sql, new CalculationJobMapper(), params);
|
||||||
|
|
||||||
if (jobs.isEmpty()) {
|
if (jobs.isEmpty()) {
|
||||||
return Optional.empty();
|
return Optional.empty();
|
||||||
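fetchAndLockNextJob above splits the old LIMIT 1 FOR UPDATE SKIP LOCKED tail into the pagination clause plus buildSelectForUpdateSkipLocked, so each worker still claims one job without blocking on rows another worker has already locked. A sketch of how the two dialects could express that; the SQL Server variant is simplified and assumes a single-table FROM clause:

```java
// Hypothetical sketch of the row-claiming wrapper; not the project's actual provider code.
class MySqlLocking {
    public String buildSelectForUpdateSkipLocked(String selectQuery) {
        return selectQuery + " FOR UPDATE SKIP LOCKED";   // MySQL 8.0+ / MariaDB 10.6+
    }
}

class MsSqlLocking {
    public String buildSelectForUpdateSkipLocked(String selectQuery) {
        // SQL Server expresses the same intent with table hints placed directly after the table name.
        return selectQuery.replaceFirst("(?i)FROM\\s+(\\w+)", "FROM $1 WITH (UPDLOCK, READPAST, ROWLOCK)");
    }
}
```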
|
|
@ -151,9 +162,14 @@ public class CalculationJobRepository {
|
||||||
public Optional<CalculationJob> getCalculationJobWithJobStateValid(Integer periodId, Integer setId, Integer nodeId, Integer materialId) {
|
public Optional<CalculationJob> getCalculationJobWithJobStateValid(Integer periodId, Integer setId, Integer nodeId, Integer materialId) {
|
||||||
|
|
||||||
/* there should only be one job per period id, node id and material id combination */
|
/* there should only be one job per period id, node id and material id combination */
|
||||||
String query = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC LIMIT 1";
|
String baseQuery = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC ";
|
||||||
|
String query = baseQuery + dialectProvider.buildPaginationClause(1, 0);
|
||||||
|
Object[] params = dialectProvider.getPaginationParameters(1, 0);
|
||||||
|
|
||||||
var job = jdbcTemplate.query(query, new CalculationJobMapper(), periodId, setId, nodeId, materialId);
|
// Combine business logic params with pagination params
|
||||||
|
Object[] allParams = new Object[]{periodId, setId, nodeId, materialId, params[0], params[1]};
|
||||||
|
|
||||||
|
var job = jdbcTemplate.query(query, new CalculationJobMapper(), allParams);
|
||||||
|
|
||||||
if (job.isEmpty())
|
if (job.isEmpty())
|
||||||
return Optional.empty();
|
return Optional.empty();
|
||||||
|
|
@ -165,9 +181,14 @@ public class CalculationJobRepository {
|
||||||
public Optional<CalculationJob> getCalculationJobWithJobStateValidUserNodeId(Integer periodId, Integer setId, Integer userNodeId, Integer materialId) {
|
public Optional<CalculationJob> getCalculationJobWithJobStateValidUserNodeId(Integer periodId, Integer setId, Integer userNodeId, Integer materialId) {
|
||||||
|
|
||||||
/* there should only be one job per period id, node id and material id combination */
|
/* there should only be one job per period id, node id and material id combination */
|
||||||
String query = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.user_supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC LIMIT 1";
|
String baseQuery = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.user_supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC ";
|
||||||
|
String query = baseQuery + dialectProvider.buildPaginationClause(1, 0);
|
||||||
|
Object[] params = dialectProvider.getPaginationParameters(1, 0);
|
||||||
|
|
||||||
var job = jdbcTemplate.query(query, new CalculationJobMapper(), periodId, setId, userNodeId, materialId);
|
// Combine business logic params with pagination params
|
||||||
|
Object[] allParams = new Object[]{periodId, setId, userNodeId, materialId, params[0], params[1]};
|
||||||
|
|
||||||
|
var job = jdbcTemplate.query(query, new CalculationJobMapper(), allParams);
|
||||||
|
|
||||||
if (job.isEmpty())
|
if (job.isEmpty())
|
||||||
return Optional.empty();
|
return Optional.empty();
|
||||||
|
|
@ -211,8 +232,14 @@ public class CalculationJobRepository {
|
||||||
@Transactional
|
@Transactional
|
||||||
public CalculationJobState getLastStateFor(Integer premiseId) {
|
public CalculationJobState getLastStateFor(Integer premiseId) {
|
||||||
|
|
||||||
String sql = "SELECT job_state FROM calculation_job WHERE premise_id = ? ORDER BY calculation_date DESC LIMIT 1";
|
String baseQuery = "SELECT job_state FROM calculation_job WHERE premise_id = ? ORDER BY calculation_date DESC ";
|
||||||
var result = jdbcTemplate.query(sql, (rs, rowNum) -> CalculationJobState.valueOf(rs.getString("job_state")), premiseId);
|
String sql = baseQuery + dialectProvider.buildPaginationClause(1, 0);
|
||||||
|
Object[] params = dialectProvider.getPaginationParameters(1, 0);
|
||||||
|
|
||||||
|
// Combine business logic params with pagination params
|
||||||
|
Object[] allParams = new Object[]{premiseId, params[0], params[1]};
|
||||||
|
|
||||||
|
var result = jdbcTemplate.query(sql, (rs, rowNum) -> CalculationJobState.valueOf(rs.getString("job_state")), allParams);
|
||||||
|
|
||||||
if (result.isEmpty())
|
if (result.isEmpty())
|
||||||
return null;
|
return null;
|
||||||
|
|
@ -227,9 +254,13 @@ public class CalculationJobRepository {
|
||||||
|
|
||||||
public Integer getFailedJobByUserId(Integer userId) {
|
public Integer getFailedJobByUserId(Integer userId) {
|
||||||
|
|
||||||
String sql = "SELECT COUNT(*) FROM calculation_job WHERE user_id = ? AND job_state = 'EXCEPTION' AND calculation_date > DATE_SUB(NOW(), INTERVAL 3 DAY)";
|
// Build date subtraction expression using dialect provider
|
||||||
|
String dateCondition = dialectProvider.buildDateSubtraction(null, "3", SqlDialectProvider.DateUnit.DAY);
|
||||||
|
|
||||||
|
String sql = String.format(
|
||||||
|
"SELECT COUNT(*) FROM calculation_job WHERE user_id = ? AND job_state = 'EXCEPTION' AND calculation_date > %s",
|
||||||
|
dateCondition
|
||||||
|
);
|
||||||
|
|
||||||
return jdbcTemplate.queryForObject(sql, Integer.class, userId);
|
return jdbcTemplate.queryForObject(sql, Integer.class, userId);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories.country;
|
package de.avatic.lcc.repositories.country;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.dto.generic.PropertyDTO;
|
import de.avatic.lcc.dto.generic.PropertyDTO;
|
||||||
import de.avatic.lcc.model.db.properties.CountryPropertyMappingId;
|
import de.avatic.lcc.model.db.properties.CountryPropertyMappingId;
|
||||||
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
|
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
|
||||||
|
|
@ -20,9 +21,11 @@ public class CountryPropertyRepository {
|
||||||
|
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public CountryPropertyRepository(JdbcTemplate jdbcTemplate) {
|
public CountryPropertyRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@ -44,11 +47,14 @@ public class CountryPropertyRepository {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
String query = """
|
String query = dialectProvider.buildUpsertStatement(
|
||||||
INSERT INTO country_property (property_value, country_id, country_property_type_id, property_set_id) VALUES (?, ?, ?, ?) ON DUPLICATE KEY UPDATE property_value = ?
|
"country_property",
|
||||||
""";
|
List.of("property_set_id", "country_property_type_id", "country_id"),
|
||||||
|
List.of("property_value", "country_id", "country_property_type_id", "property_set_id"),
|
||||||
|
List.of("property_value")
|
||||||
|
);
|
||||||
|
|
||||||
int affectedRows = jdbcTemplate.update(query, value, countryId, typeId, setId, value);
|
int affectedRows = jdbcTemplate.update(query, value, countryId, typeId, setId);
|
||||||
|
|
||||||
if(!(affectedRows > 0))
|
if(!(affectedRows > 0))
|
||||||
throw new DatabaseException("Could not update property value for country " + countryId + " and property type " + mappingId);
|
throw new DatabaseException("Could not update property value for country " + countryId + " and property type " + mappingId);
|
||||||
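buildUpsertStatement replaces the MySQL-only INSERT ... ON DUPLICATE KEY UPDATE. Reading the call site above as (table, conflict key columns, insert columns, update columns), and noting that the bound parameter count drops from five to four because the update side can reuse the inserted value, a sketch of the MySQL rendering might look as follows; the SQL Server target is indicated in a comment, and both are assumptions about a provider that is not part of this diff.

```java
import java.util.List;
import java.util.stream.Collectors;

// Hypothetical MySQL rendering of buildUpsertStatement(table, keyColumns, insertColumns, updateColumns).
class MySqlUpsert {
    public String buildUpsertStatement(String table, List<String> keyColumns,
                                       List<String> insertColumns, List<String> updateColumns) {
        String cols = String.join(", ", insertColumns);
        String placeholders = insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
        // VALUES(col) reuses the bound insert value, so no extra parameter is needed for the update part.
        String updates = updateColumns.stream()
                .map(c -> c + " = VALUES(" + c + ")")
                .collect(Collectors.joining(", "));
        // keyColumns are implied by the table's unique index on MySQL; they matter for a MERGE-based dialect.
        return "INSERT INTO " + table + " (" + cols + ") VALUES (" + placeholders + ")"
                + " ON DUPLICATE KEY UPDATE " + updates;
        // A SQL Server provider would typically emit a MERGE keyed on keyColumns instead, e.g.
        //   MERGE country_property AS t USING (SELECT ? AS property_value, ...) AS s
        //     ON t.property_set_id = s.property_set_id AND ...
        //   WHEN MATCHED THEN UPDATE ... WHEN NOT MATCHED THEN INSERT ...
    }
}
```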
|
|
@ -144,7 +150,6 @@ public class CountryPropertyRepository {
|
||||||
type.external_mapping_id as externalMappingId,
|
type.external_mapping_id as externalMappingId,
|
||||||
type.validation_rule as validationRule,
|
type.validation_rule as validationRule,
|
||||||
type.is_required as is_required,
|
type.is_required as is_required,
|
||||||
type.is_required as is_required,
|
|
||||||
type.description as description,
|
type.description as description,
|
||||||
type.property_group as propertyGroup,
|
type.property_group as propertyGroup,
|
||||||
type.sequence_number as sequenceNumber,
|
type.sequence_number as sequenceNumber,
|
||||||
|
|
@ -153,8 +158,10 @@ public class CountryPropertyRepository {
|
||||||
FROM country_property_type AS type
|
FROM country_property_type AS type
|
||||||
LEFT JOIN country_property AS cp ON cp.country_property_type_id = type.id AND cp.country_id = ?
|
LEFT JOIN country_property AS cp ON cp.country_property_type_id = type.id AND cp.country_id = ?
|
||||||
LEFT JOIN property_set AS ps ON ps.id = cp.property_set_id AND ps.state IN ('DRAFT', 'VALID')
|
LEFT JOIN property_set AS ps ON ps.id = cp.property_set_id AND ps.state IN ('DRAFT', 'VALID')
|
||||||
GROUP BY type.id, type.name, type.data_type, type.external_mapping_id, type.validation_rule
|
GROUP BY type.id, type.name, type.data_type, type.external_mapping_id, type.validation_rule,
|
||||||
HAVING draftValue IS NOT NULL OR validValue IS NOT NULL;
|
type.is_required, type.description, type.property_group, type.sequence_number
|
||||||
|
HAVING MAX(CASE WHEN ps.state = 'DRAFT' THEN cp.property_value END) IS NOT NULL
|
||||||
|
OR MAX(CASE WHEN ps.state = 'VALID' THEN cp.property_value END) IS NOT NULL;
|
||||||
""";
|
""";
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -184,9 +191,13 @@ public class CountryPropertyRepository {
|
||||||
LEFT JOIN country_property AS property ON property.country_property_type_id = type.id
|
LEFT JOIN country_property AS property ON property.country_property_type_id = type.id
|
||||||
LEFT JOIN property_set AS propertySet ON propertySet.id = property.property_set_id WHERE propertySet.state = 'VALID'""";
|
LEFT JOIN property_set AS propertySet ON propertySet.id = property.property_set_id WHERE propertySet.state = 'VALID'""";
|
||||||
|
|
||||||
|
String insertQuery = dialectProvider.buildInsertIgnoreStatement(
|
||||||
|
"country_property",
|
||||||
|
List.of("property_value", "country_id", "country_property_type_id", "property_set_id"),
|
||||||
|
List.of("property_set_id", "country_property_type_id", "country_id")
|
||||||
|
);
|
||||||
|
|
||||||
jdbcTemplate.query(query, (rs, rowNum) -> {
|
jdbcTemplate.query(query, (rs, rowNum) -> {
|
||||||
String insertQuery = "INSERT IGNORE INTO country_property (property_value, country_id, country_property_type_id, property_set_id) VALUES (?, ?, ?, ?)";
|
|
||||||
jdbcTemplate.update(insertQuery, rs.getString("value"), rs.getInt("country_id"), rs.getInt("typeId"), setId);
|
jdbcTemplate.update(insertQuery, rs.getString("value"), rs.getInt("country_id"), rs.getInt("typeId"), setId);
|
||||||
return null;
|
return null;
|
||||||
});
|
});
|
||||||
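buildInsertIgnoreStatement replaces the former INSERT IGNORE INTO country_property ..., which only MySQL understands, and the statement is now built once outside the row callback instead of on every iteration. Reading the arguments as (table, insert columns, conflict key columns), a sketch of a MySQL rendering, with the SQL Server shape indicated in a comment (both are assumptions about the provider):

```java
import java.util.List;
import java.util.stream.Collectors;

// Hypothetical MySQL rendering of buildInsertIgnoreStatement(table, insertColumns, conflictColumns).
class MySqlInsertIgnore {
    public String buildInsertIgnoreStatement(String table, List<String> insertColumns, List<String> conflictColumns) {
        String cols = String.join(", ", insertColumns);
        String placeholders = insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
        // MySQL silently skips rows that violate the unique key spanning conflictColumns.
        return "INSERT IGNORE INTO " + table + " (" + cols + ") VALUES (" + placeholders + ")";
        // A SQL Server provider would typically emit
        //   INSERT INTO <table> (<cols>) SELECT s.* FROM (SELECT ? AS ..., ...) AS s
        //   WHERE NOT EXISTS (SELECT 1 FROM <table> t WHERE t.<key> = s.<key> AND ...)
        // so the same four parameters can be reused for the duplicate check.
    }
}
```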
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories.country;
|
package de.avatic.lcc.repositories.country;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.model.db.country.Country;
|
import de.avatic.lcc.model.db.country.Country;
|
||||||
import de.avatic.lcc.model.db.country.IsoCode;
|
import de.avatic.lcc.model.db.country.IsoCode;
|
||||||
import de.avatic.lcc.model.db.country.RegionCode;
|
import de.avatic.lcc.model.db.country.RegionCode;
|
||||||
|
|
@ -22,10 +23,12 @@ public class CountryRepository {
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public CountryRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
|
public CountryRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@ -66,13 +69,15 @@ public class CountryRepository {
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public SearchQueryResult<Country> listCountries(Optional<String> filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
|
public SearchQueryResult<Country> listCountries(Optional<String> filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
|
||||||
String query = buildQuery(filter.orElse(null), excludeDeprecated, true);
|
String query = buildQuery(filter.orElse(null), excludeDeprecated, pagination);
|
||||||
|
|
||||||
|
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
|
||||||
|
|
||||||
var countries = filter.isPresent() ?
|
var countries = filter.isPresent() ?
|
||||||
jdbcTemplate.query(query, new CountryMapper(),
|
jdbcTemplate.query(query, new CountryMapper(),
|
||||||
"%" + filter.get() + "%", "%" + filter.get() + "%", "%" + filter.get() + "%", pagination.getLimit(), pagination.getOffset()) :
|
"%" + filter.get() + "%", "%" + filter.get() + "%", "%" + filter.get() + "%", paginationParams[0], paginationParams[1]) :
|
||||||
jdbcTemplate.query(query, new CountryMapper()
|
jdbcTemplate.query(query, new CountryMapper()
|
||||||
, pagination.getLimit(), pagination.getOffset());
|
, paginationParams[0], paginationParams[1]);
|
||||||
|
|
||||||
Integer totalCount = filter.isPresent() ?
|
Integer totalCount = filter.isPresent() ?
|
||||||
jdbcTemplate.queryForObject(
|
jdbcTemplate.queryForObject(
|
||||||
|
|
@ -89,7 +94,7 @@ public class CountryRepository {
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public SearchQueryResult<Country> listCountries(Optional<String> filter, boolean excludeDeprecated) {
|
public SearchQueryResult<Country> listCountries(Optional<String> filter, boolean excludeDeprecated) {
|
||||||
String query = buildQuery(filter.orElse(null), excludeDeprecated, false);
|
String query = buildQuery(filter.orElse(null), excludeDeprecated, null);
|
||||||
|
|
||||||
var countries = filter.map(f -> jdbcTemplate.query(query, new CountryMapper(),
|
var countries = filter.map(f -> jdbcTemplate.query(query, new CountryMapper(),
|
||||||
"%" + f + "%", "%" + f + "%", "%" + f + "%"))
|
"%" + f + "%", "%" + f + "%", "%" + f + "%"))
|
||||||
|
|
@ -111,7 +116,7 @@ public class CountryRepository {
|
||||||
FROM country WHERE 1=1""");
|
FROM country WHERE 1=1""");
|
||||||
|
|
||||||
if (excludeDeprecated) {
|
if (excludeDeprecated) {
|
||||||
queryBuilder.append(" AND is_deprecated = FALSE");
|
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
|
||||||
}
|
}
|
||||||
if (filter != null) {
|
if (filter != null) {
|
||||||
queryBuilder.append(" AND (iso_code LIKE ? OR region_code LIKE ? or name LIKE ?) ");
|
queryBuilder.append(" AND (iso_code LIKE ? OR region_code LIKE ? or name LIKE ?) ");
|
||||||
|
|
@ -120,21 +125,20 @@ public class CountryRepository {
|
||||||
return queryBuilder.toString();
|
return queryBuilder.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
private String buildQuery(String filter, boolean excludeDeprecated, boolean hasLimit) {
|
private String buildQuery(String filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
|
||||||
StringBuilder queryBuilder = new StringBuilder("""
|
StringBuilder queryBuilder = new StringBuilder("""
|
||||||
SELECT id, iso_code, region_code, is_deprecated, name
|
SELECT id, iso_code, region_code, is_deprecated, name
|
||||||
FROM country WHERE 1=1""");
|
FROM country WHERE 1=1""");
|
||||||
|
|
||||||
if (excludeDeprecated) {
|
if (excludeDeprecated) {
|
||||||
queryBuilder.append(" AND is_deprecated = FALSE ");
|
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse()).append(" ");
|
||||||
}
|
}
|
||||||
if (filter != null) {
|
if (filter != null) {
|
||||||
queryBuilder.append(" AND (iso_code LIKE ? OR region_code LIKE ? OR name LIKE ?) ");
|
queryBuilder.append(" AND (iso_code LIKE ? OR region_code LIKE ? OR name LIKE ?) ");
|
||||||
}
|
}
|
||||||
if (hasLimit) {
|
|
||||||
queryBuilder.append(" ORDER BY iso_code LIMIT ? OFFSET ? ");
|
|
||||||
} else {
|
|
||||||
queryBuilder.append(" ORDER BY iso_code ");
|
queryBuilder.append(" ORDER BY iso_code ");
|
||||||
|
if (pagination != null) {
|
||||||
|
queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
|
||||||
}
|
}
|
||||||
return queryBuilder.toString();
|
return queryBuilder.toString();
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories.error;
|
package de.avatic.lcc.repositories.error;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.dto.error.CalculationJobDumpDTO;
|
import de.avatic.lcc.dto.error.CalculationJobDumpDTO;
|
||||||
import de.avatic.lcc.dto.error.CalculationJobDestinationDumpDTO;
|
import de.avatic.lcc.dto.error.CalculationJobDestinationDumpDTO;
|
||||||
import de.avatic.lcc.dto.error.CalculationJobRouteSectionDumpDTO;
|
import de.avatic.lcc.dto.error.CalculationJobRouteSectionDumpDTO;
|
||||||
|
|
@ -31,16 +32,17 @@ import java.util.Map;
|
||||||
public class DumpRepository {
|
public class DumpRepository {
|
||||||
|
|
||||||
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
private final PremiseRepository premiseRepository;
|
private final PremiseRepository premiseRepository;
|
||||||
private final PremiseTransformer premiseTransformer;
|
private final PremiseTransformer premiseTransformer;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public DumpRepository(NamedParameterJdbcTemplate namedParameterJdbcTemplate, JdbcTemplate jdbcTemplate, PremiseRepository premiseRepository, PremiseTransformer premiseTransformer) {
|
public DumpRepository(NamedParameterJdbcTemplate namedParameterJdbcTemplate, JdbcTemplate jdbcTemplate, PremiseRepository premiseRepository, PremiseTransformer premiseTransformer, SqlDialectProvider dialectProvider) {
|
||||||
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
this.premiseRepository = premiseRepository;
|
this.premiseRepository = premiseRepository;
|
||||||
this.premiseTransformer = premiseTransformer;
|
this.premiseTransformer = premiseTransformer;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional(readOnly = true)
|
@Transactional(readOnly = true)
|
||||||
|
|
@ -112,12 +114,12 @@ public class DumpRepository {
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<ErrorLogTraceItemDto> loadErrorTraceItems(Integer errorId) {
|
private List<ErrorLogTraceItemDto> loadErrorTraceItems(Integer errorId) {
|
||||||
String traceQuery = """
|
String traceQuery = String.format("""
|
||||||
SELECT line, file, method, fullPath
|
SELECT line, %s, method, fullPath
|
||||||
FROM sys_error_trace_item
|
FROM sys_error_trace_item
|
||||||
WHERE error_id = :errorId
|
WHERE error_id = :errorId
|
||||||
ORDER BY id
|
ORDER BY id
|
||||||
""";
|
""", dialectProvider.escapeIdentifier("file"));
|
||||||
|
|
||||||
MapSqlParameterSource params = new MapSqlParameterSource("errorId", errorId);
|
MapSqlParameterSource params = new MapSqlParameterSource("errorId", errorId);
|
||||||
|
|
||||||
|
|
@ -272,20 +274,17 @@ public class DumpRepository {
|
||||||
|
|
||||||
public SearchQueryResult<CalculationJobDumpDTO> listDumps(SearchQueryPagination searchQueryPagination) {
|
public SearchQueryResult<CalculationJobDumpDTO> listDumps(SearchQueryPagination searchQueryPagination) {
|
||||||
|
|
||||||
String calculationJobQuery = """
|
String calculationJobQuery = String.format("""
|
||||||
SELECT cj.id, cj.premise_id, cj.calculation_date, cj.validity_period_id,
|
SELECT cj.id, cj.premise_id, cj.calculation_date, cj.validity_period_id,
|
||||||
cj.property_set_id, cj.job_state, cj.error_id, cj.user_id
|
cj.property_set_id, cj.job_state, cj.error_id, cj.user_id
|
||||||
FROM calculation_job cj
|
FROM calculation_job cj
|
||||||
ORDER BY id DESC LIMIT :limit OFFSET :offset
|
ORDER BY id DESC %s
|
||||||
""";
|
""", dialectProvider.buildPaginationClause(searchQueryPagination.getLimit(), searchQueryPagination.getOffset()));
|
||||||
|
|
||||||
MapSqlParameterSource params = new MapSqlParameterSource();
|
Object[] paginationParams = dialectProvider.getPaginationParameters(searchQueryPagination.getLimit(), searchQueryPagination.getOffset());
|
||||||
params.addValue("offset", searchQueryPagination.getOffset());
|
|
||||||
params.addValue("limit", searchQueryPagination.getLimit());
|
|
||||||
|
|
||||||
var dumps = namedParameterJdbcTemplate.query(
|
var dumps = jdbcTemplate.query(
|
||||||
calculationJobQuery,
|
calculationJobQuery,
|
||||||
params,
|
|
||||||
(rs, _) -> {
|
(rs, _) -> {
|
||||||
CalculationJobDumpDTO dto = new CalculationJobDumpDTO();
|
CalculationJobDumpDTO dto = new CalculationJobDumpDTO();
|
||||||
dto.setId(rs.getInt("id"));
|
dto.setId(rs.getInt("id"));
|
||||||
|
|
@ -308,7 +307,8 @@ public class DumpRepository {
|
||||||
}
|
}
|
||||||
|
|
||||||
return dto;
|
return dto;
|
||||||
});
|
},
|
||||||
|
paginationParams[0], paginationParams[1]);
|
||||||
|
|
||||||
for(var dump : dumps) {
|
for(var dump : dumps) {
|
||||||
// Load premise details
|
// Load premise details
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
package de.avatic.lcc.repositories.error;
|
package de.avatic.lcc.repositories.error;
|
||||||
|
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.model.db.error.SysError;
|
import de.avatic.lcc.model.db.error.SysError;
|
||||||
import de.avatic.lcc.model.db.error.SysErrorTraceItem;
|
import de.avatic.lcc.model.db.error.SysErrorTraceItem;
|
||||||
import de.avatic.lcc.model.db.error.SysErrorType;
|
import de.avatic.lcc.model.db.error.SysErrorType;
|
||||||
|
|
@ -27,10 +28,12 @@ public class SysErrorRepository {
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public SysErrorRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
|
public SysErrorRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@@ -99,7 +102,8 @@ public class SysErrorRepository {
     }

     private void insertTraceItems(Integer errorId, List<SysErrorTraceItem> traceItems) {
-        String traceSql = "INSERT INTO sys_error_trace_item (error_id, line, file, method, fullPath) VALUES (?, ?, ?, ?, ?)";
+        String traceSql = String.format("INSERT INTO sys_error_trace_item (error_id, line, %s, method, fullPath) VALUES (?, ?, ?, ?, ?)",
+                dialectProvider.escapeIdentifier("file"));

         jdbcTemplate.batchUpdate(traceSql, traceItems, traceItems.size(),
                 (ps, traceItem) -> {

@@ -114,35 +118,40 @@ public class SysErrorRepository {
     @Transactional
     public SearchQueryResult<SysError> listErrors(Optional<String> filter, SearchQueryPagination pagination) {
         StringBuilder whereClause = new StringBuilder();
-        MapSqlParameterSource parameters = new MapSqlParameterSource();
+        List<Object> params = new ArrayList<>();

         // Build WHERE clause if filter is provided
         if (filter.isPresent() && !filter.get().trim().isEmpty()) {
             String filterValue = "%" + filter.get().trim() + "%";
-            whereClause.append(" WHERE (e.title LIKE :filter OR e.message LIKE :filter OR e.code LIKE :filter)");
-            parameters.addValue("filter", filterValue);
+            whereClause.append(" WHERE (e.title LIKE ? OR e.message LIKE ? OR e.code LIKE ?)");
+            params.add(filterValue);
+            params.add(filterValue);
+            params.add(filterValue);
         }

         // Count total elements
         String countSql = "SELECT COUNT(*) FROM sys_error e" + whereClause;
-        Integer totalElements = namedParameterJdbcTemplate.queryForObject(countSql, parameters, Integer.class);
+        Integer totalElements = params.isEmpty()
+                ? jdbcTemplate.queryForObject(countSql, Integer.class)
+                : jdbcTemplate.queryForObject(countSql, Integer.class, params.toArray());

         // Build main query with pagination
-        String sql = """
+        String sql = String.format("""
                 SELECT e.id, e.user_id, e.title, e.code, e.message, e.pinia,
                        e.calculation_job_id, e.bulk_operation_id, e.type, e.created_at, e.request
                 FROM sys_error e
-                """ + whereClause + """
+                %s
                 ORDER BY e.created_at DESC
-                LIMIT :limit OFFSET :offset
-                """;
+                %s
+                """, whereClause, dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));

         // Add pagination parameters
-        parameters.addValue("limit", pagination.getLimit());
-        parameters.addValue("offset", pagination.getOffset());
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
+        params.add(paginationParams[0]);
+        params.add(paginationParams[1]);

         // Execute query
-        List<SysError> errors = namedParameterJdbcTemplate.query(sql, parameters, new SysErrorMapper());
+        List<SysError> errors = jdbcTemplate.query(sql, new SysErrorMapper(), params.toArray());

         // Load trace items for each error
         if (!errors.isEmpty()) {

@@ -162,12 +171,12 @@ public class SysErrorRepository {
             return;
         }

-        String traceSql = """
-                SELECT error_id, id, line, file, method, fullPath
+        String traceSql = String.format("""
+                SELECT error_id, id, line, %s, method, fullPath
                 FROM sys_error_trace_item
                 WHERE error_id IN (:errorIds)
                 ORDER BY error_id, id
-                """;
+                """, dialectProvider.escapeIdentifier("file"));

         MapSqlParameterSource traceParameters = new MapSqlParameterSource("errorIds", errorIds);

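The hunks in this branch only show SqlDialectProvider from its call sites. For orientation, a minimal sketch of what such a contract could look like, with signatures inferred from the calls above and below (identifier escaping, boolean literals, pagination, timestamps, date extraction, upsert and insert-ignore builders); the real interface under de.avatic.lcc.database.dialect may differ.

package de.avatic.lcc.database.dialect;

import java.util.List;

// Hypothetical sketch - signatures are inferred from the call sites in this branch, not copied from the actual file.
public interface SqlDialectProvider {

    // Quotes an identifier that is a reserved word in some databases (e.g. "file").
    String escapeIdentifier(String identifier);

    // Literal for boolean TRUE / FALSE (e.g. TRUE/FALSE or 1/0, depending on the database).
    String getBooleanTrue();
    String getBooleanFalse();

    // SQL fragment appended after ORDER BY, e.g. " LIMIT ? OFFSET ?".
    String buildPaginationClause(int limit, int offset);

    // Bind values matching the placeholders produced by buildPaginationClause, in placeholder order.
    Object[] getPaginationParameters(int limit, int offset);

    // Dialect-specific current-timestamp expression, e.g. CURRENT_TIMESTAMP.
    String getCurrentTimestamp();

    // Expression that reduces a datetime column to its date part, e.g. DATE(column).
    String extractDate(String column);

    // Upsert: insert insertColumns, update updateColumns when uniqueColumns collide.
    String buildUpsertStatement(String table, List<String> uniqueColumns,
                                List<String> insertColumns, List<String> updateColumns);

    // Insert that silently skips rows violating the uniqueness of conflictColumns.
    String buildInsertIgnoreStatement(String table, List<String> insertColumns,
                                      List<String> conflictColumns);
}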
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.packaging;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.packaging.PackagingDimension;
 import de.avatic.lcc.model.db.packaging.PackagingType;
 import de.avatic.lcc.model.db.utils.DimensionUnit;

@@ -19,18 +20,21 @@ import java.util.Optional;
 public class PackagingDimensionRepository {

     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public PackagingDimensionRepository(JdbcTemplate jdbcTemplate) {
+    public PackagingDimensionRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     @Transactional
     public Optional<PackagingDimension> getById(Integer id) {
-        String query = """
+        String query = String.format("""
                 SELECT id, displayed_dimension_unit, displayed_weight_unit, width, length, height,
                        weight, content_unit_count, type, is_deprecated
                 FROM packaging_dimension
-                WHERE packaging_dimension.id = ? AND packaging_dimension.is_deprecated = false""";
+                WHERE packaging_dimension.id = ? AND packaging_dimension.is_deprecated = %s""",
+                dialectProvider.getBooleanFalse());


         //TODO: what if i need to get deprecated materials?

@@ -113,7 +117,7 @@ public class PackagingDimensionRepository {
     }

     public Optional<Integer> setDeprecatedById(Integer id) {
-        String query = "UPDATE packaging_dimension SET is_deprecated = TRUE WHERE id = ?";
+        String query = "UPDATE packaging_dimension SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
         return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
     }

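The getBooleanTrue()/getBooleanFalse() calls above replace hard-coded TRUE/FALSE literals. Two plausible renderings, shown for illustration only; the concrete dialect classes are not part of these hunks, so the names and targets below are assumptions.

// Hypothetical implementations - one for databases with boolean keywords, one for databases that only accept 1/0.
class KeywordBooleanDialectSketch {
    public String getBooleanTrue()  { return "TRUE"; }
    public String getBooleanFalse() { return "FALSE"; }
}

class NumericBooleanDialectSketch {
    public String getBooleanTrue()  { return "1"; }
    public String getBooleanFalse() { return "0"; }
}

// Usage mirrors the repository change above:
// "UPDATE packaging_dimension SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?"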
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.packaging;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.properties.PackagingProperty;
 import de.avatic.lcc.model.db.properties.PropertyDataType;
 import de.avatic.lcc.model.db.properties.PropertyType;

@@ -16,9 +17,11 @@ import java.util.Optional;
 public class PackagingPropertiesRepository {

     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public PackagingPropertiesRepository(JdbcTemplate jdbcTemplate) {
+    public PackagingPropertiesRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     public List<PackagingProperty> getByPackagingId(Integer id) {

@@ -94,11 +97,14 @@ public class PackagingPropertiesRepository {


     public void update(Integer packagingId, Integer typeId, String value) {
-        String query = """
-                INSERT INTO packaging_property (property_value, packaging_id, packaging_property_type_id) VALUES (?, ?, ?)
-                ON DUPLICATE KEY UPDATE property_value = ?""";
+        String query = dialectProvider.buildUpsertStatement(
+                "packaging_property",
+                List.of("packaging_id", "packaging_property_type_id"),
+                List.of("property_value", "packaging_id", "packaging_property_type_id"),
+                List.of("property_value")
+        );

-        jdbcTemplate.update(query, value, packagingId, typeId, value);
+        jdbcTemplate.update(query, value, packagingId, typeId);
     }

     public Integer getTypeIdByMappingId(String mappingId) {

@@ -108,11 +114,14 @@ public class PackagingPropertiesRepository {

     public void update(Integer packagingId, String typeId, String value) {

-        String query = """
-                INSERT INTO packaging_property (property_value, packaging_id, packaging_property_type_id) VALUES (?, ?, ?)
-                ON DUPLICATE KEY UPDATE property_value = ?""";
+        String query = dialectProvider.buildUpsertStatement(
+                "packaging_property",
+                List.of("packaging_id", "packaging_property_type_id"),
+                List.of("property_value", "packaging_id", "packaging_property_type_id"),
+                List.of("property_value")
+        );

-        jdbcTemplate.update(query, value, packagingId, typeId, value);
+        jdbcTemplate.update(query, value, packagingId, typeId);
     }

 }

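The pattern above swaps MySQL-only INSERT ... ON DUPLICATE KEY UPDATE for a generated statement, and the jdbcTemplate.update call loses its trailing duplicate value because the generated SQL binds each insert column exactly once. A rough sketch of what a MySQL-flavoured buildUpsertStatement could emit; this is an assumption for readability, the real generator may use MERGE or ON CONFLICT on other databases.

import java.util.List;
import java.util.stream.Collectors;

// Hypothetical MySQL-style generator, shown only to make the bind-parameter count visible.
class MySqlUpsertSketch {
    static String buildUpsertStatement(String table, List<String> uniqueColumns,
                                       List<String> insertColumns, List<String> updateColumns) {
        // The MySQL flavour does not need uniqueColumns explicitly; it relies on the table's unique index.
        String cols = String.join(", ", insertColumns);
        String placeholders = insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
        String updates = updateColumns.stream()
                .map(c -> c + " = VALUES(" + c + ")")
                .collect(Collectors.joining(", "));
        return "INSERT INTO " + table + " (" + cols + ") VALUES (" + placeholders + ")"
                + " ON DUPLICATE KEY UPDATE " + updates;
    }

    public static void main(String[] args) {
        // Matches the call in PackagingPropertiesRepository.update(...): three bind parameters, not four.
        System.out.println(buildUpsertStatement(
                "packaging_property",
                List.of("packaging_id", "packaging_property_type_id"),
                List.of("property_value", "packaging_id", "packaging_property_type_id"),
                List.of("property_value")));
    }
}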
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.packaging;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.packaging.Packaging;
 import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
 import de.avatic.lcc.repositories.pagination.SearchQueryResult;

@@ -45,40 +46,44 @@ public class PackagingRepository {


     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public PackagingRepository(JdbcTemplate jdbcTemplate) {
+    public PackagingRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     @Transactional
     public SearchQueryResult<Packaging> listPackaging(Integer materialId, Integer supplierId, boolean excludeDeprecated, SearchQueryPagination pagination) {

-        String query = buildQuery(materialId, supplierId, excludeDeprecated);
+        String query = buildQuery(materialId, supplierId, excludeDeprecated, pagination);

+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
+
         var params = new ArrayList<Object>();
-        params.add(excludeDeprecated);
+        // Note: excludeDeprecated is not added as parameter - it's inserted as boolean literal in buildQuery()
         if (materialId != null) {
             params.add(materialId);
         }
         if (supplierId != null) {
             params.add(supplierId);
         }
-        params.add(pagination.getLimit());
-        params.add(pagination.getOffset());
+        params.add(paginationParams[0]);
+        params.add(paginationParams[1]);

         var packaging = jdbcTemplate.query(query, new PackagingMapper(), params.toArray());

         return new SearchQueryResult<>(packaging, pagination.getPage(), countPackaging(materialId, supplierId, excludeDeprecated), pagination.getLimit());
     }

-    private static String buildQuery(Integer materialId, Integer supplierId, boolean excludeDeprecated) {
+    private String buildQuery(Integer materialId, Integer supplierId, boolean excludeDeprecated, SearchQueryPagination pagination) {
         StringBuilder queryBuilder = new StringBuilder("""
-                SELECT id,
+                SELECT id, supplier_node_id, material_id, hu_dimension_id, shu_dimension_id, is_deprecated
                 FROM packaging
                 WHERE 1=1""");

         if (excludeDeprecated) {
-            queryBuilder.append(" AND is_deprecated = FALSE");
+            queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
         }
         if (materialId != null) {
             queryBuilder.append(" AND material_id = ?");

@@ -86,7 +91,8 @@ public class PackagingRepository {
         if (supplierId != null) {
             queryBuilder.append(" AND supplier_node_id = ?");
         }
-        queryBuilder.append("ORDER BY id LIMIT ? OFFSET ?");
+        queryBuilder.append(" ORDER BY id ");
+        queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
         return queryBuilder.toString();
     }

@@ -145,7 +151,7 @@ public class PackagingRepository {

     @Transactional
     public Optional<Integer> setDeprecatedById(Integer id) {
-        String query = "UPDATE packaging SET is_deprecated = TRUE WHERE id = ?";
+        String query = "UPDATE packaging SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
         return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
     }

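Throughout these hunks, buildPaginationClause and getPaginationParameters travel as a pair: the clause decides which placeholders end up after ORDER BY, and the parameter array supplies the matching bind values in the right order. A sketch of why that indirection matters, assuming one target database uses LIMIT/OFFSET and another uses OFFSET ... FETCH; the dialects actually supported by this branch are not visible here.

// Hypothetical pair of implementations - illustrating only the clause/parameter contract.
class LimitOffsetPaginationSketch {
    String buildPaginationClause(int limit, int offset) { return " LIMIT ? OFFSET ?"; }
    Object[] getPaginationParameters(int limit, int offset) { return new Object[]{limit, offset}; }
}

class OffsetFetchPaginationSketch {
    String buildPaginationClause(int limit, int offset) { return " OFFSET ? ROWS FETCH NEXT ? ROWS ONLY"; }
    // Note the reversed order: the offset binds first in this clause,
    // which is why callers use paginationParams[0]/[1] instead of limit/offset directly.
    Object[] getPaginationParameters(int limit, int offset) { return new Object[]{offset, limit}; }
}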
@@ -9,6 +9,7 @@ import org.springframework.jdbc.core.RowMapper;
 import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
 import org.springframework.jdbc.support.GeneratedKeyHolder;
 import org.springframework.jdbc.support.KeyHolder;
+import org.springframework.stereotype.Repository;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;

@@ -19,7 +20,7 @@ import java.sql.Statement;
 import java.util.*;
 import java.util.stream.Collectors;

-@Service
+@Repository
 public class DestinationRepository {

     private final JdbcTemplate jdbcTemplate;

@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.premise;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.materials.Material;
 import de.avatic.lcc.model.db.nodes.Location;
 import de.avatic.lcc.model.db.nodes.Node;

@@ -37,10 +38,12 @@ public class PremiseRepository {

     private final JdbcTemplate jdbcTemplate;
     private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public PremiseRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
+    public PremiseRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
         this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     @Transactional

@@ -53,7 +56,7 @@ public class PremiseRepository {
                 .withArchived(archived)
                 .withDone(done);

-        String query = queryBuilder.buildSelectQuery();
+        String query = queryBuilder.buildSelectQuery(dialectProvider, pagination);
         String countQuery = queryBuilder.buildCountQuery();

         List<PremiseListEntry> entities;

@@ -77,12 +80,14 @@ public class PremiseRepository {

     private List<PremiseListEntry> executeQueryWithoutFilter(String query, Integer userId,
                                                              SearchQueryPagination pagination) {
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
+
         return jdbcTemplate.query(
                 query,
                 new PremiseListEntryMapper(),
                 userId,
-                pagination.getLimit(),
-                pagination.getOffset()
+                paginationParams[0],
+                paginationParams[1]
         );
     }

@@ -104,11 +109,13 @@ public class PremiseRepository {
     }

     private Object[] createFilterParams(Integer userId, String wildcardFilter, SearchQueryPagination pagination) {
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
+
         return new Object[]{
                 userId,
                 wildcardFilter, wildcardFilter, wildcardFilter, wildcardFilter,
                 wildcardFilter, wildcardFilter,
-                pagination.getLimit(), pagination.getOffset()
+                paginationParams[0], paginationParams[1]
         };
     }

@@ -353,7 +360,7 @@ public class PremiseRepository {
         }

         String placeholders = String.join(",", Collections.nCopies(premiseIds.size(), "?"));
-        String query = "UPDATE premise SET material_cost = null, is_fca_enabled = false, oversea_share = null WHERE id IN (" + placeholders + ")";
+        String query = "UPDATE premise SET material_cost = null, is_fca_enabled = " + dialectProvider.getBooleanFalse() + ", oversea_share = null WHERE id IN (" + placeholders + ")";
         jdbcTemplate.update(query, premiseIds.toArray());

     }

@@ -580,11 +587,15 @@ public class PremiseRepository {

         KeyHolder keyHolder = new GeneratedKeyHolder();

-        jdbcTemplate.update(connection -> {
-            PreparedStatement ps = connection.prepareStatement(
+        String sql = String.format(
                 "INSERT INTO premise (material_id, supplier_node_id, user_supplier_node_id, user_id, state, created_at, updated_at, geo_lat, geo_lng, country_id)" +
-                " VALUES (?, ?, ?, ?, 'DRAFT', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, ?, ?, ?)",
-                Statement.RETURN_GENERATED_KEYS);
+                " VALUES (?, ?, ?, ?, 'DRAFT', %s, %s, ?, ?, ?)",
+                dialectProvider.getCurrentTimestamp(),
+                dialectProvider.getCurrentTimestamp()
+        );
+
+        jdbcTemplate.update(connection -> {
+            PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);

             ps.setInt(1, materialId);
             ps.setObject(2, supplierId);

@@ -699,7 +710,7 @@ public class PremiseRepository {
             return premiseIds;
         }

-        String sql = "SELECT id FROM premise WHERE id IN (:ids) AND tariff_unlocked = TRUE";
+        String sql = "SELECT id FROM premise WHERE id IN (:ids) AND tariff_unlocked = " + dialectProvider.getBooleanTrue();

         List<Integer> unlockedIds = namedParameterJdbcTemplate.query(
                 sql,

@@ -725,7 +736,7 @@ public class PremiseRepository {
     /**
      * Encapsulates SQL query building logic
      */
-    private static class QueryBuilder {
+    private class QueryBuilder {
         private static final String BASE_JOIN_QUERY = """
                 FROM premise AS p
                 LEFT JOIN material as m ON p.material_id = m.id

@@ -769,7 +780,7 @@ public class PremiseRepository {
             return queryBuilder.toString();
         }

-        public String buildSelectQuery() {
+        public String buildSelectQuery(SqlDialectProvider dialectProvider, SearchQueryPagination pagination) {
             StringBuilder queryBuilder = new StringBuilder();
             queryBuilder.append("""
                     SELECT p.id as 'p.id', p.state as 'p.state', p.user_id as 'p.user_id',

@@ -785,8 +796,8 @@ public class PremiseRepository {
                     user_n.country_id as 'user_n.country_id', user_n.geo_lat as 'user_n.geo_lat', user_n.geo_lng as 'user_n.geo_lng'
                     """).append(BASE_JOIN_QUERY);
             appendConditions(queryBuilder);
-            queryBuilder.append(" ORDER BY p.updated_at DESC, p.id DESC");
-            queryBuilder.append(" LIMIT ? OFFSET ?");
+            queryBuilder.append(" ORDER BY p.updated_at DESC, p.id DESC ");
+            queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
             return queryBuilder.toString();
         }

@@ -827,7 +838,7 @@ public class PremiseRepository {

         private void appendBooleanCondition(StringBuilder queryBuilder, Boolean condition, String field) {
             if (condition != null && condition) {
-                queryBuilder.append(" OR ").append(field).append(" = TRUE");
+                queryBuilder.append(" OR ").append(field).append(" = ").append(dialectProvider.getBooleanTrue());
             }
         }
     }

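In the premise insert above, the SQL is now rendered once with String.format before the PreparedStatementCreator lambda, because the lambda can only capture an effectively final, already-built string. A minimal sketch of that pattern, assuming getCurrentTimestamp() returns an expression such as CURRENT_TIMESTAMP; column names are trimmed down for illustration and do not claim to match the real table.

import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;

import java.sql.PreparedStatement;
import java.sql.Statement;

// Hypothetical helper showing the "render dialect-specific SQL once, then capture it in the lambda" pattern.
class DraftInsertSketch {
    private final JdbcTemplate jdbcTemplate;
    private final String currentTimestampExpr; // e.g. dialectProvider.getCurrentTimestamp()

    DraftInsertSketch(JdbcTemplate jdbcTemplate, String currentTimestampExpr) {
        this.jdbcTemplate = jdbcTemplate;
        this.currentTimestampExpr = currentTimestampExpr;
    }

    Number insertDraft(int materialId) {
        // Assembled before jdbcTemplate.update(...) so the dialect lookup happens exactly once.
        String sql = String.format(
                "INSERT INTO premise (material_id, state, created_at, updated_at) VALUES (?, 'DRAFT', %s, %s)",
                currentTimestampExpr, currentTimestampExpr);

        KeyHolder keyHolder = new GeneratedKeyHolder();
        jdbcTemplate.update(connection -> {
            PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
            ps.setInt(1, materialId);
            return ps;
        }, keyHolder);
        return keyHolder.getKey();
    }
}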
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.premise;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.premises.route.Route;
 import de.avatic.lcc.util.exception.internalerror.DatabaseException;
 import org.springframework.jdbc.core.JdbcTemplate;

@@ -20,9 +21,11 @@ import java.util.Optional;
 public class RouteRepository {

     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public RouteRepository(JdbcTemplate jdbcTemplate) {
+    public RouteRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     public List<Route> getByDestinationId(Integer id) {

@@ -31,7 +34,7 @@ public class RouteRepository {
     }

     public Optional<Route> getSelectedByDestinationId(Integer id) {
-        String query = "SELECT * FROM premise_route WHERE premise_destination_id = ? AND is_selected = TRUE";
+        String query = "SELECT * FROM premise_route WHERE premise_destination_id = ? AND is_selected = " + dialectProvider.getBooleanTrue();
         var route = jdbcTemplate.query(query, new RouteMapper(), id);

         if(route.isEmpty()) {

@@ -78,12 +81,12 @@ public class RouteRepository {
     }

     public void updateSelectedByDestinationId(Integer destinationId, Integer selectedRouteId) {
-        String deselectQuery = """
-                UPDATE premise_route SET is_selected = FALSE WHERE is_selected = TRUE AND premise_destination_id = ?
-                """;
-        String selectQuery = """
-                UPDATE premise_route SET is_selected = TRUE WHERE id = ?
-                """;
+        String deselectQuery = String.format("""
+                UPDATE premise_route SET is_selected = %s WHERE is_selected = %s AND premise_destination_id = ?
+                """, dialectProvider.getBooleanFalse(), dialectProvider.getBooleanTrue());
+        String selectQuery = String.format("""
+                UPDATE premise_route SET is_selected = %s WHERE id = ?
+                """, dialectProvider.getBooleanTrue());

         jdbcTemplate.update(deselectQuery, destinationId);
         var affectedRowsSelect = jdbcTemplate.update(selectQuery, selectedRouteId);

@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.properties;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.dto.generic.PropertyDTO;
 import de.avatic.lcc.model.db.properties.SystemPropertyMappingId;
 import de.avatic.lcc.model.db.rates.ValidityPeriodState;

@@ -26,9 +27,11 @@ import java.util.stream.Collectors;
 public class PropertyRepository {

     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public PropertyRepository(JdbcTemplate jdbcTemplate) {
+    public PropertyRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     /**

@@ -58,11 +61,14 @@ public class PropertyRepository {
             return;
         }

-        String query = """
-                INSERT INTO system_property (property_set_id, system_property_type_id, property_value) VALUES (?, ?, ?)
-                ON DUPLICATE KEY UPDATE property_value = ?""";
+        String query = dialectProvider.buildUpsertStatement(
+                "system_property",
+                List.of("property_set_id", "system_property_type_id"),
+                List.of("property_set_id", "system_property_type_id", "property_value"),
+                List.of("property_value")
+        );

-        var affectedRows = jdbcTemplate.update(query, setId, typeId, value, value);
+        var affectedRows = jdbcTemplate.update(query, setId, typeId, value);

         if (!(affectedRows > 0)) {
             throw new DatabaseException("Could not update property value for property set " + setId + " and property type " + mappingId);

@@ -99,10 +105,15 @@ public class PropertyRepository {
                 LEFT JOIN system_property AS sp ON sp.system_property_type_id = type.id
                 LEFT JOIN property_set AS ps ON ps.id = sp.property_set_id AND ps.state IN (?, ?)
                 GROUP BY type.id, type.name, type.data_type, type.external_mapping_id, type.validation_rule, type.description, type.property_group, type.sequence_number
-                HAVING draftValue IS NOT NULL OR validValue IS NOT NULL ORDER BY type.property_group , type.sequence_number;
+                HAVING MAX(CASE WHEN ps.state = ? THEN sp.property_value END) IS NOT NULL
+                    OR MAX(CASE WHEN ps.state = ? THEN sp.property_value END) IS NOT NULL
+                ORDER BY type.property_group , type.sequence_number;
                 """;

-        return jdbcTemplate.query(query, new PropertyMapper(), ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name(), ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name());
+        return jdbcTemplate.query(query, new PropertyMapper(),
+                ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name(),
+                ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name(),
+                ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name());

     }

@@ -182,9 +193,11 @@ public class PropertyRepository {
         try {
             List<Map<String, Object>> results = jdbcTemplate.queryForList(query, ValidityPeriodState.VALID.name());

-            String insertQuery = """
-                    INSERT IGNORE INTO system_property (property_value, system_property_type_id, property_set_id)
-                    VALUES (?, ?, ?)""";
+            String insertQuery = dialectProvider.buildInsertIgnoreStatement(
+                    "system_property",
+                    List.of("property_value", "system_property_type_id", "property_set_id"),
+                    List.of("property_set_id", "system_property_type_id")
+            );

             List<Object[]> batchArgs = results.stream()
                     .map(row -> new Object[]{row.get("value"), row.get("typeId"), setId})

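buildInsertIgnoreStatement replaces the MySQL-specific INSERT IGNORE. A sketch of two possible renderings, under the assumption (taken from the call sites above) that the second argument lists the insert columns and the third the conflict columns; the real generator is not part of this hunk.

import java.util.List;
import java.util.stream.Collectors;

// Hypothetical generators, for illustration only.
class InsertIgnoreSketch {
    // MySQL flavour: the conflict columns are implied by the table's unique index.
    static String mysql(String table, List<String> insertColumns) {
        String cols = String.join(", ", insertColumns);
        String placeholders = insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
        return "INSERT IGNORE INTO " + table + " (" + cols + ") VALUES (" + placeholders + ")";
    }

    // PostgreSQL-style flavour: same bind-parameter count, conflict columns named explicitly.
    static String onConflictDoNothing(String table, List<String> insertColumns, List<String> conflictColumns) {
        String cols = String.join(", ", insertColumns);
        String placeholders = insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
        return "INSERT INTO " + table + " (" + cols + ") VALUES (" + placeholders + ")"
                + " ON CONFLICT (" + String.join(", ", conflictColumns) + ") DO NOTHING";
    }
}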
@@ -1,6 +1,7 @@
 package de.avatic.lcc.repositories.properties;


+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.properties.PropertySet;
 import de.avatic.lcc.model.db.rates.ValidityPeriodState;
 import org.springframework.jdbc.core.JdbcTemplate;

@@ -23,9 +24,11 @@ import java.util.Optional;
 public class PropertySetRepository {

     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public PropertySetRepository(JdbcTemplate jdbcTemplate) {
+    public PropertySetRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     /**

@@ -155,16 +158,21 @@ public class PropertySetRepository {
     }

     public Optional<PropertySet> getByDate(LocalDate date) {
-        String query = """
+        String query = String.format("""
                 SELECT id, start_date, end_date, state
                 FROM property_set
-                WHERE DATE(start_date) <= ?
-                AND (end_date IS NULL OR DATE(end_date) >= ?)
+                WHERE %s <= ?
+                AND (end_date IS NULL OR %s >= ?)
                 ORDER BY start_date DESC
-                LIMIT 1
-                """;
+                %s
+                """,
+                dialectProvider.extractDate("start_date"),
+                dialectProvider.extractDate("end_date"),
+                dialectProvider.buildPaginationClause(1, 0)
+        );

-        var propertySets = jdbcTemplate.query(query, new PropertySetMapper(), date, date);
+        Object[] paginationParams = dialectProvider.getPaginationParameters(1, 0);
+        var propertySets = jdbcTemplate.query(query, new PropertySetMapper(), date, date, paginationParams[0], paginationParams[1]);

         return propertySets.isEmpty() ? Optional.empty() : Optional.of(propertySets.getFirst());
     }

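extractDate wraps a datetime column in whatever date-truncation expression the database understands, so the former DATE(start_date) comparison stays portable. Two illustrative renderings; the concrete dialect classes are assumptions, not code from this branch.

// Hypothetical implementations of extractDate, for illustration only.
class DateFunctionDialectSketch {
    String extractDate(String column) { return "DATE(" + column + ")"; }
}

class CastDateDialectSketch {
    String extractDate(String column) { return "CAST(" + column + " AS DATE)"; }
}
// Either form slots into the "%s <= ?" placeholder used by getByDate above.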
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.rates;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.dto.generic.TransportType;
 import de.avatic.lcc.model.db.rates.ContainerRate;
 import de.avatic.lcc.model.db.rates.ValidityPeriodState;

@@ -13,6 +14,7 @@ import org.springframework.transaction.annotation.Transactional;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;

@@ -21,9 +23,11 @@ import java.util.Optional;
 public class ContainerRateRepository {

     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public ContainerRateRepository(JdbcTemplate jdbcTemplate) {
+    public ContainerRateRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     /**

@@ -74,9 +78,12 @@ public class ContainerRateRepository {
             }
         }

-        queryBuilder.append(" ORDER BY cr.id LIMIT ? OFFSET ?");
-        params.add(pagination.getLimit());
-        params.add(pagination.getOffset());
+        queryBuilder.append(" ORDER BY cr.id ");
+        queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
+
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
+        params.add(paginationParams[0]);
+        params.add(paginationParams[1]);

         Integer totalCount = jdbcTemplate.queryForObject(countQueryBuilder.toString(), Integer.class, countParams.toArray());
         var results = jdbcTemplate.query(queryBuilder.toString(), new ContainerRateMapper(), params.toArray());

@@ -128,10 +135,12 @@ public class ContainerRateRepository {
                 LEFT JOIN node AS from_node ON from_node.id = container_rate.from_node_id
                 LEFT JOIN validity_period ON validity_period.id = container_rate.validity_period_id
                 WHERE validity_period.state = ?
-                AND to_node.is_deprecated = FALSE
-                AND from_node.is_deprecated = FALSE
+                AND to_node.is_deprecated = %s
+                AND from_node.is_deprecated = %s
                 AND (container_rate.container_rate_type = ? OR container_rate.container_rate_type = ?)
                 AND container_rate.from_node_id = ? AND to_node.country_id IN (%s)""".formatted(
+                dialectProvider.getBooleanFalse(),
+                dialectProvider.getBooleanFalse(),
                 destinationCountryPlaceholders);

         List<Object> params = new ArrayList<>();

@@ -147,7 +156,7 @@ public class ContainerRateRepository {
     @Transactional
     public List<ContainerRate> getPostRunsFor(ContainerRate mainRun) {

-        String query = """
+        String query = String.format("""
                 SELECT container_rate.id AS id,
                        container_rate.validity_period_id AS validity_period_id,
                        container_rate.container_rate_type AS container_rate_type,

@@ -164,9 +173,11 @@ public class ContainerRateRepository {
                 LEFT JOIN node AS from_node ON from_node.id = container_rate.from_node_id
                 LEFT JOIN validity_period ON validity_period.id = container_rate.validity_period_id
                 WHERE validity_period.state = ?
-                AND to_node.is_deprecated = FALSE
-                AND from_node.is_deprecated = FALSE
-                AND container_rate.from_node_id = ? AND container_rate.container_rate_type = ?""";
+                AND to_node.is_deprecated = %s
+                AND from_node.is_deprecated = %s
+                AND container_rate.from_node_id = ? AND container_rate.container_rate_type = ?""",
+                dialectProvider.getBooleanFalse(),
+                dialectProvider.getBooleanFalse());

         return jdbcTemplate.query(query, new ContainerRateMapper(true), ValidityPeriodState.VALID.name(), mainRun.getToNodeId(), TransportType.POST_RUN.name());
     }

@@ -213,17 +224,17 @@ public class ContainerRateRepository {

     @Transactional
     public void insert(ContainerRate containerRate) {
-        String sql = """
-                INSERT INTO container_rate
-                (from_node_id, to_node_id, container_rate_type, rate_teu, rate_feu, rate_hc, lead_time, validity_period_id)
-                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
-                ON DUPLICATE KEY UPDATE
-                container_rate_type = VALUES(container_rate_type),
-                rate_teu = VALUES(rate_teu),
-                rate_feu = VALUES(rate_feu),
-                rate_hc = VALUES(rate_hc),
-                lead_time = VALUES(lead_time)
-                """;
+        // Build UPSERT statement using dialect provider
+        List<String> uniqueColumns = Arrays.asList("from_node_id", "to_node_id", "container_rate_type", "validity_period_id");
+        List<String> insertColumns = Arrays.asList("from_node_id", "to_node_id", "container_rate_type", "rate_teu", "rate_feu", "rate_hc", "lead_time", "validity_period_id");
+        List<String> updateColumns = Arrays.asList("container_rate_type", "rate_teu", "rate_feu", "rate_hc", "lead_time");
+
+        String sql = dialectProvider.buildUpsertStatement(
+                "container_rate",
+                uniqueColumns,
+                insertColumns,
+                updateColumns
+        );

         jdbcTemplate.update(sql,
                 containerRate.getFromNodeId(),

@@ -240,15 +251,16 @@ public class ContainerRateRepository {
     @Transactional
     public boolean hasMainRun(Integer nodeId) {
         String query = """
-                SELECT EXISTS(
+                SELECT CASE WHEN EXISTS(
                     SELECT 1 FROM container_rate
                     WHERE (from_node_id = ? OR to_node_id = ?)
                     AND (container_rate_type = ? OR container_rate_type = ?)
-                )
+                ) THEN 1 ELSE 0 END
                 """;

-        return Boolean.TRUE.equals(jdbcTemplate.queryForObject(query, Boolean.class,
-                nodeId, nodeId, TransportType.SEA.name(), TransportType.RAIL.name()));
+        Integer result = jdbcTemplate.queryForObject(query, Integer.class,
+                nodeId, nodeId, TransportType.SEA.name(), TransportType.RAIL.name());
+        return result != null && result > 0;
     }

     @Transactional

@@ -259,7 +271,11 @@ public class ContainerRateRepository {

     @Transactional
     public void copyCurrentToDraft() {
-        String sql = """
+        // Build LIMIT clause for subquery
+        String limitClause = dialectProvider.buildPaginationClause(1, 0);
+        Object[] paginationParams = dialectProvider.getPaginationParameters(1, 0);
+
+        String sql = String.format("""
                 INSERT INTO container_rate (
                     from_node_id,
                     to_node_id,

@@ -278,13 +294,13 @@ public class ContainerRateRepository {
                     cr.rate_feu,
                     cr.rate_hc,
                     cr.lead_time,
-                    (SELECT id FROM validity_period WHERE state = 'DRAFT' LIMIT 1) as validity_period_id
+                    (SELECT id FROM validity_period WHERE state = 'DRAFT' %s) as validity_period_id
                 FROM container_rate cr
                 INNER JOIN validity_period vp ON cr.validity_period_id = vp.id
                 WHERE vp.state = 'VALID'
-                """;
+                """, limitClause);

-        jdbcTemplate.update(sql);
+        jdbcTemplate.update(sql, paginationParams);
     }


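The hasMainRun rewrite above trades SELECT EXISTS(...), whose bare boolean result is not supported everywhere, for a CASE expression that always yields an integer, converted back to boolean on the Java side. A compact restatement of that conversion, with the query shortened to one condition for readability:

import org.springframework.jdbc.core.JdbcTemplate;

// Sketch of the integer-to-boolean pattern used in hasMainRun; the query text is abbreviated.
class ExistsCheckSketch {
    private final JdbcTemplate jdbcTemplate;

    ExistsCheckSketch(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    boolean exists(Integer nodeId) {
        String query = "SELECT CASE WHEN EXISTS(SELECT 1 FROM container_rate WHERE from_node_id = ?) THEN 1 ELSE 0 END";
        Integer result = jdbcTemplate.queryForObject(query, Integer.class, nodeId);
        // queryForObject can return null for an empty result, so guard before comparing.
        return result != null && result > 0;
    }
}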
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.rates;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.rates.MatrixRate;
 import de.avatic.lcc.model.db.rates.ValidityPeriodState;
 import de.avatic.lcc.repositories.pagination.SearchQueryPagination;

@@ -23,14 +24,17 @@ import java.util.Optional;
 public class MatrixRateRepository {

     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

     /**
      * Instantiates the repository by injecting a {@link JdbcTemplate}.
      *
      * @param jdbcTemplate the {@link JdbcTemplate} to be used for database interactions
+     * @param dialectProvider the {@link SqlDialectProvider} for database-specific SQL syntax
      */
-    public MatrixRateRepository(JdbcTemplate jdbcTemplate) {
+    public MatrixRateRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     /**

@@ -42,9 +46,13 @@ public class MatrixRateRepository {
      */
     @Transactional
     public SearchQueryResult<MatrixRate> listRates(SearchQueryPagination pagination) {
-        String query = "SELECT * FROM country_matrix_rate ORDER BY id LIMIT ? OFFSET ?";
+        String query = String.format("SELECT * FROM country_matrix_rate ORDER BY id %s",
+                dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
+
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
         var totalCount = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM country_matrix_rate", Integer.class);
-        return new SearchQueryResult<>(jdbcTemplate.query(query, new MatrixRateMapper(), pagination.getLimit(), pagination.getOffset()), pagination.getPage(), totalCount, pagination.getLimit());
+
+        return new SearchQueryResult<>(jdbcTemplate.query(query, new MatrixRateMapper(), paginationParams[0], paginationParams[1]), pagination.getPage(), totalCount, pagination.getLimit());
     }

     /**

@@ -96,9 +104,12 @@ public class MatrixRateRepository {
             }
         }

-        queryBuilder.append(" ORDER BY cmr.id LIMIT ? OFFSET ?");
-        params.add(pagination.getLimit());
-        params.add(pagination.getOffset());
+        queryBuilder.append(" ORDER BY cmr.id ");
+        queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
+
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
+        params.add(paginationParams[0]);
+        params.add(paginationParams[1]);

         var totalCount = jdbcTemplate.queryForObject(countQueryBuilder.toString(), Integer.class, countParams.toArray());
         var results = jdbcTemplate.query(queryBuilder.toString(), new MatrixRateMapper(), params.toArray());

@@ -164,12 +175,12 @@ public class MatrixRateRepository {

     @Transactional
     public void insert(MatrixRate rate) {
-        String sql = """
-                INSERT INTO country_matrix_rate (from_country_id, to_country_id, rate, validity_period_id)
-                VALUES (?, ?, ?, ?)
-                ON DUPLICATE KEY UPDATE
-                rate = VALUES(rate)
-                """;
+        String sql = dialectProvider.buildUpsertStatement(
+                "country_matrix_rate",
+                List.of("from_country_id", "to_country_id", "validity_period_id"),
+                List.of("from_country_id", "to_country_id", "rate", "validity_period_id"),
+                List.of("rate")
+        );

         jdbcTemplate.update(sql,
                 rate.getFromCountry(),

@@ -180,13 +191,14 @@ public class MatrixRateRepository {

     @Transactional
     public void copyCurrentToDraft() {
+        // Note: No pagination needed for the DRAFT subquery - there should only be one DRAFT period
         String sql = """
                 INSERT INTO country_matrix_rate (from_country_id, to_country_id, rate, validity_period_id)
                 SELECT
                     cmr.from_country_id,
                     cmr.to_country_id,
                     cmr.rate,
-                    (SELECT id FROM validity_period WHERE state = 'DRAFT' LIMIT 1) AS validity_period_id
+                    (SELECT id FROM validity_period WHERE state = 'DRAFT') AS validity_period_id
                 FROM country_matrix_rate cmr
                 INNER JOIN validity_period vp ON cmr.validity_period_id = vp.id
                 WHERE vp.state = 'VALID'

@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.rates;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.ValidityTuple;
 import de.avatic.lcc.model.db.rates.ValidityPeriod;
 import de.avatic.lcc.model.db.rates.ValidityPeriodState;

@@ -30,14 +31,17 @@ public class ValidityPeriodRepository {
      * The {@link JdbcTemplate} used for interacting with the database.
      */
     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

     /**
      * Constructs a new repository with a given {@link JdbcTemplate}.
      *
      * @param jdbcTemplate the {@link JdbcTemplate} used for executing SQL queries.
+     * @param dialectProvider the {@link SqlDialectProvider} for database-specific SQL syntax
      */
-    public ValidityPeriodRepository(JdbcTemplate jdbcTemplate) {
+    public ValidityPeriodRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     /**

@@ -60,8 +64,8 @@ public class ValidityPeriodRepository {
      */
     @Transactional
     public Optional<Integer> getPeriodId(LocalDateTime validAt) {
-        String query = "SELECT id FROM validity_period WHERE ? BETWEEN start_date AND end_date";
-        return Optional.ofNullable(jdbcTemplate.query(query, (rs) -> rs.next() ? rs.getInt("id") : null, validAt));
+        String query = "SELECT id FROM validity_period WHERE start_date <= ? AND (end_date IS NULL OR end_date >= ?)";
+        return Optional.ofNullable(jdbcTemplate.query(query, (rs) -> rs.next() ? rs.getInt("id") : null, validAt, validAt));
     }

     /**

@@ -274,7 +278,9 @@ public class ValidityPeriodRepository {
                 + whereClause + """
                 GROUP BY
                     cj.validity_period_id,
-                    cj.property_set_id
+                    cj.property_set_id,
+                    ps.start_date,
+                    vp.start_date
                 HAVING
                     COUNT(DISTINCT COALESCE(p.supplier_node_id, p.user_supplier_node_id)) = ?
                 ORDER BY

@@ -329,15 +335,20 @@ public class ValidityPeriodRepository {
     }

     public Optional<ValidityPeriod> getByDate(LocalDate date) {
-        String query = """
+        String query = String.format("""
                 SELECT * FROM validity_period
-                WHERE DATE(start_date) <= ?
-                AND (end_date IS NULL OR DATE(end_date) >= ?)
+                WHERE %s <= ?
+                AND (end_date IS NULL OR %s >= ?)
                 ORDER BY start_date DESC
-                LIMIT 1
-                """;
+                %s
+                """,
+                dialectProvider.extractDate("start_date"),
+                dialectProvider.extractDate("end_date"),
+                dialectProvider.buildPaginationClause(1, 0)
+        );

-        var periods = jdbcTemplate.query(query, new ValidityPeriodMapper(), date, date);
+        Object[] paginationParams = dialectProvider.getPaginationParameters(1, 0);
+        var periods = jdbcTemplate.query(query, new ValidityPeriodMapper(), date, date, paginationParams[0], paginationParams[1]);

         return periods.isEmpty() ? Optional.empty() : Optional.of(periods.getFirst());
     }

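Replacing `? BETWEEN start_date AND end_date` with an explicit range check also changes behaviour for open-ended periods: a NULL end_date now matches instead of making the comparison unknown. A plain-Java restatement of the rewritten predicate, useful as a mental model only and not code from the branch:

import java.time.LocalDateTime;

// In-memory restatement of the WHERE clause now used by getPeriodId; illustrative, not part of the branch.
class ValidityPredicateSketch {
    static boolean isValidAt(LocalDateTime startDate, LocalDateTime endDate, LocalDateTime validAt) {
        boolean started = !startDate.isAfter(validAt);                     // start_date <= ?
        boolean notEnded = endDate == null || !endDate.isBefore(validAt);  // end_date IS NULL OR end_date >= ?
        return started && notEnded;
    }
}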
@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.users;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.users.App;
 import de.avatic.lcc.model.db.users.Group;
 import org.springframework.jdbc.core.JdbcTemplate;

@@ -31,16 +32,19 @@ public class AppRepository {

     private final JdbcTemplate jdbcTemplate;
     private final GroupRepository groupRepository;
+    private final SqlDialectProvider dialectProvider;

     /**
      * Creates a new AppRepository.
      *
      * @param jdbcTemplate Spring JdbcTemplate used for executing SQL queries
      * @param groupRepository Repository used to resolve group identifiers
+     * @param dialectProvider SQL dialect provider for database-specific SQL syntax
      */
-    public AppRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository) {
+    public AppRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
         this.groupRepository = groupRepository;
+        this.dialectProvider = dialectProvider;
     }

     /**

@@ -128,11 +132,14 @@ public class AppRepository {
             jdbcTemplate.update("DELETE FROM sys_app_group_mapping WHERE app_id = ?", appId);
             return;
         } else {
-            for (Integer groupId : groups) {
-                jdbcTemplate.update(
-                        "INSERT IGNORE INTO sys_app_group_mapping (app_id, group_id) VALUES (?, ?)",
-                        appId, groupId
+            String insertQuery = dialectProvider.buildInsertIgnoreStatement(
+                    "sys_app_group_mapping",
+                    List.of("app_id", "group_id"),
+                    List.of("app_id", "group_id")
             );
+
+            for (Integer groupId : groups) {
+                jdbcTemplate.update(insertQuery, appId, groupId);
             }
         }

@@ -1,5 +1,6 @@
 package de.avatic.lcc.repositories.users;

+import de.avatic.lcc.database.dialect.SqlDialectProvider;
 import de.avatic.lcc.model.db.users.Group;
 import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
 import de.avatic.lcc.repositories.pagination.SearchQueryResult;

@@ -16,21 +17,26 @@ import java.util.List;
 @Repository
 public class GroupRepository {
     private final JdbcTemplate jdbcTemplate;
+    private final SqlDialectProvider dialectProvider;

-    public GroupRepository(JdbcTemplate jdbcTemplate) {
+    public GroupRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
         this.jdbcTemplate = jdbcTemplate;
+        this.dialectProvider = dialectProvider;
     }

     @Transactional
     public SearchQueryResult<Group> listGroups(SearchQueryPagination pagination) {

-        String query = "SELECT * FROM sys_group ORDER BY group_name LIMIT ? OFFSET ?";
+        String query = String.format("SELECT * FROM sys_group ORDER BY group_name %s",
+                dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
+
+        Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());

         var groups = jdbcTemplate.query(query, new GroupMapper(),
-                pagination.getLimit(), pagination.getOffset());
+                paginationParams[0], paginationParams[1]);

         Integer totalCount = jdbcTemplate.queryForObject(
-                "SELECT COUNT(*) FROM sys_group ORDER BY group_name",
+                "SELECT COUNT(*) FROM sys_group",
                 Integer.class
         );

@@ -63,8 +69,13 @@ public class GroupRepository {

     @Transactional
     public void updateGroup(Group group) {
-        String query = "INSERT INTO sys_group (group_name, group_description) VALUES (?, ?) ON DUPLICATE KEY UPDATE group_description = ?";
-        jdbcTemplate.update(query, group.getName(), group.getDescription(), group.getDescription());
+        String query = dialectProvider.buildUpsertStatement(
+                "sys_group",
+                List.of("group_name"),
+                List.of("group_name", "group_description"),
+                List.of("group_description")
+        );
+        jdbcTemplate.update(query, group.getName(), group.getDescription());
     }

     private static class GroupMapper implements RowMapper<Group> {

@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories.users;
|
package de.avatic.lcc.repositories.users;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.model.db.ValidityTuple;
|
import de.avatic.lcc.model.db.ValidityTuple;
|
||||||
import de.avatic.lcc.model.db.nodes.Node;
|
import de.avatic.lcc.model.db.nodes.Node;
|
||||||
import de.avatic.lcc.util.exception.base.ForbiddenException;
|
import de.avatic.lcc.util.exception.base.ForbiddenException;
|
||||||
|
|
@ -22,9 +23,11 @@ public class UserNodeRepository {
|
||||||
|
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public UserNodeRepository(JdbcTemplate jdbcTemplate) {
|
public UserNodeRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|
@ -43,11 +46,15 @@ public class UserNodeRepository {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (excludeDeprecated) {
|
if (excludeDeprecated) {
|
||||||
queryBuilder.append(" AND is_deprecated = FALSE");
|
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
|
||||||
}
|
}
|
||||||
|
|
||||||
queryBuilder.append(" LIMIT ?");
|
queryBuilder.append(" ORDER BY id");
|
||||||
params.add(limit);
|
queryBuilder.append(" ").append(dialectProvider.buildPaginationClause(limit, 0));
|
||||||
|
|
||||||
|
Object[] paginationParams = dialectProvider.getPaginationParameters(limit, 0);
|
||||||
|
params.add(paginationParams[0]);
|
||||||
|
params.add(paginationParams[1]);
|
||||||
|
|
||||||
return jdbcTemplate.query(queryBuilder.toString(), new NodeMapper(), params.toArray());
|
return jdbcTemplate.query(queryBuilder.toString(), new NodeMapper(), params.toArray());
|
||||||
}
|
}
|
||||||
|
|
@ -139,11 +146,19 @@ public class UserNodeRepository {
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public void checkOwner(List<Integer> userNodeIds, Integer userId) {
|
public void checkOwner(List<Integer> userNodeIds, Integer userId) {
|
||||||
String query = """
|
if (userNodeIds.isEmpty()) {
|
||||||
SELECT id FROM sys_user_node WHERE id IN (?) AND user_id <> ?
|
return;
|
||||||
""";
|
}
|
||||||
|
|
||||||
var otherIds = jdbcTemplate.queryForList(query, Integer.class, userNodeIds, userId);
|
String placeholders = String.join(",", Collections.nCopies(userNodeIds.size(), "?"));
|
||||||
|
String query = """
|
||||||
|
SELECT id FROM sys_user_node WHERE id IN (""" + placeholders + ") AND user_id <> ?";
|
||||||
|
|
||||||
|
// Combine userNodeIds and userId into a single parameter array
|
||||||
|
List<Object> params = new ArrayList<>(userNodeIds);
|
||||||
|
params.add(userId);
|
||||||
|
|
||||||
|
var otherIds = jdbcTemplate.queryForList(query, Integer.class, params.toArray());
|
||||||
|
|
||||||
if(!otherIds.isEmpty()) {
|
if(!otherIds.isEmpty()) {
|
||||||
throw new ForbiddenException("Access violation. Cannot open user nodes with ids = " + otherIds);
|
throw new ForbiddenException("Access violation. Cannot open user nodes with ids = " + otherIds);
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
package de.avatic.lcc.repositories.users;
|
package de.avatic.lcc.repositories.users;
|
||||||
|
|
||||||
|
import de.avatic.lcc.database.dialect.SqlDialectProvider;
|
||||||
import de.avatic.lcc.model.db.users.Group;
|
import de.avatic.lcc.model.db.users.Group;
|
||||||
import de.avatic.lcc.model.db.users.User;
|
import de.avatic.lcc.model.db.users.User;
|
||||||
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
|
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
|
||||||
|
|
@ -25,20 +26,24 @@ public class UserRepository {
|
||||||
|
|
||||||
private final JdbcTemplate jdbcTemplate;
|
private final JdbcTemplate jdbcTemplate;
|
||||||
private final GroupRepository groupRepository;
|
private final GroupRepository groupRepository;
|
||||||
|
private final SqlDialectProvider dialectProvider;
|
||||||
|
|
||||||
public UserRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository) {
|
public UserRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository, SqlDialectProvider dialectProvider) {
|
||||||
this.jdbcTemplate = jdbcTemplate;
|
this.jdbcTemplate = jdbcTemplate;
|
||||||
this.groupRepository = groupRepository;
|
this.groupRepository = groupRepository;
|
||||||
|
this.dialectProvider = dialectProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public SearchQueryResult<User> listUsers(SearchQueryPagination pagination) {
|
public SearchQueryResult<User> listUsers(SearchQueryPagination pagination) {
|
||||||
String query = """
|
String query = String.format("""
|
||||||
SELECT *
|
SELECT *
|
||||||
FROM sys_user
|
FROM sys_user
|
||||||
ORDER BY sys_user.workday_id LIMIT ? OFFSET ?""";
|
ORDER BY sys_user.workday_id %s""", dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
|
||||||
|
|
||||||
return new SearchQueryResult<>(jdbcTemplate.query(query, new UserMapper(), pagination.getLimit(), pagination.getOffset()), pagination.getPage(), getTotalUserCount(), pagination.getLimit());
|
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
|
||||||
|
|
||||||
|
return new SearchQueryResult<>(jdbcTemplate.query(query, new UserMapper(), paginationParams[0], paginationParams[1]), pagination.getPage(), getTotalUserCount(), pagination.getLimit());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -113,11 +118,14 @@ public class UserRepository {
|
||||||
return;
|
return;
|
||||||
} else
|
} else
|
||||||
{
|
{
|
||||||
for (Integer groupId : groups) {
|
String insertQuery = dialectProvider.buildInsertIgnoreStatement(
|
||||||
jdbcTemplate.update(
|
"sys_user_group_mapping",
|
||||||
"INSERT IGNORE INTO sys_user_group_mapping (user_id, group_id) VALUES (?, ?)",
|
List.of("user_id", "group_id"),
|
||||||
userId, groupId
|
List.of("user_id", "group_id")
|
||||||
);
|
);
|
||||||
|
|
||||||
|
for (Integer groupId : groups) {
|
||||||
|
jdbcTemplate.update(insertQuery, userId, groupId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -49,6 +49,7 @@ public class BatchGeoApiService {
|
||||||
|
|
||||||
ArrayList<BulkInstruction<ExcelNode>> noGeo = new ArrayList<>();
|
ArrayList<BulkInstruction<ExcelNode>> noGeo = new ArrayList<>();
|
||||||
ArrayList<BulkInstruction<ExcelNode>> failedGeoLookups = new ArrayList<>();
|
ArrayList<BulkInstruction<ExcelNode>> failedGeoLookups = new ArrayList<>();
|
||||||
|
ArrayList<BulkInstruction<ExcelNode>> failedFuzzyGeoLookups = new ArrayList<>();
|
||||||
int totalSuccessful = 0;
|
int totalSuccessful = 0;
|
||||||
|
|
||||||
for (var node : nodes) {
|
for (var node : nodes) {
|
||||||
|
|
@ -57,7 +58,6 @@ public class BatchGeoApiService {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
for (int currentBatch = 0; currentBatch < noGeo.size(); currentBatch += MAX_BATCH_SIZE) {
|
for (int currentBatch = 0; currentBatch < noGeo.size(); currentBatch += MAX_BATCH_SIZE) {
|
||||||
int end = Math.min(currentBatch + MAX_BATCH_SIZE, noGeo.size());
|
int end = Math.min(currentBatch + MAX_BATCH_SIZE, noGeo.size());
|
||||||
var chunk = noGeo.subList(currentBatch, end);
|
var chunk = noGeo.subList(currentBatch, end);
|
||||||
|
|
@ -67,33 +67,108 @@ public class BatchGeoApiService {
|
||||||
.toList());
|
.toList());
|
||||||
|
|
||||||
if (chunkResult.isPresent()) {
|
if (chunkResult.isPresent()) {
|
||||||
|
var response = chunkResult.get();
|
||||||
|
|
||||||
totalSuccessful += chunkResult.get().getSummary().getSuccessfulRequests();
|
|
||||||
|
|
||||||
|
if (response.getSummary() != null && response.getSummary().getSuccessfulRequests() != null) {
|
||||||
|
totalSuccessful += response.getSummary().getSuccessfulRequests();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.getBatchItems() == null || response.getBatchItems().isEmpty()) {
|
||||||
|
logger.warn("Batch response contains no items");
|
||||||
|
failedGeoLookups.addAll(chunk);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
for (int itemIdx = 0; itemIdx < chunk.size(); itemIdx++) {
|
for (int itemIdx = 0; itemIdx < chunk.size(); itemIdx++) {
|
||||||
var result = chunkResult.get().getBatchItems().get(itemIdx);
|
|
||||||
|
if (itemIdx >= response.getBatchItems().size()) {
|
||||||
|
logger.warn("BatchItems size mismatch at index {}", itemIdx);
|
||||||
|
failedGeoLookups.add(chunk.get(itemIdx));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var result = response.getBatchItems().get(itemIdx);
|
||||||
var node = chunk.get(itemIdx).getEntity();
|
var node = chunk.get(itemIdx).getEntity();
|
||||||
|
|
||||||
if (!result.getFeatures().isEmpty() &&
|
|
||||||
(result.getFeatures().getFirst().getProperties().getConfidence().equalsIgnoreCase("high") ||
|
if (result == null || result.getFeatures() == null || result.getFeatures().isEmpty()) {
|
||||||
result.getFeatures().getFirst().getProperties().getConfidence().equalsIgnoreCase("medium") ||
|
logger.warn("No geocoding result for address {}",
|
||||||
(result.getFeatures().getFirst().getProperties().getMatchCodes() != null &&
|
node.getAddress() != null ? node.getAddress() : "unknown");
|
||||||
result.getFeatures().getFirst().getProperties().getMatchCodes().stream().anyMatch(s -> s.equalsIgnoreCase("good"))))) {
|
|
||||||
var geometry = result.getFeatures().getFirst().getGeometry();
|
|
||||||
var properties = result.getFeatures().getFirst().getProperties();
|
|
||||||
node.setGeoLng(BigDecimal.valueOf(geometry.getCoordinates().get(0)));
|
|
||||||
node.setGeoLat(BigDecimal.valueOf(geometry.getCoordinates().get(1)));
|
|
||||||
node.setAddress(properties.getAddress().getFormattedAddress());
|
|
||||||
node.setCountryId(IsoCode.valueOf(properties.getAddress().getCountryRegion().getIso()));
|
|
||||||
} else {
|
|
||||||
logger.warn("Geocoding failed for address {}", node.getAddress());
|
|
||||||
failedGeoLookups.add(chunk.get(itemIdx));
|
failedGeoLookups.add(chunk.get(itemIdx));
|
||||||
//throw new ExcelValidationError("Unable to geocode " + node.getName() + ". Please check your address or enter geo position yourself.");
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var feature = result.getFeatures().getFirst();
|
||||||
|
if (feature == null) {
|
||||||
|
logger.warn("Feature is null for address {}", node.getAddress());
|
||||||
|
failedGeoLookups.add(chunk.get(itemIdx));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var properties = feature.getProperties();
|
||||||
|
if (properties == null) {
|
||||||
|
logger.warn("Properties is null for address {}", node.getAddress());
|
||||||
|
failedGeoLookups.add(chunk.get(itemIdx));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
String confidence = properties.getConfidence();
|
||||||
|
boolean hasGoodConfidence = confidence != null &&
|
||||||
|
(confidence.equalsIgnoreCase("high") ||
|
||||||
|
confidence.equalsIgnoreCase("medium"));
|
||||||
|
|
||||||
|
boolean hasGoodMatchCode = properties.getMatchCodes() != null &&
|
||||||
|
properties.getMatchCodes().stream()
|
||||||
|
.anyMatch(s -> s != null && s.equalsIgnoreCase("good"));
|
||||||
|
|
||||||
|
if (hasGoodConfidence || hasGoodMatchCode) {
|
||||||
|
var geometry = feature.getGeometry();
|
||||||
|
if (geometry == null || geometry.getCoordinates() == null ||
|
||||||
|
geometry.getCoordinates().size() < 2) {
|
||||||
|
logger.warn("Invalid geometry for address {}", node.getAddress());
|
||||||
|
failedGeoLookups.add(chunk.get(itemIdx));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var coordinates = geometry.getCoordinates();
|
||||||
|
if (coordinates.get(0) == null || coordinates.get(1) == null) {
|
||||||
|
logger.warn("Null coordinates for address {}", node.getAddress());
|
||||||
|
failedGeoLookups.add(chunk.get(itemIdx));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
node.setGeoLng(BigDecimal.valueOf(coordinates.get(0)));
|
||||||
|
node.setGeoLat(BigDecimal.valueOf(coordinates.get(1)));
|
||||||
|
|
||||||
|
if (properties.getAddress() != null &&
|
||||||
|
properties.getAddress().getFormattedAddress() != null) {
|
||||||
|
node.setAddress(properties.getAddress().getFormattedAddress());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (properties.getAddress() != null &&
|
||||||
|
properties.getAddress().getCountryRegion() != null &&
|
||||||
|
properties.getAddress().getCountryRegion().getIso() != null) {
|
||||||
|
try {
|
||||||
|
node.setCountryId(IsoCode.valueOf(
|
||||||
|
properties.getAddress().getCountryRegion().getIso()));
|
||||||
|
} catch (IllegalArgumentException e) {
|
||||||
|
logger.warn("Invalid ISO code: {}",
|
||||||
|
properties.getAddress().getCountryRegion().getIso());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
logger.warn("Geocoding failed for address {} (low confidence)",
|
||||||
|
node.getAddress());
|
||||||
|
failedGeoLookups.add(chunk.get(itemIdx));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
logger.warn("Batch request returned empty result");
|
||||||
|
failedGeoLookups.addAll(chunk);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
// Second pass: fuzzy lookup with company name for failed addresses
|
// Second pass: fuzzy lookup with company name for failed addresses
|
||||||
if (!failedGeoLookups.isEmpty()) {
|
if (!failedGeoLookups.isEmpty()) {
|
||||||
|
|
@ -108,31 +183,52 @@ public class BatchGeoApiService {
|
||||||
&& !fuzzyResult.get().getResults().isEmpty()) {
|
&& !fuzzyResult.get().getResults().isEmpty()) {
|
||||||
|
|
||||||
var result = fuzzyResult.get().getResults().getFirst();
|
var result = fuzzyResult.get().getResults().getFirst();
|
||||||
|
if (result == null) {
|
||||||
|
logger.warn("Fuzzy result is null for: {}", node.getName());
|
||||||
|
failedFuzzyGeoLookups.add(instruction);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
// Score >= 0.7 means good confidence (1.0 = perfect match)
|
double score = result.getScore();
|
||||||
if (result.getScore() >= 7.0) {
|
if (score < 7.0) {
|
||||||
node.setGeoLat(BigDecimal.valueOf(result.getPosition().getLat()));
|
logger.warn("Fuzzy search returned low confidence result for: {} (score: {})",
|
||||||
node.setGeoLng(BigDecimal.valueOf(result.getPosition().getLon()));
|
node.getName(), score);
|
||||||
|
failedFuzzyGeoLookups.add(instruction);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.getPosition() == null) {
|
||||||
|
logger.warn("Position is null for: {}", node.getName());
|
||||||
|
failedFuzzyGeoLookups.add(instruction);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
double lat = result.getPosition().getLat();
|
||||||
|
double lon = result.getPosition().getLon();
|
||||||
|
|
||||||
|
node.setGeoLat(BigDecimal.valueOf(lat));
|
||||||
|
node.setGeoLng(BigDecimal.valueOf(lon));
|
||||||
|
|
||||||
|
if (result.getAddress() != null &&
|
||||||
|
result.getAddress().getFreeformAddress() != null) {
|
||||||
node.setAddress(result.getAddress().getFreeformAddress());
|
node.setAddress(result.getAddress().getFreeformAddress());
|
||||||
|
}
|
||||||
|
|
||||||
// Update country if it differs
|
if (result.getAddress() != null &&
|
||||||
if (result.getAddress().getCountryCode() != null) {
|
result.getAddress().getCountryCode() != null) {
|
||||||
try {
|
try {
|
||||||
node.setCountryId(IsoCode.valueOf(result.getAddress().getCountryCode()));
|
node.setCountryId(IsoCode.valueOf(result.getAddress().getCountryCode()));
|
||||||
} catch (IllegalArgumentException e) {
|
} catch (IllegalArgumentException e) {
|
||||||
logger.warn("Unknown country code: {}", result.getAddress().getCountryCode());
|
logger.warn("Unknown country code: {}",
|
||||||
|
result.getAddress().getCountryCode());
|
||||||
|
failedFuzzyGeoLookups.add(instruction);
|
||||||
|
continue;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fuzzySuccessful++;
|
fuzzySuccessful++;
|
||||||
logger.info("Fuzzy search successful for: {} (score: {})",
|
logger.info("Fuzzy search successful for: {} (score: {})",
|
||||||
node.getName(), result.getScore());
|
node.getName(), score);
|
||||||
} else {
|
|
||||||
logger.warn("Fuzzy search returned low confidence result for: {} (score: {})",
|
|
||||||
node.getName(), result.getScore());
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.error("Fuzzy search found no results for: {}", node.getName());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -140,8 +236,10 @@ public class BatchGeoApiService {
|
||||||
fuzzySuccessful, failedGeoLookups.size());
|
fuzzySuccessful, failedGeoLookups.size());
|
||||||
|
|
||||||
// Throw error for remaining failed lookups
|
// Throw error for remaining failed lookups
|
||||||
int remainingFailed = failedGeoLookups.size() - fuzzySuccessful;
|
if (!failedFuzzyGeoLookups.isEmpty()) {
|
||||||
if (remainingFailed > 0) {
|
|
||||||
|
failedFuzzyGeoLookups.forEach(instruction -> {logger.warn("Lookup finally failed for: {}", instruction.getEntity().getName());});
|
||||||
|
|
||||||
var firstFailed = failedGeoLookups.stream()
|
var firstFailed = failedGeoLookups.stream()
|
||||||
.filter(i -> i.getEntity().getGeoLat() == null)
|
.filter(i -> i.getEntity().getGeoLat() == null)
|
||||||
.findFirst()
|
.findFirst()
|
||||||
|
|
@ -149,7 +247,9 @@ public class BatchGeoApiService {
|
||||||
.orElse(null);
|
.orElse(null);
|
||||||
|
|
||||||
if (firstFailed != null) {
|
if (firstFailed != null) {
|
||||||
throw new ExcelValidationError("Unable to geocode " + firstFailed.getName()
|
String name = firstFailed.getName() != null ?
|
||||||
|
firstFailed.getName() : "unknown";
|
||||||
|
throw new ExcelValidationError("Unable to geocode " + name
|
||||||
+ ". Please check your address or enter geo position yourself.");
|
+ ". Please check your address or enter geo position yourself.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -159,13 +259,32 @@ public class BatchGeoApiService {
|
||||||
private Optional<FuzzySearchResponse> executeFuzzySearch(ExcelNode node) {
|
private Optional<FuzzySearchResponse> executeFuzzySearch(ExcelNode node) {
|
||||||
try {
|
try {
|
||||||
String companyName = node.getName();
|
String companyName = node.getName();
|
||||||
String country = node.getCountryId().name();
|
if (companyName == null) {
|
||||||
|
logger.warn("Company name is null for fuzzy search");
|
||||||
|
return Optional.empty();
|
||||||
|
}
|
||||||
|
|
||||||
|
IsoCode countryId = node.getCountryId();
|
||||||
|
if (countryId == null) {
|
||||||
|
logger.warn("Country ID is null for fuzzy search: {}", companyName);
|
||||||
|
return Optional.empty();
|
||||||
|
}
|
||||||
|
String country = countryId.name();
|
||||||
|
|
||||||
|
String address = node.getAddress();
|
||||||
|
if (address == null) {
|
||||||
|
logger.warn("Address is null for fuzzy search: {}", companyName);
|
||||||
|
address = ""; // Fallback zu leerem String
|
||||||
|
}
|
||||||
|
|
||||||
// Normalisiere Unicode für konsistente Suche
|
// Normalisiere Unicode für konsistente Suche
|
||||||
companyName = java.text.Normalizer.normalize(companyName, java.text.Normalizer.Form.NFC);
|
companyName = java.text.Normalizer.normalize(companyName,
|
||||||
|
java.text.Normalizer.Form.NFC);
|
||||||
|
|
||||||
// URL-Encoding
|
// URL-Encoding
|
||||||
String encodedQuery = URLEncoder.encode(companyName + ", " + node.getAddress() + ", " + country, StandardCharsets.UTF_8);
|
String encodedQuery = URLEncoder.encode(
|
||||||
|
companyName + ", " + address + ", " + country,
|
||||||
|
StandardCharsets.UTF_8);
|
||||||
|
|
||||||
String url = String.format(
|
String url = String.format(
|
||||||
"https://atlas.microsoft.com/search/fuzzy/json?api-version=1.0&subscription-key=%s&query=%s&limit=5",
|
"https://atlas.microsoft.com/search/fuzzy/json?api-version=1.0&subscription-key=%s&query=%s&limit=5",
|
||||||
|
|
@ -185,13 +304,21 @@ public class BatchGeoApiService {
|
||||||
return Optional.ofNullable(response.getBody());
|
return Optional.ofNullable(response.getBody());
|
||||||
|
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
logger.error("Fuzzy search failed for {}", node.getName(), e);
|
logger.error("Fuzzy search failed for {}",
|
||||||
|
node.getName() != null ? node.getName() : "unknown", e);
|
||||||
return Optional.empty();
|
return Optional.empty();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private String getGeoCodeString(ExcelNode excelNode) {
|
private String getGeoCodeString(ExcelNode excelNode) {
|
||||||
return excelNode.getAddress() + ", " + excelNode.getCountryId();
|
String address = excelNode.getAddress();
|
||||||
|
IsoCode countryId = excelNode.getCountryId();
|
||||||
|
|
||||||
|
// Fallback-Werte für null
|
||||||
|
String addressStr = address != null ? address : "";
|
||||||
|
String countryStr = countryId != null ? countryId.name() : "";
|
||||||
|
|
||||||
|
return addressStr + ", " + countryStr;
|
||||||
}
|
}
|
||||||
|
|
||||||
private Optional<BatchGeocodingResponse> executeBatchRequest(List<BatchItem> batchItems) {
|
private Optional<BatchGeocodingResponse> executeBatchRequest(List<BatchItem> batchItems) {
|
||||||
|
|
|
||||||
|
|
@ -15,6 +15,7 @@ import org.apache.poi.xssf.usermodel.XSSFWorkbook;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.springframework.stereotype.Service;
|
import org.springframework.stereotype.Service;
|
||||||
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
|
|
||||||
import java.io.ByteArrayInputStream;
|
import java.io.ByteArrayInputStream;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
@ -56,6 +57,7 @@ public class BulkImportService {
|
||||||
this.materialFastExcelMapper = materialFastExcelMapper;
|
this.materialFastExcelMapper = materialFastExcelMapper;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
public void processOperation(BulkOperation op) throws IOException {
|
public void processOperation(BulkOperation op) throws IOException {
|
||||||
var file = op.getFile();
|
var file = op.getFile();
|
||||||
var type = op.getFileType();
|
var type = op.getFileType();
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,7 @@ import de.avatic.lcc.service.transformer.generic.NodeTransformer;
|
||||||
import de.avatic.lcc.util.exception.internalerror.ExcelValidationError;
|
import de.avatic.lcc.util.exception.internalerror.ExcelValidationError;
|
||||||
import org.springframework.stereotype.Service;
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
import java.util.*;
|
import java.util.*;
|
||||||
|
|
||||||
@Service
|
@Service
|
||||||
|
|
@ -61,22 +62,26 @@ public class NodeBulkImportService {
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean compare(Node updateNode, Node currentNode) {
|
private boolean compare(Node updateNode, Node currentNode) {
|
||||||
|
return Objects.equals(updateNode.getName(), currentNode.getName()) &&
|
||||||
return updateNode.getName().equals(currentNode.getName()) &&
|
compareBigDecimal(updateNode.getGeoLat(), currentNode.getGeoLat()) &&
|
||||||
updateNode.getGeoLat().compareTo(currentNode.getGeoLat()) == 0 &&
|
compareBigDecimal(updateNode.getGeoLng(), currentNode.getGeoLng()) &&
|
||||||
updateNode.getGeoLng().compareTo(currentNode.getGeoLng()) == 0 &&
|
Objects.equals(updateNode.getExternalMappingId(), currentNode.getExternalMappingId()) &&
|
||||||
updateNode.getExternalMappingId().equals(currentNode.getExternalMappingId()) &&
|
Objects.equals(updateNode.getCountryId(), currentNode.getCountryId()) &&
|
||||||
updateNode.getCountryId().equals(currentNode.getCountryId()) &&
|
Objects.equals(updateNode.getIntermediate(), currentNode.getIntermediate()) &&
|
||||||
updateNode.getIntermediate().equals(currentNode.getIntermediate()) &&
|
Objects.equals(updateNode.getDestination(), currentNode.getDestination()) &&
|
||||||
updateNode.getDestination().equals(currentNode.getDestination()) &&
|
Objects.equals(updateNode.getSource(), currentNode.getSource()) &&
|
||||||
updateNode.getSource().equals(currentNode.getSource()) &&
|
Objects.equals(updateNode.getAddress(), currentNode.getAddress()) &&
|
||||||
updateNode.getAddress().equals(currentNode.getAddress()) &&
|
Objects.equals(updateNode.getDeprecated(), currentNode.getDeprecated()) &&
|
||||||
updateNode.getDeprecated().equals(currentNode.getDeprecated()) &&
|
Objects.equals(updateNode.getId(), currentNode.getId()) &&
|
||||||
updateNode.getId().equals(currentNode.getId()) &&
|
Objects.equals(updateNode.getPredecessorRequired(), currentNode.getPredecessorRequired()) &&
|
||||||
updateNode.getPredecessorRequired().equals(currentNode.getPredecessorRequired()) &&
|
|
||||||
compare(updateNode.getNodePredecessors(), currentNode.getNodePredecessors()) &&
|
compare(updateNode.getNodePredecessors(), currentNode.getNodePredecessors()) &&
|
||||||
compare(updateNode.getOutboundCountries(), currentNode.getOutboundCountries());
|
compare(updateNode.getOutboundCountries(), currentNode.getOutboundCountries());
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean compareBigDecimal(BigDecimal a, BigDecimal b) {
|
||||||
|
if (a == null && b == null) return true;
|
||||||
|
if (a == null || b == null) return false;
|
||||||
|
return a.compareTo(b) == 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean compare(Collection<Integer> outbound1, Collection<Integer> outbound2) {
|
private boolean compare(Collection<Integer> outbound1, Collection<Integer> outbound2) {
|
||||||
|
|
|
||||||
50
src/main/resources/application-mssql.properties
Normal file
50
src/main/resources/application-mssql.properties
Normal file
|
|
@ -0,0 +1,50 @@
|
||||||
|
# MSSQL Profile Configuration
|
||||||
|
# Activate with: -Dspring.profiles.active=mssql or SPRING_PROFILES_ACTIVE=mssql
|
||||||
|
|
||||||
|
# Application Name
|
||||||
|
spring.application.name=lcc
|
||||||
|
|
||||||
|
# Database Configuration - MSSQL
|
||||||
|
spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver
|
||||||
|
spring.datasource.url=jdbc:sqlserver://${DB_HOST:localhost}:1433;databaseName=${DB_DATABASE:lcc};encrypt=true;trustServerCertificate=true
|
||||||
|
spring.datasource.username=${DB_USER:sa}
|
||||||
|
spring.datasource.password=${DB_PASSWORD}
|
||||||
|
|
||||||
|
# File Upload Limits
|
||||||
|
spring.servlet.multipart.max-file-size=30MB
|
||||||
|
spring.servlet.multipart.max-request-size=50MB
|
||||||
|
|
||||||
|
# Azure AD Configuration
|
||||||
|
spring.cloud.azure.active-directory.enabled=true
|
||||||
|
spring.cloud.azure.active-directory.authorization-clients.graph.scopes=openid,profile,email,https://graph.microsoft.com/User.Read
|
||||||
|
|
||||||
|
# Management Endpoints
|
||||||
|
management.endpoints.web.exposure.include=health,info,metrics
|
||||||
|
management.endpoint.health.show-details=when-authorized
|
||||||
|
|
||||||
|
# Flyway Migration - MSSQL
|
||||||
|
spring.flyway.enabled=true
|
||||||
|
spring.flyway.locations=classpath:db/migration/mssql
|
||||||
|
spring.flyway.baseline-on-migrate=true
|
||||||
|
spring.sql.init.mode=never
|
||||||
|
|
||||||
|
# LCC Configuration
|
||||||
|
lcc.allowed_cors=
|
||||||
|
lcc.allowed_oauth_token_cors=*
|
||||||
|
|
||||||
|
lcc.auth.identify.by=workday
|
||||||
|
lcc.auth.claim.workday=employeeid
|
||||||
|
lcc.auth.claim.email=preferred_username
|
||||||
|
lcc.auth.claim.firstname=given_name
|
||||||
|
lcc.auth.claim.lastname=family_name
|
||||||
|
|
||||||
|
lcc.auth.claim.ignore.workday=false
|
||||||
|
|
||||||
|
# Bulk Import
|
||||||
|
lcc.bulk.sheet_password=secretSheet?!
|
||||||
|
|
||||||
|
# Calculation Job Processor Configuration
|
||||||
|
calculation.job.processor.enabled=true
|
||||||
|
calculation.job.processor.pool-size=1
|
||||||
|
calculation.job.processor.delay=5000
|
||||||
|
calculation.job.processor.thread-name-prefix=calc-job-
|
||||||
50
src/main/resources/application-mysql.properties
Normal file
50
src/main/resources/application-mysql.properties
Normal file
|
|
@ -0,0 +1,50 @@
|
||||||
|
# MySQL Profile Configuration
|
||||||
|
# Activate with: -Dspring.profiles.active=mysql or SPRING_PROFILES_ACTIVE=mysql
|
||||||
|
|
||||||
|
# Application Name
|
||||||
|
spring.application.name=lcc
|
||||||
|
|
||||||
|
# Database Configuration - MySQL
|
||||||
|
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
|
||||||
|
spring.datasource.url=jdbc:mysql://${DB_HOST:localhost}:3306/${DB_DATABASE:lcc}
|
||||||
|
spring.datasource.username=${DB_USER:root}
|
||||||
|
spring.datasource.password=${DB_PASSWORD}
|
||||||
|
|
||||||
|
# File Upload Limits
|
||||||
|
spring.servlet.multipart.max-file-size=30MB
|
||||||
|
spring.servlet.multipart.max-request-size=50MB
|
||||||
|
|
||||||
|
# Azure AD Configuration
|
||||||
|
spring.cloud.azure.active-directory.enabled=true
|
||||||
|
spring.cloud.azure.active-directory.authorization-clients.graph.scopes=openid,profile,email,https://graph.microsoft.com/User.Read
|
||||||
|
|
||||||
|
# Management Endpoints
|
||||||
|
management.endpoints.web.exposure.include=health,info,metrics
|
||||||
|
management.endpoint.health.show-details=when-authorized
|
||||||
|
|
||||||
|
# Flyway Migration - MySQL
|
||||||
|
spring.flyway.enabled=true
|
||||||
|
spring.flyway.locations=classpath:db/migration/mysql
|
||||||
|
spring.flyway.baseline-on-migrate=true
|
||||||
|
spring.sql.init.mode=never
|
||||||
|
|
||||||
|
# LCC Configuration
|
||||||
|
lcc.allowed_cors=
|
||||||
|
lcc.allowed_oauth_token_cors=*
|
||||||
|
|
||||||
|
lcc.auth.identify.by=workday
|
||||||
|
lcc.auth.claim.workday=employeeid
|
||||||
|
lcc.auth.claim.email=preferred_username
|
||||||
|
lcc.auth.claim.firstname=given_name
|
||||||
|
lcc.auth.claim.lastname=family_name
|
||||||
|
|
||||||
|
lcc.auth.claim.ignore.workday=false
|
||||||
|
|
||||||
|
# Bulk Import
|
||||||
|
lcc.bulk.sheet_password=secretSheet?!
|
||||||
|
|
||||||
|
# Calculation Job Processor Configuration
|
||||||
|
calculation.job.processor.enabled=true
|
||||||
|
calculation.job.processor.pool-size=1
|
||||||
|
calculation.job.processor.delay=5000
|
||||||
|
calculation.job.processor.thread-name-prefix=calc-job-
|
||||||
|
|
@ -1,8 +1,17 @@
|
||||||
|
# MySQL Profile Configuration
|
||||||
|
# Activate with: -Dspring.profiles.active=mysql or SPRING_PROFILES_ACTIVE=mysql
|
||||||
|
|
||||||
# Application Name
|
# Application Name
|
||||||
spring.application.name=lcc
|
spring.application.name=lcc
|
||||||
|
|
||||||
# Database Configuration
|
# Active Profile (mysql or mssql)
|
||||||
|
spring.profiles.active=prod,mysql
|
||||||
|
|
||||||
|
# Database Configuration - MySQL
|
||||||
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
|
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
|
||||||
|
spring.datasource.url=jdbc:mysql://${DB_HOST:localhost}:3306/${DB_DATABASE:lcc}
|
||||||
|
spring.datasource.username=${DB_USER:root}
|
||||||
|
spring.datasource.password=${DB_PASSWORD}
|
||||||
|
|
||||||
# File Upload Limits
|
# File Upload Limits
|
||||||
spring.servlet.multipart.max-file-size=30MB
|
spring.servlet.multipart.max-file-size=30MB
|
||||||
|
|
@ -16,16 +25,16 @@ spring.cloud.azure.active-directory.authorization-clients.graph.scopes=openid,pr
|
||||||
management.endpoints.web.exposure.include=health,info,metrics
|
management.endpoints.web.exposure.include=health,info,metrics
|
||||||
management.endpoint.health.show-details=when-authorized
|
management.endpoint.health.show-details=when-authorized
|
||||||
|
|
||||||
# Flyway Migration
|
# Flyway Migration - MySQL
|
||||||
spring.flyway.enabled=true
|
spring.flyway.enabled=true
|
||||||
spring.flyway.locations=classpath:db/migration
|
spring.flyway.locations=classpath:db/migration/mysql
|
||||||
spring.flyway.baseline-on-migrate=true
|
spring.flyway.baseline-on-migrate=true
|
||||||
spring.sql.init.mode=never
|
spring.sql.init.mode=never
|
||||||
|
|
||||||
|
# LCC Configuration
|
||||||
lcc.allowed_cors=
|
lcc.allowed_cors=
|
||||||
lcc.allowed_oauth_token_cors=*
|
lcc.allowed_oauth_token_cors=*
|
||||||
|
|
||||||
|
|
||||||
lcc.auth.identify.by=workday
|
lcc.auth.identify.by=workday
|
||||||
lcc.auth.claim.workday=employeeid
|
lcc.auth.claim.workday=employeeid
|
||||||
lcc.auth.claim.email=preferred_username
|
lcc.auth.claim.email=preferred_username
|
||||||
|
|
|
||||||
16930
src/main/resources/db/migration/mssql/V10__Nomenclature.sql
Normal file
16930
src/main/resources/db/migration/mssql/V10__Nomenclature.sql
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,58 @@
|
||||||
|
-- Add retries and priority columns to calculation_job (if not exists)
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'calculation_job') AND name = 'retries')
|
||||||
|
BEGIN
|
||||||
|
ALTER TABLE calculation_job ADD retries INT NOT NULL DEFAULT 0;
|
||||||
|
END
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'calculation_job') AND name = 'priority')
|
||||||
|
BEGIN
|
||||||
|
ALTER TABLE calculation_job
|
||||||
|
ADD priority VARCHAR(10) NOT NULL DEFAULT 'MEDIUM'
|
||||||
|
CHECK (priority IN ('LOW', 'MEDIUM', 'HIGH'));
|
||||||
|
END
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = OBJECT_ID(N'calculation_job') AND name = 'idx_priority')
|
||||||
|
BEGIN
|
||||||
|
CREATE INDEX idx_priority ON calculation_job(priority);
|
||||||
|
END
|
||||||
|
|
||||||
|
-- Add retries column to distance_matrix (if not exists)
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'distance_matrix') AND name = 'retries')
|
||||||
|
BEGIN
|
||||||
|
ALTER TABLE distance_matrix ADD retries INT NOT NULL DEFAULT 0;
|
||||||
|
END
|
||||||
|
|
||||||
|
ALTER TABLE distance_matrix
|
||||||
|
DROP CONSTRAINT chk_distance_matrix_state;
|
||||||
|
|
||||||
|
ALTER TABLE distance_matrix
|
||||||
|
ADD CONSTRAINT chk_distance_matrix_state CHECK (state IN ('VALID', 'STALE', 'EXCEPTION'));
|
||||||
|
|
||||||
|
|
||||||
|
-- Check if distance_d2d column exists before adding (already exists in V1)
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'premise_destination') AND name = 'distance_d2d')
|
||||||
|
BEGIN
|
||||||
|
ALTER TABLE premise_destination
|
||||||
|
ADD distance_d2d DECIMAL(15, 2) DEFAULT NULL;
|
||||||
|
|
||||||
|
EXEC sp_addextendedproperty
|
||||||
|
@name = N'MS_Description',
|
||||||
|
@value = N'travel distance between the two nodes in meters',
|
||||||
|
@level0type = N'SCHEMA', @level0name = 'dbo',
|
||||||
|
@level1type = N'TABLE', @level1name = 'premise_destination',
|
||||||
|
@level2type = N'COLUMN', @level2name = 'distance_d2d';
|
||||||
|
END
|
||||||
|
|
||||||
|
-- Add distance column to premise_route_section (if not exists)
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'premise_route_section') AND name = 'distance')
|
||||||
|
BEGIN
|
||||||
|
ALTER TABLE premise_route_section
|
||||||
|
ADD distance DECIMAL(15, 2) DEFAULT NULL;
|
||||||
|
|
||||||
|
EXEC sp_addextendedproperty
|
||||||
|
@name = N'MS_Description',
|
||||||
|
@value = N'travel distance between the two nodes in meters',
|
||||||
|
@level0type = N'SCHEMA', @level0name = 'dbo',
|
||||||
|
@level1type = N'TABLE', @level1name = 'premise_route_section',
|
||||||
|
@level2type = N'COLUMN', @level2name = 'distance';
|
||||||
|
END
|
||||||
|
|
@ -0,0 +1,15 @@
|
||||||
|
-- Merge statement for MSSQL (equivalent to INSERT ... ON DUPLICATE KEY UPDATE)
|
||||||
|
MERGE INTO packaging_property_type AS target
|
||||||
|
USING (VALUES
|
||||||
|
(N'Stackable', 'STACKABLE', 'BOOLEAN', NULL, 0, N'desc', 'general', 1),
|
||||||
|
(N'Rust Prevention', 'RUST_PREVENTION', 'BOOLEAN', NULL, 0, N'desc', 'general', 2),
|
||||||
|
(N'Mixable', 'MIXABLE', 'BOOLEAN', NULL, 0, N'desc', 'general', 3)
|
||||||
|
) AS source (name, external_mapping_id, data_type, validation_rule, is_required, description, property_group, sequence_number)
|
||||||
|
ON target.external_mapping_id = source.external_mapping_id
|
||||||
|
WHEN MATCHED THEN
|
||||||
|
UPDATE SET
|
||||||
|
name = source.name,
|
||||||
|
data_type = source.data_type
|
||||||
|
WHEN NOT MATCHED THEN
|
||||||
|
INSERT (name, external_mapping_id, data_type, validation_rule, is_required, description, property_group, sequence_number)
|
||||||
|
VALUES (source.name, source.external_mapping_id, source.data_type, source.validation_rule, source.is_required, source.description, source.property_group, source.sequence_number);
|
||||||
666
src/main/resources/db/migration/mssql/V1__Create_schema.sql
Normal file
666
src/main/resources/db/migration/mssql/V1__Create_schema.sql
Normal file
|
|
@ -0,0 +1,666 @@
|
||||||
|
-- Property management tables
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'property_set') AND type in (N'U'))
|
||||||
|
CREATE TABLE property_set
|
||||||
|
(
|
||||||
|
-- Represents a collection of properties valid for a specific time period
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
start_date DATETIME2 NOT NULL DEFAULT GETDATE(),
|
||||||
|
end_date DATETIME2 NULL,
|
||||||
|
state VARCHAR(8) NOT NULL,
|
||||||
|
CONSTRAINT chk_property_state_values CHECK (state IN ('DRAFT', 'VALID', 'INVALID', 'EXPIRED')),
|
||||||
|
CONSTRAINT chk_property_date_range CHECK (end_date IS NULL OR end_date > start_date)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_dates ON property_set (start_date, end_date);
|
||||||
|
CREATE INDEX idx_property_set_id ON property_set (id);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'system_property_type') AND type in (N'U'))
|
||||||
|
CREATE TABLE system_property_type
|
||||||
|
(
|
||||||
|
-- Stores system-wide configuration property types
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
name NVARCHAR(255) NOT NULL,
|
||||||
|
external_mapping_id VARCHAR(16),
|
||||||
|
description NVARCHAR(512) NOT NULL,
|
||||||
|
property_group VARCHAR(32) NOT NULL,
|
||||||
|
sequence_number INT NOT NULL,
|
||||||
|
data_type VARCHAR(16) NOT NULL,
|
||||||
|
validation_rule VARCHAR(64),
|
||||||
|
CONSTRAINT idx_external_mapping UNIQUE (external_mapping_id),
|
||||||
|
CONSTRAINT chk_system_data_type_values CHECK (data_type IN
|
||||||
|
('INT', 'PERCENTAGE', 'BOOLEAN', 'CURRENCY', 'ENUMERATION',
|
||||||
|
'TEXT'))
|
||||||
|
);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'system_property') AND type in (N'U'))
|
||||||
|
CREATE TABLE system_property
|
||||||
|
(
|
||||||
|
-- Stores system-wide configuration properties
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
property_set_id INT NOT NULL,
|
||||||
|
system_property_type_id INT NOT NULL,
|
||||||
|
property_value NVARCHAR(500),
|
||||||
|
FOREIGN KEY (property_set_id) REFERENCES property_set (id),
|
||||||
|
FOREIGN KEY (system_property_type_id) REFERENCES system_property_type (id),
|
||||||
|
CONSTRAINT idx_system_property_type_id_property_set UNIQUE (system_property_type_id, property_set_id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_system_property_type_id ON system_property (system_property_type_id);
|
||||||
|
CREATE INDEX idx_system_property_set_id ON system_property (property_set_id);
|
||||||
|
|
||||||
|
-- country
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country') AND type in (N'U'))
|
||||||
|
CREATE TABLE country
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1),
|
||||||
|
iso_code VARCHAR(2) NOT NULL,
|
||||||
|
region_code VARCHAR(5) NOT NULL,
|
||||||
|
name NVARCHAR(255) NOT NULL,
|
||||||
|
is_deprecated BIT NOT NULL DEFAULT 0,
|
||||||
|
PRIMARY KEY (id),
|
||||||
|
CONSTRAINT uk_country_iso_code UNIQUE (iso_code),
|
||||||
|
CONSTRAINT chk_country_region_code
|
||||||
|
CHECK (region_code IN ('EMEA', 'LATAM', 'APAC', 'NAM'))
|
||||||
|
);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country_property_type') AND type in (N'U'))
|
||||||
|
CREATE TABLE country_property_type
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1),
|
||||||
|
name NVARCHAR(255) NOT NULL,
|
||||||
|
external_mapping_id VARCHAR(16),
|
||||||
|
data_type VARCHAR(16) NOT NULL,
|
||||||
|
validation_rule VARCHAR(64),
|
||||||
|
description NVARCHAR(512) NOT NULL,
|
||||||
|
property_group VARCHAR(32) NOT NULL,
|
||||||
|
sequence_number INT NOT NULL,
|
||||||
|
is_required BIT NOT NULL DEFAULT 0,
|
||||||
|
CONSTRAINT chk_country_data_type_values CHECK (data_type IN
|
||||||
|
('INT', 'PERCENTAGE', 'BOOLEAN', 'CURRENCY', 'ENUMERATION',
|
||||||
|
'TEXT')),
|
||||||
|
PRIMARY KEY (id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_property_type_data_type ON country_property_type (data_type);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country_property') AND type in (N'U'))
|
||||||
|
CREATE TABLE country_property
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
country_id INT NOT NULL,
|
||||||
|
country_property_type_id INT NOT NULL,
|
||||||
|
property_set_id INT NOT NULL,
|
||||||
|
property_value NVARCHAR(500),
|
||||||
|
FOREIGN KEY (country_id) REFERENCES country (id),
|
||||||
|
FOREIGN KEY (country_property_type_id) REFERENCES country_property_type (id),
|
||||||
|
FOREIGN KEY (property_set_id) REFERENCES property_set (id),
|
||||||
|
CONSTRAINT idx_country_property UNIQUE (country_id, country_property_type_id, property_set_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Main table for user information
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_user') AND type in (N'U'))
|
||||||
|
CREATE TABLE sys_user
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
workday_id VARCHAR(32) NOT NULL,
|
||||||
|
email VARCHAR(254) NOT NULL,
|
||||||
|
firstname NVARCHAR(100) NOT NULL,
|
||||||
|
lastname NVARCHAR(100) NOT NULL,
|
||||||
|
is_active BIT NOT NULL DEFAULT 1,
|
||||||
|
CONSTRAINT idx_user_email UNIQUE (email),
|
||||||
|
CONSTRAINT idx_user_workday UNIQUE (workday_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Group definitions
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_group') AND type in (N'U'))
|
||||||
|
CREATE TABLE sys_group
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
group_name NVARCHAR(64) NOT NULL,
|
||||||
|
group_description NVARCHAR(MAX) NOT NULL,
|
||||||
|
CONSTRAINT idx_group_name UNIQUE (group_name)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Junction table for user-group assignments
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_user_group_mapping') AND type in (N'U'))
|
||||||
|
CREATE TABLE sys_user_group_mapping
|
||||||
|
(
|
||||||
|
user_id INT NOT NULL,
|
||||||
|
group_id INT NOT NULL,
|
||||||
|
PRIMARY KEY (user_id, group_id),
|
||||||
|
FOREIGN KEY (user_id) REFERENCES sys_user (id),
|
||||||
|
FOREIGN KEY (group_id) REFERENCES sys_group (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_user_node') AND type in (N'U'))
|
||||||
|
CREATE TABLE sys_user_node
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
user_id INT NOT NULL,
|
||||||
|
country_id INT NOT NULL,
|
||||||
|
name NVARCHAR(254) NOT NULL,
|
||||||
|
address NVARCHAR(500) NOT NULL,
|
||||||
|
geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
|
||||||
|
geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
|
||||||
|
is_deprecated BIT DEFAULT 0,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES sys_user (id),
|
||||||
|
FOREIGN KEY (country_id) REFERENCES country (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Main table for application information
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_app') AND type in (N'U'))
|
||||||
|
CREATE TABLE sys_app
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
client_id VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
client_secret VARCHAR(255) NOT NULL,
|
||||||
|
name NVARCHAR(255) NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Junction table for app-group assignments
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_app_group_mapping') AND type in (N'U'))
|
||||||
|
CREATE TABLE sys_app_group_mapping
|
||||||
|
(
|
||||||
|
app_id INT NOT NULL,
|
||||||
|
group_id INT NOT NULL,
|
||||||
|
PRIMARY KEY (app_id, group_id),
|
||||||
|
FOREIGN KEY (app_id) REFERENCES sys_app (id),
|
||||||
|
FOREIGN KEY (group_id) REFERENCES sys_group (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- logistic nodes
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'node') AND type in (N'U'))
|
||||||
|
CREATE TABLE node
|
||||||
|
(
|
||||||
|
id INT IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
country_id INT NOT NULL,
|
||||||
|
name NVARCHAR(255) NOT NULL,
|
||||||
|
address NVARCHAR(500) NOT NULL,
|
||||||
|
external_mapping_id VARCHAR(32),
|
||||||
|
predecessor_required BIT NOT NULL DEFAULT 0,
|
||||||
|
is_destination BIT NOT NULL,
|
||||||
|
is_source BIT NOT NULL,
|
||||||
|
is_intermediate BIT NOT NULL,
|
||||||
|
geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
|
||||||
|
geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
|
||||||
|
updated_at DATETIME2 NOT NULL DEFAULT GETDATE(),
|
||||||
|
is_deprecated BIT NOT NULL DEFAULT 0,
|
||||||
|
FOREIGN KEY (country_id) REFERENCES country (id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_country_id ON node (country_id);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'node_predecessor_chain') AND type in (N'U'))
|
||||||
|
CREATE TABLE node_predecessor_chain
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
node_id INT NOT NULL,
|
||||||
|
FOREIGN KEY (node_id) REFERENCES node (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'node_predecessor_entry') AND type in (N'U'))
|
||||||
|
CREATE TABLE node_predecessor_entry
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
node_id INT NOT NULL,
|
||||||
|
node_predecessor_chain_id INT NOT NULL,
|
||||||
|
sequence_number INT NOT NULL CHECK (sequence_number > 0),
|
||||||
|
FOREIGN KEY (node_id) REFERENCES node (id),
|
||||||
|
FOREIGN KEY (node_predecessor_chain_id) REFERENCES node_predecessor_chain (id),
|
||||||
|
CONSTRAINT uk_node_predecessor UNIQUE (node_predecessor_chain_id, sequence_number)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_node_predecessor ON node_predecessor_entry (node_predecessor_chain_id);
|
||||||
|
CREATE INDEX idx_sequence ON node_predecessor_entry (sequence_number);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'outbound_country_mapping') AND type in (N'U'))
|
||||||
|
CREATE TABLE outbound_country_mapping
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
node_id INT NOT NULL,
|
||||||
|
country_id INT NOT NULL,
|
||||||
|
FOREIGN KEY (node_id) REFERENCES node (id),
|
||||||
|
FOREIGN KEY (country_id) REFERENCES country (id),
|
||||||
|
CONSTRAINT uk_node_id_country_id UNIQUE (node_id, country_id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_ocm_node_id ON outbound_country_mapping (node_id);
|
||||||
|
CREATE INDEX idx_ocm_country_id ON outbound_country_mapping (country_id);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'distance_matrix') AND type in (N'U'))
|
||||||
|
CREATE TABLE distance_matrix
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
from_node_id INT DEFAULT NULL,
|
||||||
|
to_node_id INT DEFAULT NULL,
|
||||||
|
from_user_node_id INT DEFAULT NULL,
|
||||||
|
to_user_node_id INT DEFAULT NULL,
|
||||||
|
from_geo_lat DECIMAL(8, 4) CHECK (from_geo_lat BETWEEN -90 AND 90),
|
||||||
|
from_geo_lng DECIMAL(8, 4) CHECK (from_geo_lng BETWEEN -180 AND 180),
|
||||||
|
to_geo_lat DECIMAL(8, 4) CHECK (to_geo_lat BETWEEN -90 AND 90),
|
||||||
|
to_geo_lng DECIMAL(8, 4) CHECK (to_geo_lng BETWEEN -180 AND 180),
|
||||||
|
distance DECIMAL(15, 2) NOT NULL,
|
||||||
|
updated_at DATETIME2 NOT NULL DEFAULT GETDATE(),
|
||||||
|
state VARCHAR(10) NOT NULL,
|
||||||
|
FOREIGN KEY (from_node_id) REFERENCES node (id),
|
||||||
|
FOREIGN KEY (to_node_id) REFERENCES node (id),
|
||||||
|
FOREIGN KEY (from_user_node_id) REFERENCES sys_user_node (id),
|
||||||
|
FOREIGN KEY (to_user_node_id) REFERENCES sys_user_node (id),
|
||||||
|
CONSTRAINT chk_distance_matrix_state CHECK (state IN ('VALID', 'STALE')),
|
||||||
|
CONSTRAINT chk_from_node_xor CHECK (
|
||||||
|
(from_node_id IS NOT NULL AND from_user_node_id IS NULL) OR
|
||||||
|
(from_node_id IS NULL AND from_user_node_id IS NOT NULL)
|
||||||
|
),
|
||||||
|
CONSTRAINT chk_to_node_xor CHECK (
|
||||||
|
(to_node_id IS NOT NULL AND to_user_node_id IS NULL) OR
|
||||||
|
(to_node_id IS NULL AND to_user_node_id IS NOT NULL)
|
||||||
|
),
|
||||||
|
CONSTRAINT uk_nodes_unique UNIQUE (from_node_id, to_node_id, from_user_node_id, to_user_node_id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_from_to_nodes ON distance_matrix (from_node_id, to_node_id);
|
||||||
|
CREATE INDEX idx_user_from_to_nodes ON distance_matrix (from_user_node_id, to_user_node_id);
|
||||||
|
|
||||||
|
-- container rates
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'validity_period') AND type in (N'U'))
|
||||||
|
CREATE TABLE validity_period
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
start_date DATETIME2 NOT NULL DEFAULT GETDATE(),
|
||||||
|
end_date DATETIME2 DEFAULT NULL,
|
||||||
|
renewals INT DEFAULT 0,
|
||||||
|
state VARCHAR(8) NOT NULL CHECK (state IN ('DRAFT', 'VALID', 'INVALID', 'EXPIRED')),
|
||||||
|
CONSTRAINT chk_validity_date_range CHECK (end_date IS NULL OR end_date > start_date)
|
||||||
|
);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'container_rate') AND type in (N'U'))
|
||||||
|
CREATE TABLE container_rate
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
from_node_id INT NOT NULL,
|
||||||
|
to_node_id INT NOT NULL,
|
||||||
|
container_rate_type VARCHAR(8) CHECK (container_rate_type IN ('RAIL', 'SEA', 'POST_RUN', 'ROAD')),
|
||||||
|
rate_teu DECIMAL(15, 2) NOT NULL,
|
||||||
|
rate_feu DECIMAL(15, 2) NOT NULL,
|
||||||
|
rate_hc DECIMAL(15, 2) NOT NULL,
|
||||||
|
lead_time INT NOT NULL,
|
||||||
|
validity_period_id INT NOT NULL,
|
||||||
|
FOREIGN KEY (from_node_id) REFERENCES node (id),
|
||||||
|
FOREIGN KEY (to_node_id) REFERENCES node (id),
|
||||||
|
FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
|
||||||
|
CONSTRAINT uk_container_rate_unique UNIQUE (from_node_id, to_node_id, validity_period_id, container_rate_type)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_cr_from_to_nodes ON container_rate (from_node_id, to_node_id);
|
||||||
|
CREATE INDEX idx_cr_validity_period_id ON container_rate (validity_period_id);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country_matrix_rate') AND type in (N'U'))
|
||||||
|
CREATE TABLE country_matrix_rate
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
from_country_id INT NOT NULL,
|
||||||
|
to_country_id INT NOT NULL,
|
||||||
|
rate DECIMAL(15, 2) NOT NULL,
|
||||||
|
validity_period_id INT NOT NULL,
|
||||||
|
FOREIGN KEY (from_country_id) REFERENCES country (id),
|
||||||
|
FOREIGN KEY (to_country_id) REFERENCES country (id),
|
||||||
|
FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
|
||||||
|
CONSTRAINT uk_country_matrix_rate_unique UNIQUE (from_country_id, to_country_id, validity_period_id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_cmr_from_to_country ON country_matrix_rate (from_country_id, to_country_id);
|
||||||
|
CREATE INDEX idx_cmr_validity_period_id ON country_matrix_rate (validity_period_id);
|
||||||
|
|
||||||
|
-- packaging and material
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'material') AND type in (N'U'))
|
||||||
|
CREATE TABLE material
|
||||||
|
(
|
||||||
|
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
|
||||||
|
part_number VARCHAR(12) NOT NULL,
|
||||||
|
normalized_part_number VARCHAR(12) NOT NULL,
|
||||||
|
hs_code VARCHAR(11),
|
||||||
|
name NVARCHAR(500) NOT NULL,
|
||||||
|
is_deprecated BIT NOT NULL DEFAULT 0,
|
||||||
|
CONSTRAINT uq_normalized_part_number UNIQUE (normalized_part_number)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_part_number ON material (part_number);
|
||||||
|
CREATE INDEX idx_normalized_part_number ON material (normalized_part_number);
|
||||||
|
CREATE INDEX idx_hs_code ON material (hs_code);
|
||||||
|
|
||||||
|
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging_dimension') AND type in (N'U'))
CREATE TABLE packaging_dimension
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    type VARCHAR(3) DEFAULT 'HU',
    length INT NOT NULL,
    width INT NOT NULL,
    height INT NOT NULL,
    displayed_dimension_unit VARCHAR(2) DEFAULT 'CM',
    weight INT NOT NULL,
    displayed_weight_unit VARCHAR(2) DEFAULT 'KG',
    content_unit_count INT NOT NULL,
    is_deprecated BIT NOT NULL DEFAULT 0,
    CONSTRAINT chk_packaging_dimension_type_values CHECK (type IN ('SHU', 'HU')),
    CONSTRAINT chk_packaging_dimension_displayed_dimension_unit CHECK (displayed_dimension_unit IN ('MM', 'CM', 'M')),
    CONSTRAINT chk_packaging_dimension_displayed_weight_unit CHECK (displayed_weight_unit IN ('T', 'G', 'KG'))
);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging') AND type in (N'U'))
CREATE TABLE packaging
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    supplier_node_id INT NOT NULL,
    material_id INT NOT NULL,
    hu_dimension_id INT NOT NULL,
    shu_dimension_id INT NOT NULL,
    is_deprecated BIT NOT NULL DEFAULT 0,
    FOREIGN KEY (supplier_node_id) REFERENCES node (id),
    FOREIGN KEY (material_id) REFERENCES material (id),
    FOREIGN KEY (hu_dimension_id) REFERENCES packaging_dimension (id),
    FOREIGN KEY (shu_dimension_id) REFERENCES packaging_dimension (id)
);

CREATE INDEX idx_pkg_material_id ON packaging (material_id);
CREATE INDEX idx_pkg_hu_dimension_id ON packaging (hu_dimension_id);
CREATE INDEX idx_pkg_shu_dimension_id ON packaging (shu_dimension_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging_property_type') AND type in (N'U'))
CREATE TABLE packaging_property_type
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    name NVARCHAR(255) NOT NULL,
    external_mapping_id VARCHAR(16) NOT NULL,
    description NVARCHAR(255) NOT NULL,
    property_group VARCHAR(32) NOT NULL,
    sequence_number INT NOT NULL,
    data_type VARCHAR(16),
    validation_rule VARCHAR(64),
    is_required BIT NOT NULL DEFAULT 0,
    CONSTRAINT idx_packaging_property_type UNIQUE (external_mapping_id),
    CONSTRAINT chk_packaging_data_type_values CHECK (data_type IN
        ('INT', 'PERCENTAGE', 'BOOLEAN', 'CURRENCY', 'ENUMERATION', 'TEXT'))
);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging_property') AND type in (N'U'))
CREATE TABLE packaging_property
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    packaging_property_type_id INT NOT NULL,
    packaging_id INT NOT NULL,
    property_value NVARCHAR(500),
    FOREIGN KEY (packaging_property_type_id) REFERENCES packaging_property_type (id),
    FOREIGN KEY (packaging_id) REFERENCES packaging (id),
    CONSTRAINT idx_packaging_property_unique UNIQUE (packaging_property_type_id, packaging_id)
);

CREATE INDEX idx_pp_packaging_property_type_id ON packaging_property (packaging_property_type_id);
CREATE INDEX idx_pp_packaging_id ON packaging_property (packaging_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise') AND type in (N'U'))
CREATE TABLE premise
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    material_id INT NOT NULL,
    supplier_node_id INT,
    user_supplier_node_id INT,
    geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
    geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
    country_id INT NOT NULL,
    packaging_id INT DEFAULT NULL,
    user_id INT NOT NULL,
    created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
    updated_at DATETIME2 NOT NULL DEFAULT GETDATE(),
    material_cost DECIMAL(15, 2) DEFAULT NULL,
    is_fca_enabled BIT DEFAULT 0,
    oversea_share DECIMAL(8, 4) DEFAULT NULL,
    hs_code VARCHAR(11) DEFAULT NULL,
    tariff_measure INT DEFAULT NULL,
    tariff_rate DECIMAL(8, 4) DEFAULT NULL,
    tariff_unlocked BIT DEFAULT 0,
    state VARCHAR(10) NOT NULL DEFAULT 'DRAFT',
    individual_hu_length INT,
    individual_hu_height INT,
    individual_hu_width INT,
    individual_hu_weight INT,
    hu_displayed_dimension_unit VARCHAR(2) DEFAULT 'MM',
    hu_displayed_weight_unit VARCHAR(2) DEFAULT 'KG',
    hu_unit_count INT DEFAULT NULL,
    hu_stackable BIT DEFAULT 1,
    hu_mixable BIT DEFAULT 1,
    FOREIGN KEY (material_id) REFERENCES material (id),
    FOREIGN KEY (supplier_node_id) REFERENCES node (id),
    FOREIGN KEY (user_supplier_node_id) REFERENCES sys_user_node (id),
    FOREIGN KEY (packaging_id) REFERENCES packaging (id),
    FOREIGN KEY (user_id) REFERENCES sys_user (id),
    CONSTRAINT chk_premise_state_values CHECK (state IN ('DRAFT', 'COMPLETED', 'ARCHIVED')),
    CONSTRAINT chk_premise_displayed_dimension_unit CHECK (hu_displayed_dimension_unit IN ('MM', 'CM', 'M')),
    CONSTRAINT chk_premise_displayed_weight_unit CHECK (hu_displayed_weight_unit IN ('T', 'G', 'KG'))
);

CREATE INDEX idx_prem_material_id ON premise (material_id);
CREATE INDEX idx_prem_supplier_node_id ON premise (supplier_node_id);
CREATE INDEX idx_prem_packaging_id ON premise (packaging_id);
CREATE INDEX idx_prem_user_id ON premise (user_id);
CREATE INDEX idx_prem_user_supplier_node_id ON premise (user_supplier_node_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_destination') AND type in (N'U'))
CREATE TABLE premise_destination
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    premise_id INT NOT NULL,
    annual_amount INT,
    destination_node_id INT NOT NULL,
    is_d2d BIT DEFAULT 0,
    rate_d2d DECIMAL(15, 2) DEFAULT NULL CHECK (rate_d2d >= 0),
    lead_time_d2d INT DEFAULT NULL CHECK (lead_time_d2d >= 0),
    repacking_cost DECIMAL(15, 2) DEFAULT NULL CHECK (repacking_cost >= 0),
    handling_cost DECIMAL(15, 2) DEFAULT NULL CHECK (handling_cost >= 0),
    disposal_cost DECIMAL(15, 2) DEFAULT NULL CHECK (disposal_cost >= 0),
    geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
    geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
    country_id INT NOT NULL,
    distance_d2d DECIMAL(15, 2),
    FOREIGN KEY (premise_id) REFERENCES premise (id),
    FOREIGN KEY (country_id) REFERENCES country (id),
    FOREIGN KEY (destination_node_id) REFERENCES node (id)
);

CREATE INDEX idx_pd_destination_node_id ON premise_destination (destination_node_id);
CREATE INDEX idx_pd_premise_id ON premise_destination (premise_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_route_node') AND type in (N'U'))
CREATE TABLE premise_route_node
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    node_id INT DEFAULT NULL,
    user_node_id INT DEFAULT NULL,
    name NVARCHAR(255) NOT NULL,
    address NVARCHAR(500),
    external_mapping_id VARCHAR(32) NOT NULL,
    country_id INT NOT NULL,
    is_destination BIT DEFAULT 0,
    is_intermediate BIT DEFAULT 0,
    is_source BIT DEFAULT 0,
    geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
    geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
    is_outdated BIT DEFAULT 0,
    FOREIGN KEY (node_id) REFERENCES node (id),
    FOREIGN KEY (country_id) REFERENCES country (id),
    FOREIGN KEY (user_node_id) REFERENCES sys_user_node (id),
    CONSTRAINT chk_node CHECK (user_node_id IS NULL OR node_id IS NULL)
);

CREATE INDEX idx_prn_node_id ON premise_route_node (node_id);
CREATE INDEX idx_prn_user_node_id ON premise_route_node (user_node_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_route') AND type in (N'U'))
CREATE TABLE premise_route
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    premise_destination_id INT NOT NULL,
    is_fastest BIT DEFAULT 0,
    is_cheapest BIT DEFAULT 0,
    is_selected BIT DEFAULT 0,
    FOREIGN KEY (premise_destination_id) REFERENCES premise_destination (id)
);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_route_section') AND type in (N'U'))
CREATE TABLE premise_route_section
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    premise_route_id INT NOT NULL,
    from_route_node_id INT NOT NULL,
    to_route_node_id INT NOT NULL,
    list_position INT NOT NULL,
    transport_type VARCHAR(16) CHECK (transport_type IN ('RAIL', 'SEA', 'ROAD', 'POST_RUN')),
    rate_type VARCHAR(16) CHECK (rate_type IN ('CONTAINER', 'MATRIX', 'NEAR_BY')),
    is_pre_run BIT DEFAULT 0,
    is_main_run BIT DEFAULT 0,
    is_post_run BIT DEFAULT 0,
    is_outdated BIT DEFAULT 0,
    CONSTRAINT fk_premise_route_section_premise_route_id FOREIGN KEY (premise_route_id) REFERENCES premise_route (id),
    FOREIGN KEY (from_route_node_id) REFERENCES premise_route_node (id),
    FOREIGN KEY (to_route_node_id) REFERENCES premise_route_node (id),
    CONSTRAINT chk_main_run CHECK (transport_type = 'ROAD' OR transport_type = 'POST_RUN' OR is_main_run = 1)
);

CREATE INDEX idx_prs_premise_route_id ON premise_route_section (premise_route_id);
CREATE INDEX idx_prs_from_route_node_id ON premise_route_section (from_route_node_id);
CREATE INDEX idx_prs_to_route_node_id ON premise_route_section (to_route_node_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'calculation_job') AND type in (N'U'))
CREATE TABLE calculation_job
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    premise_id INT NOT NULL,
    calculation_date DATETIME2 NOT NULL DEFAULT GETDATE(),
    validity_period_id INT NOT NULL,
    property_set_id INT NOT NULL,
    job_state VARCHAR(10) NOT NULL CHECK (job_state IN ('CREATED', 'SCHEDULED', 'VALID', 'INVALID', 'EXCEPTION')),
    error_id INT DEFAULT NULL,
    user_id INT NOT NULL,
    FOREIGN KEY (premise_id) REFERENCES premise (id),
    FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
    FOREIGN KEY (property_set_id) REFERENCES property_set (id),
    FOREIGN KEY (user_id) REFERENCES sys_user (id)
);

CREATE INDEX idx_cj_premise_id ON calculation_job (premise_id);
CREATE INDEX idx_cj_validity_period_id ON calculation_job (validity_period_id);
CREATE INDEX idx_cj_property_set_id ON calculation_job (property_set_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'calculation_job_destination') AND type in (N'U'))
CREATE TABLE calculation_job_destination
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    calculation_job_id INT NOT NULL,
    premise_destination_id INT NOT NULL,
    shipping_frequency INT,
    total_cost DECIMAL(15, 2),
    annual_amount DECIMAL(15, 2),
    annual_risk_cost DECIMAL(15, 2) NOT NULL,
    annual_chance_cost DECIMAL(15, 2) NOT NULL,
    is_small_unit BIT DEFAULT 0,
    annual_repacking_cost DECIMAL(15, 2) NOT NULL,
    annual_handling_cost DECIMAL(15, 2) NOT NULL,
    annual_disposal_cost DECIMAL(15, 2) NOT NULL,
    operational_stock DECIMAL(15, 2) NOT NULL,
    safety_stock DECIMAL(15, 2) NOT NULL,
    stocked_inventory DECIMAL(15, 2) NOT NULL,
    in_transport_stock DECIMAL(15, 2) NOT NULL,
    stock_before_payment DECIMAL(15, 2) NOT NULL,
    annual_capital_cost DECIMAL(15, 2) NOT NULL,
    annual_storage_cost DECIMAL(15, 2) NOT NULL,
    custom_value DECIMAL(15, 2) NOT NULL,
    custom_duties DECIMAL(15, 2) NOT NULL,
    tariff_rate DECIMAL(8, 4) NOT NULL,
    annual_custom_cost DECIMAL(15, 2) NOT NULL,
    air_freight_share_max DECIMAL(8, 4) NOT NULL,
    air_freight_share DECIMAL(8, 4) NOT NULL,
    air_freight_volumetric_weight DECIMAL(15, 2) NOT NULL,
    air_freight_weight DECIMAL(15, 2) NOT NULL,
    annual_air_freight_cost DECIMAL(15, 2) NOT NULL,
    is_d2d BIT DEFAULT 0,
    rate_d2d DECIMAL(15, 2) DEFAULT NULL,
    container_type VARCHAR(8),
    hu_count INT NOT NULL,
    layer_structure NVARCHAR(MAX),
    layer_count INT NOT NULL,
    transport_weight_exceeded BIT DEFAULT 0,
    annual_transportation_cost DECIMAL(15, 2) NOT NULL,
    container_utilization DECIMAL(8, 4) NOT NULL,
    transit_time_in_days INT NOT NULL,
    safety_stock_in_days INT NOT NULL,
    material_cost DECIMAL(15, 2) NOT NULL,
    fca_cost DECIMAL(15, 2) NOT NULL,
    FOREIGN KEY (calculation_job_id) REFERENCES calculation_job (id),
    FOREIGN KEY (premise_destination_id) REFERENCES premise_destination (id),
    CONSTRAINT chk_container_type CHECK (container_type IN ('TEU', 'FEU', 'HC', 'TRUCK'))
);

CREATE INDEX idx_cjd_calculation_job_id ON calculation_job_destination (calculation_job_id);
CREATE INDEX idx_cjd_premise_destination_id ON calculation_job_destination (premise_destination_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'calculation_job_route_section') AND type in (N'U'))
CREATE TABLE calculation_job_route_section
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    premise_route_section_id INT,
    calculation_job_destination_id INT NOT NULL,
    transport_type VARCHAR(16) CHECK (transport_type IN ('RAIL', 'SEA', 'ROAD', 'POST_RUN', 'MATRIX', 'D2D')),
    is_unmixed_price BIT DEFAULT 0,
    is_cbm_price BIT DEFAULT 0,
    is_weight_price BIT DEFAULT 0,
    is_stacked BIT DEFAULT 0,
    is_pre_run BIT DEFAULT 0,
    is_main_run BIT DEFAULT 0,
    is_post_run BIT DEFAULT 0,
    rate DECIMAL(15, 2) NOT NULL,
    distance DECIMAL(15, 2) DEFAULT NULL,
    cbm_price DECIMAL(15, 2) NOT NULL,
    weight_price DECIMAL(15, 2) NOT NULL,
    annual_cost DECIMAL(15, 2) NOT NULL,
    transit_time INT NOT NULL,
    FOREIGN KEY (premise_route_section_id) REFERENCES premise_route_section (id),
    FOREIGN KEY (calculation_job_destination_id) REFERENCES calculation_job_destination (id),
    CONSTRAINT chk_stacked CHECK (is_unmixed_price = 1 OR is_stacked = 1)
);

CREATE INDEX idx_cjrs_premise_route_section_id ON calculation_job_route_section (premise_route_section_id);
CREATE INDEX idx_cjrs_calculation_job_destination_id ON calculation_job_route_section (calculation_job_destination_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'bulk_operation') AND type in (N'U'))
CREATE TABLE bulk_operation
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    user_id INT NOT NULL,
    bulk_file_type VARCHAR(32) NOT NULL,
    bulk_processing_type VARCHAR(32) NOT NULL,
    state VARCHAR(10) NOT NULL,
    [file] VARBINARY(MAX) DEFAULT NULL,
    validity_period_id INT DEFAULT NULL,
    created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
    FOREIGN KEY (user_id) REFERENCES sys_user (id),
    FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
    CONSTRAINT chk_bulk_file_type CHECK (bulk_file_type IN ('CONTAINER_RATE', 'COUNTRY_MATRIX', 'MATERIAL', 'PACKAGING', 'NODE')),
    CONSTRAINT chk_bulk_operation_state CHECK (state IN ('SCHEDULED', 'PROCESSING', 'COMPLETED', 'EXCEPTION')),
    CONSTRAINT chk_bulk_processing_type CHECK (bulk_processing_type IN ('IMPORT', 'EXPORT'))
);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_error') AND type in (N'U'))
CREATE TABLE sys_error
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    user_id INT DEFAULT NULL,
    title NVARCHAR(255) NOT NULL,
    code NVARCHAR(255) NOT NULL,
    message NVARCHAR(1024) NOT NULL,
    request NVARCHAR(MAX),
    pinia NVARCHAR(MAX),
    calculation_job_id INT DEFAULT NULL,
    bulk_operation_id INT DEFAULT NULL,
    type VARCHAR(16) NOT NULL DEFAULT 'BACKEND',
    created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
    FOREIGN KEY (user_id) REFERENCES sys_user (id),
    FOREIGN KEY (calculation_job_id) REFERENCES calculation_job (id),
    FOREIGN KEY (bulk_operation_id) REFERENCES bulk_operation (id),
    CONSTRAINT chk_error_type CHECK (type IN ('BACKEND', 'FRONTEND', 'BULK', 'CALCULATION'))
);

CREATE INDEX idx_se_user_id ON sys_error (user_id);
CREATE INDEX idx_se_calculation_job_id ON sys_error (calculation_job_id);

IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_error_trace_item') AND type in (N'U'))
CREATE TABLE sys_error_trace_item
(
    id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
    error_id INT NOT NULL,
    line INT,
    [file] VARCHAR(255) NOT NULL,
    method VARCHAR(255) NOT NULL,
    fullPath VARCHAR(1024) NOT NULL,
    created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
    FOREIGN KEY (error_id) REFERENCES sys_error (id)
);

@ -0,0 +1,18 @@
INSERT INTO property_set (state)
SELECT 'VALID'
WHERE NOT EXISTS (
    SELECT 1 FROM property_set ps
    WHERE ps.state = 'VALID'
      AND ps.start_date <= GETDATE()
      AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
);

INSERT INTO validity_period (state)
SELECT 'VALID'
WHERE NOT EXISTS (
    SELECT 1 FROM validity_period vp
    WHERE vp.state = 'VALID'
      AND vp.start_date <= GETDATE()
      AND (vp.end_date IS NULL OR vp.end_date > GETDATE())
);
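
-- Illustrative only (sketch, not part of the migration): the row seeded above can be
-- read back with the same filter the later property migrations use:
-- SELECT TOP 1 vp.id
-- FROM validity_period vp
-- WHERE vp.state = 'VALID'
--   AND vp.start_date <= GETDATE()
--   AND (vp.end_date IS NULL OR vp.end_date > GETDATE())
-- ORDER BY vp.start_date DESC;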
603
src/main/resources/db/migration/mssql/V3__Properties.sql
Normal file

@ -0,0 +1,603 @@
-- ===================================================
-- INSERT statements for system_property_type
-- Mapping: external mapping id -> external_mapping_id
--          Description         -> name
-- ===================================================

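-- Note (illustrative sketch, not part of this migration): the single-row INSERTs below
-- could equally be written as one multi-row statement, e.g.:
-- INSERT INTO system_property_type (name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number)
-- VALUES (N'Reference route: Start node', 'START_REF', 'TEXT', '{}', N'...', '2_Reference route', '1'),
--        (N'Reference route: End node', 'END_REF', 'TEXT', '{}', N'...', '2_Reference route', '2');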
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: Start node', 'START_REF', 'TEXT', '{}', N'Specifies the starting node of the reference route. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: End node', 'END_REF', 'TEXT', '{}', N'Specifies the end node of the reference route. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: All-time-high container rate (40 ft. GP) [EUR]', 'RISK_REF', 'CURRENCY', '{"GT":0}', N'Specifies the historically maximum container rate of the reference route for a 40 ft. GP container. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: All-time-low container rate (40 ft. GP) [EUR]', 'CHANCE_REF', 'CURRENCY', '{"GT":0}', N'Specifies the historically lowest container rate of the reference route for a 40 ft. GP container. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Payment terms [days]', 'PAYMENT_TERMS', 'INT', '{}', N'Payment terms agreed with suppliers in days. This value is used to calculate the financing costs for goods in transit and in safety stock.', '1_General', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Annual working days', 'WORKDAYS', 'INT', '{"GT": 0, "LT": 366}', N'Annual production working days.', '1_General', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Interest rate inventory [%]', 'INTEREST_RATE', 'PERCENTAGE', '{"GTE": 0}', N'Interest rate used for calculating capital costs.', '1_General', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'FCA fee [%]', 'FCA_FEE', 'PERCENTAGE', '{"GTE": 0}', N'FCA fee to be added to EXW prices. The logistics cost expert must explicitly select this during the calculation for the fee to be applied.', '1_General', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Default customs rate [%]', 'TARIFF_RATE', 'PERCENTAGE', '{"GTE":0}', N'Standard customs duty rate to be applied when the HS Code cannot be resolved automatically.', '1_General', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Customs clearance fee per import & HS code [EUR]', 'CUSTOM_FEE', 'CURRENCY', '{"GTE":0}', N'Avg. customs clearance fee per HS code and import.', '1_General', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Standard reporting format', 'REPORTING', 'ENUMERATION', '{"ENUM":["MEK_B","MEK_C"]}', N'Specifies the reporting format. The MEK_C reporting format includes occasional air transports that occur with overseas production. The MEK_B reporting format hides these for reasons.', '1_General', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'40 ft.', 'FEU', 'BOOLEAN', '{}', N'Enable if calculation should include this container size; container rates to be maintained.', '3_Sea and road transport', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'20 ft.', 'TEU', 'BOOLEAN', '{}', N'Enable if calculation should include this container size; container rates to be maintained.', '3_Sea and road transport', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'40 ft. HC', 'FEU_HQ', 'BOOLEAN', '{}', N'Enable if calculation should include this container size; container rates to be maintained.', '3_Sea and road transport', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Container utilization in mixed containers [%]', 'CONTAINER_UTIL', 'PERCENTAGE', '{"GTE":0,"LTE":1}', N'Utilization degree of mixed containers (loss from stacking/packaging).', '3_Sea and road transport', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Truck utilization road transport EMEA [%]', 'TRUCK_UTIL', 'PERCENTAGE', '{"GTE":0,"LTE":1}', N'Utilization degree of trucks (loss from stacking/packaging).', '3_Sea and road transport', '8');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max validity period of container freight rates [days]', 'VALID_DAYS', 'INT', '{"GT": 0}', N'After the validity period expires, no logistics cost calculations are possible with the current freight rates. This mechanism ensures that freight rates are regularly updated or verified by a freight rate key user.', '1_General', '8');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Metropolitan region size (diameter) [km]', 'RADIUS_REGION', 'INT', '{"GT": 0}', N'If there are no kilometer rates within a country, it is possible to use container rates from neighboring logistics nodes. However, the node must be within the metropolitan region radius.', '1_General', '9');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Min delivery frequency / year for container transports', 'FREQ_MIN', 'INT', '{"GT": 0, "LT": 366}', N'Low runners: Indicates the number of annual deliveries when the annual demand is lower than the content of a handling unit (The HU is then split up)', '1_General', '10');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max delivery frequency / year for container transport', 'FREQ_MAX', 'INT', '{"GT": 0, "LT": 366}', N'High runners: Indicates the maximum number of annual deliveries. (If the annual demand exceeds this number, one delivery contains more than one HU). Please note that this value affects the storage space cost.', '1_General', '11');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max weight load 20 ft. container [kg]', 'TEU_LOAD', 'INT', '{"GT": 0}', N'Weight limit of TEU container.', '3_Sea and road transport', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max weight load 40 ft. container [kg]', 'FEU_LOAD', 'INT', '{"GT": 0}', N'Weight limit of FEU container (may be restricted by law, e.g. CN truck load = 21 tons).', '3_Sea and road transport', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max weight load truck [kg]', 'TRUCK_LOAD', 'INT', '{"GT": 0}', N'Weight limit of standard truck.', '3_Sea and road transport', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Pre-carriage [EUR/kg]', 'AIR_PRECARRIAGE', 'CURRENCY', '{"GTE": 0}', N'The pre-carriage costs per kilogram to the departure airport when calculating air freight costs.', '4_Air transport', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Pre-carriage handling [EUR]', 'AIR_HANDLING', 'CURRENCY', '{"GTE": 0}', N'One-time costs for processing documents in an air freight transport.', '4_Air transport', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Main carriage [EUR/kg]', 'AIR_MAINCARRIAGE', 'CURRENCY', '{"GTE": 0}', N'Air freight costs per kg on the route from China to Germany.', '4_Air transport', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Hand over fee [EUR]', 'AIR_HANDOVER_FEE', 'CURRENCY', '{"GTE": 0}', N'One-time handover costs for air freight transports.', '4_Air transport', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Customs clearance fee [EUR]', 'AIR_CUSTOM_FEE', 'CURRENCY', '{"GTE": 0}', N'One-time costs for customs clearance in air freight transports.', '4_Air transport', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'On-carriage [EUR/kg]', 'AIR_ONCARRIAGE', 'CURRENCY', '{"GTE": 0}', N'On-carriage costs per kilogram from destination airport to final destination when calculating air freight costs.', '4_Air transport', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Terminal handling fee [EUR/kg]', 'AIR_TERMINAL_FEE', 'CURRENCY', '{"GTE": 0}', N'Terminal handling charges per kilogram for air freight transports.', '4_Air transport', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GR handling KLT [EUR/HU]', 'KLT_HANDLING', 'CURRENCY', '{"GTE": 0}', N'Handling costs per received small load carrier (KLTs are handling units under 0.08 m³ volume) at German wage level.', '5_Warehouse', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GR handling GLT [EUR/HU]', 'GLT_HANDLING', 'CURRENCY', '{"GTE": 0}', N'Handling costs per received large load carrier (GLT are handling units over 0.08 m³ volume) at German wage level.', '5_Warehouse', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT booking & document handling [EUR/GR]', 'BOOKING', 'CURRENCY', '{"GTE": 0}', N'One-time document handling fee per GLT at German wage level.', '5_Warehouse', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT release from storage [EUR/GLT]', 'GLT_RELEASE', 'CURRENCY', '{"GTE": 0}', N'Cost to release one GLT from storage at German wage level.', '5_Warehouse', '12');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'KLT release from storage [EUR/KLT]', 'KLT_RELEASE', 'CURRENCY', '{"GTE": 0}', N'Cost to release one KLT from storage at German wage level.', '5_Warehouse', '11');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT dispatch [EUR/GLT]', 'GLT_DISPATCH', 'CURRENCY', '{"GTE": 0}', N'Cost to dispatch one GLT at German wage level.', '5_Warehouse', '14');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'KLT dispatch [EUR/KLT]', 'KLT_DISPATCH', 'CURRENCY', '{"GTE": 0}', N'Cost to dispatch one KLT at German wage level.', '5_Warehouse', '13');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking KLT, HU <15kg [EUR/HU]', 'KLT_REPACK_S', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one KLT (with a weight under 15 kg) from one-way to returnable at German wage level.', '5_Warehouse', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking KLT, HU >=15kg [EUR/HU]', 'KLT_REPACK_M', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one KLT (with a weight of 15 kg or more) from one-way to returnable with crane at German wage level.', '5_Warehouse', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking GLT, HU <15kg [EUR/HU]', 'GLT_REPACK_S', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one GLT (with a weight under 15 kg) from one-way to returnable at German wage level.', '5_Warehouse', '8');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking GLT, HU 15 - 2000kg [EUR/HU]', 'GLT_REPACK_M', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one GLT (with a weight over 15 but under or equal 2000 kg) from one-way to returnable with crane at German wage level.', '5_Warehouse', '9');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking GLT, HU >2000kg [EUR/HU]', 'GLT_REPACK_L', 'INT', '{"GTE": 0}', N'Cost to repack one GLT (with a weight over 2000 kg) from one-way to returnable with crane at German wage level.', '5_Warehouse', '10');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT disposal [EUR/GLT]', 'DISPOSAL', 'INT', '{"GTE": 0}', N'Cost to dispose one wooden pallet.', '5_Warehouse', '15');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Space costs per cbm per night [EUR/cbm]', 'SPACE_COST', 'CURRENCY', '{"GTE": 0}', N'The storage costs incurred for a storage space of 1 square meter per started height unit (meter) and per day. E.g.: 1 Euro pallet with 1.8 m height is calculated as 1.2 x 0.8 x SPACE_COST x 2, where SPACE_COST is the entered price.', '5_Warehouse', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'KLT booking & document handling [EUR/GR]', 'BOOKING_KLT', 'CURRENCY', '{"GTE": 0}', N'One-time document handling fee per KLT at German wage level.', '5_Warehouse', '3');


-- ===================================================
-- INSERT statements for system_property
-- Uses subqueries to resolve the target IDs dynamically
-- ===================================================

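-- Pattern used by every statement below (shown once for clarity; illustrative only):
--   property_set_id:         the currently valid property set, i.e.
--                            SELECT ps.id FROM property_set ps
--                            WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE()
--                              AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
--                            ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY
--   system_property_type_id: looked up via its external_mapping_id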
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'PAYMENT_TERMS'),
    '30'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'START_REF'),
    'CNXMN'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'END_REF'),
    'DEHAM'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'RISK_REF'),
    '20000.00'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'CHANCE_REF'),
    '1000.00'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TRUCK_UTIL'),
    '0.7'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'WORKDAYS'),
    '210'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'INTEREST_RATE'),
    '0.12'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FCA_FEE'),
    '0.002'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TARIFF_RATE'),
    '0.03'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'CUSTOM_FEE'),
    '35'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'REPORTING'),
    'MEK_B'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FEU'),
    'true'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TEU'),
    'true'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FEU_HQ'),
    'true'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'CONTAINER_UTIL'),
    '0.7'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'VALID_DAYS'),
    '60'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'RADIUS_REGION'),
    '20'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FREQ_MIN'),
    '3'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FREQ_MAX'),
    '50'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TEU_LOAD'),
    '20000'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FEU_LOAD'),
    '21000'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TRUCK_LOAD'),
    '25000'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_PRECARRIAGE'),
    '0.1'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_HANDLING'),
    '80'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_MAINCARRIAGE'),
    '3.5'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_HANDOVER_FEE'),
    '35'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_CUSTOM_FEE'),
    '45'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_ONCARRIAGE'),
    '0.2'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_TERMINAL_FEE'),
    '0.2'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_HANDLING'),
    '0.71'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_HANDLING'),
    '3.5'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'BOOKING'),
    '3.5'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'BOOKING_KLT'),
    '0.35'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_RELEASE'),
    '2.23'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_RELEASE'),
    '1.12'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_DISPATCH'),
    '1.61'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_DISPATCH'),
    '0.333'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_REPACK_S'),
    '2.08'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_REPACK_M'),
    '3.02'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_REPACK_S'),
    '3.02'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_REPACK_M'),
    '7.76'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_REPACK_L'),
    '14'
);

INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID' AND ps.start_date <= GETDATE() AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'DISPOSAL'),
    '6'
);
|
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
|
||||||
|
VALUES (
|
||||||
|
(SELECT ps.id FROM property_set ps
|
||||||
|
WHERE ps.state = 'VALID'
|
||||||
|
AND ps.start_date <= GETDATE()
|
||||||
|
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
|
||||||
|
ORDER BY ps.start_date DESC
|
||||||
|
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
|
||||||
|
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'SPACE_COST'),
|
||||||
|
'0.2630136986'
|
||||||
|
);
|
||||||
685  src/main/resources/db/migration/mssql/V4__Country.sql  (Normal file)
@@ -0,0 +1,685 @@
-- Country Data Import SQL Script
-- Generated from Lastenheft_Requirements Appendix A_Länder 1.csv

-- ===================================================
-- INSERT a property set if not exists.
-- ===================================================

INSERT INTO property_set (state)
SELECT 'VALID'
WHERE NOT EXISTS (
    SELECT 1 FROM property_set ps
    WHERE ps.state = 'VALID'
      AND ps.start_date <= GETDATE()
      AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
);

-- =============================================================================
-- 1. INSERT COUNTRY PROPERTY TYPES
-- =============================================================================

INSERT INTO country_property_type
(name, external_mapping_id, data_type, validation_rule, is_required, description, property_group, sequence_number)
VALUES
('Customs Union', 'UNION', 'ENUMERATION', '{ "ENUM" : ["EU", "NONE"]}', 0, 'Specifies the customs union in which the country is located. When crossing a customs union border, customs costs are added to the calculation result.', 'General', 1),
('Safety Stock [working days]', 'SAFETY_STOCK', 'INT', '{"GTE": 0}', 0, 'Specifies the safety stock in working days that is maintained when sourcing from this country.', 'General', 2),
('Air Freight Share [%]', 'AIR_SHARE', 'PERCENTAGE', '{"GTE": 0}', 0, 'Specifies the maximum air freight proportion that is included in the calculation when sourcing from this country. The actual air freight proportion that is used additionally depends on the overseas share of the part number and lies between 0% and this value.', 'General', 3),
('Wage Factor [%]', 'WAGE', 'PERCENTAGE', '{"GT": 0}', 0, 'Specifies the wage factor level for calculating handling costs in relation to the German wage factor level.', 'General', 4);

-- =============================================================================
-- 2. INSERT COUNTRIES
-- =============================================================================

INSERT INTO country (iso_code, name, region_code, is_deprecated) VALUES
('AD', N'Andorra', 'EMEA', 0),
('AE', N'United Arab Emirates', 'EMEA', 0),
('AF', N'Afghanistan', 'EMEA', 0),
('AG', N'Antigua and Barbuda', 'LATAM', 0),
('AI', N'Anguilla', 'LATAM', 0),
('AL', N'Albania', 'EMEA', 0),
('AM', N'Armenia', 'EMEA', 0),
('AO', N'Angola', 'EMEA', 0),
('AQ', N'Antarctica', 'EMEA', 0),
('AR', N'Argentina', 'LATAM', 0),
('AS', N'American Samoa', 'APAC', 0),
('AT', N'Austria', 'EMEA', 0),
('AU', N'Australia', 'APAC', 0),
('AW', N'Aruba', 'LATAM', 0),
('AX', N'Åland Islands', 'EMEA', 0),
('AZ', N'Azerbaijan', 'EMEA', 0),
('BA', N'Bosnia and Herzegovina', 'EMEA', 0),
('BB', N'Barbados', 'LATAM', 0),
('BD', N'Bangladesh', 'EMEA', 0),
('BE', N'Belgium', 'EMEA', 0),
('BF', N'Burkina Faso', 'EMEA', 0),
('BG', N'Bulgaria', 'EMEA', 0),
('BH', N'Bahrain', 'EMEA', 0),
('BI', N'Burundi', 'EMEA', 0),
('BJ', N'Benin', 'EMEA', 0),
('BL', N'Saint Barthélemy', 'LATAM', 0),
('BM', N'Bermuda', 'NAM', 0),
('BN', N'Brunei Darussalam', 'APAC', 0),
('BO', N'Bolivia', 'LATAM', 0),
('BQ', N'Bonaire, Sint Eustatius and Saba', 'LATAM', 0),
('BR', N'Brazil', 'LATAM', 0),
('BS', N'Bahamas', 'LATAM', 0),
('BT', N'Bhutan', 'APAC', 0),
('BV', N'Bouvet Island', 'EMEA', 0),
('BW', N'Botswana', 'EMEA', 0),
('BY', N'Belarus', 'EMEA', 0),
('BZ', N'Belize', 'LATAM', 0),
('CA', N'Canada', 'NAM', 0),
('CC', N'Cocos (Keeling) Islands', 'APAC', 0),
('CD', N'Congo, Democratic Republic', 'EMEA', 0),
('CF', N'Central African Republic', 'EMEA', 0),
('CG', N'Congo', 'EMEA', 0),
('CH', N'Switzerland', 'EMEA', 0),
('CI', N'Côte d''Ivoire', 'EMEA', 0),
('CK', N'Cook Islands', 'APAC', 0),
('CL', N'Chile', 'LATAM', 0),
('CM', N'Cameroon', 'EMEA', 0),
('CN', N'China', 'APAC', 0),
('CO', N'Colombia', 'LATAM', 0),
('CR', N'Costa Rica', 'LATAM', 0),
('CU', N'Cuba', 'LATAM', 0),
('CV', N'Cabo Verde', 'EMEA', 0),
('CW', N'Curaçao', 'LATAM', 0),
('CX', N'Christmas Island', 'APAC', 0),
('CY', N'Cyprus', 'EMEA', 0),
('CZ', N'Czech Republic', 'EMEA', 0),
('DE', N'Germany', 'EMEA', 0),
('DJ', N'Djibouti', 'EMEA', 0),
('DK', N'Denmark', 'EMEA', 0),
('DM', N'Dominica', 'LATAM', 0),
('DO', N'Dominican Republic', 'LATAM', 0),
('DZ', N'Algeria', 'EMEA', 0),
('EC', N'Ecuador', 'LATAM', 0),
('EE', N'Estonia', 'EMEA', 0),
('EG', N'Egypt', 'EMEA', 0),
('EH', N'Western Sahara', 'EMEA', 0),
('ER', N'Eritrea', 'EMEA', 0),
('ES', N'Spain', 'EMEA', 0),
('ET', N'Ethiopia', 'EMEA', 0),
('FI', N'Finland', 'EMEA', 0),
('FJ', N'Fiji', 'APAC', 0),
('FK', N'Falkland Islands', 'LATAM', 0),
('FM', N'Micronesia', 'APAC', 0),
('FO', N'Faroe Islands', 'EMEA', 0),
('FR', N'France', 'EMEA', 0),
('GA', N'Gabon', 'EMEA', 0),
('GB', N'United Kingdom', 'EMEA', 0),
('GD', N'Grenada', 'LATAM', 0),
('GE', N'Georgia', 'EMEA', 0),
('GF', N'French Guiana', 'LATAM', 0),
('GG', N'Guernsey', 'EMEA', 0),
('GH', N'Ghana', 'EMEA', 0),
('GI', N'Gibraltar', 'EMEA', 0),
('GL', N'Greenland', 'NAM', 0),
('GM', N'Gambia', 'EMEA', 0),
('GN', N'Guinea', 'EMEA', 0),
('GP', N'Guadeloupe', 'LATAM', 0),
('GQ', N'Equatorial Guinea', 'EMEA', 0),
('GR', N'Greece', 'EMEA', 0),
('GS', N'South Georgia and South Sandwich Islands', 'LATAM', 0),
('GT', N'Guatemala', 'LATAM', 0),
('GU', N'Guam', 'APAC', 0),
('GW', N'Guinea-Bissau', 'EMEA', 0),
('GY', N'Guyana', 'LATAM', 0),
('HK', N'Hong Kong', 'APAC', 0),
('HM', N'Heard Island and McDonald Islands', 'APAC', 0),
('HN', N'Honduras', 'LATAM', 0),
('HR', N'Croatia', 'EMEA', 0),
('HT', N'Haiti', 'LATAM', 0),
('HU', N'Hungary', 'EMEA', 0),
('ID', N'Indonesia', 'APAC', 0),
('IE', N'Ireland', 'EMEA', 0),
('IL', N'Israel', 'EMEA', 0),
('IM', N'Isle of Man', 'EMEA', 0),
('IN', N'India', 'APAC', 0),
('IO', N'British Indian Ocean Territory', 'APAC', 0),
('IQ', N'Iraq', 'EMEA', 0),
('IR', N'Iran', 'EMEA', 0),
('IS', N'Iceland', 'EMEA', 0),
('IT', N'Italy', 'EMEA', 0),
('JE', N'Jersey', 'EMEA', 0),
('JM', N'Jamaica', 'LATAM', 0),
('JO', N'Jordan', 'EMEA', 0),
('JP', N'Japan', 'APAC', 0),
('KE', N'Kenya', 'EMEA', 0),
('KG', N'Kyrgyzstan', 'EMEA', 0),
('KH', N'Cambodia', 'APAC', 0),
('KI', N'Kiribati', 'APAC', 0),
('KM', N'Comoros', 'EMEA', 0),
('KN', N'Saint Kitts and Nevis', 'LATAM', 0),
('KP', N'Korea, North', 'APAC', 0),
('KR', N'Korea, South', 'APAC', 0),
('KW', N'Kuwait', 'EMEA', 0),
('KY', N'Cayman Islands', 'LATAM', 0),
('KZ', N'Kazakhstan', 'EMEA', 0),
('LA', N'Laos', 'APAC', 0),
('LB', N'Lebanon', 'EMEA', 0),
('LC', N'Saint Lucia', 'LATAM', 0),
('LI', N'Liechtenstein', 'EMEA', 0),
('LK', N'Sri Lanka', 'APAC', 0),
('LR', N'Liberia', 'EMEA', 0),
('LS', N'Lesotho', 'EMEA', 0),
('LT', N'Lithuania', 'EMEA', 0),
('LU', N'Luxembourg', 'EMEA', 0),
('LV', N'Latvia', 'EMEA', 0),
('LY', N'Libya', 'EMEA', 0),
('MA', N'Morocco', 'EMEA', 0),
('MC', N'Monaco', 'EMEA', 0),
('MD', N'Moldova', 'EMEA', 0),
('ME', N'Montenegro', 'EMEA', 0),
('MF', N'Saint Martin', 'LATAM', 0),
('MG', N'Madagascar', 'EMEA', 0),
('MH', N'Marshall Islands', 'APAC', 0),
('MK', N'North Macedonia', 'EMEA', 0),
('ML', N'Mali', 'EMEA', 0),
('MM', N'Myanmar', 'APAC', 0),
('MN', N'Mongolia', 'APAC', 0),
('MO', N'Macao', 'APAC', 0),
('MP', N'Northern Mariana Islands', 'APAC', 0),
('MQ', N'Martinique', 'LATAM', 0),
('MR', N'Mauritania', 'EMEA', 0),
('MS', N'Montserrat', 'LATAM', 0),
('MT', N'Malta', 'EMEA', 0),
('MU', N'Mauritius', 'EMEA', 0),
('MV', N'Maldives', 'APAC', 0),
('MW', N'Malawi', 'EMEA', 0),
('MX', N'Mexico', 'LATAM', 0),
('MY', N'Malaysia', 'APAC', 0),
('MZ', N'Mozambique', 'EMEA', 0),
('NA', N'Namibia', 'EMEA', 0),
('NC', N'New Caledonia', 'APAC', 0),
('NE', N'Niger', 'EMEA', 0),
('NF', N'Norfolk Island', 'APAC', 0),
('NG', N'Nigeria', 'EMEA', 0),
('NI', N'Nicaragua', 'LATAM', 0),
('NL', N'Netherlands', 'EMEA', 0),
('NO', N'Norway', 'EMEA', 0),
('NP', N'Nepal', 'APAC', 0),
('NR', N'Nauru', 'APAC', 0),
('NU', N'Niue', 'APAC', 0),
('NZ', N'New Zealand', 'APAC', 0),
('OM', N'Oman', 'EMEA', 0),
('PA', N'Panama', 'LATAM', 0),
('PE', N'Peru', 'LATAM', 0),
('PF', N'French Polynesia', 'APAC', 0),
('PG', N'Papua New Guinea', 'APAC', 0),
('PH', N'Philippines', 'APAC', 0),
('PK', N'Pakistan', 'APAC', 0),
('PL', N'Poland', 'EMEA', 0),
('PM', N'Saint Pierre and Miquelon', 'NAM', 0),
('PN', N'Pitcairn', 'APAC', 0),
('PR', N'Puerto Rico', 'LATAM', 0),
('PS', N'Palestine', 'EMEA', 0),
('PT', N'Portugal', 'EMEA', 0),
('PW', N'Palau', 'APAC', 0),
('PY', N'Paraguay', 'LATAM', 0),
('QA', N'Qatar', 'EMEA', 0),
('RE', N'Réunion', 'EMEA', 0),
('RO', N'Romania', 'EMEA', 0),
('RS', N'Serbia', 'EMEA', 0),
('RU', N'Russian Federation', 'EMEA', 0),
('RW', N'Rwanda', 'EMEA', 0),
('SA', N'Saudi Arabia', 'EMEA', 0),
('SB', N'Solomon Islands', 'APAC', 0),
('SC', N'Seychelles', 'EMEA', 0),
('SD', N'Sudan', 'EMEA', 0),
('SE', N'Sweden', 'EMEA', 0),
('SG', N'Singapore', 'APAC', 0),
('SH', N'Saint Helena', 'EMEA', 0),
('SI', N'Slovenia', 'EMEA', 0),
('SJ', N'Svalbard and Jan Mayen', 'EMEA', 0),
('SK', N'Slovakia', 'EMEA', 0),
('SL', N'Sierra Leone', 'EMEA', 0),
('SM', N'San Marino', 'EMEA', 0),
('SN', N'Senegal', 'EMEA', 0),
('SO', N'Somalia', 'EMEA', 0),
('SR', N'Suriname', 'LATAM', 0),
('SS', N'South Sudan', 'EMEA', 0),
('ST', N'Sao Tome and Principe', 'EMEA', 0),
('SV', N'El Salvador', 'LATAM', 0),
('SX', N'Sint Maarten', 'LATAM', 0),
('SY', N'Syrian Arab Republic', 'EMEA', 0),
('SZ', N'Eswatini', 'EMEA', 0),
('TC', N'Turks and Caicos Islands', 'LATAM', 0),
('TD', N'Chad', 'EMEA', 0),
('TF', N'French Southern Territories', 'EMEA', 0),
('TG', N'Togo', 'EMEA', 0),
('TH', N'Thailand', 'APAC', 0),
('TJ', N'Tajikistan', 'EMEA', 0),
('TK', N'Tokelau', 'APAC', 0),
('TL', N'Timor-Leste', 'APAC', 0),
('TM', N'Turkmenistan', 'EMEA', 0),
('TN', N'Tunisia', 'EMEA', 0),
('TO', N'Tonga', 'APAC', 0),
('TR', N'Turkey', 'EMEA', 0),
('TT', N'Trinidad and Tobago', 'LATAM', 0),
('TV', N'Tuvalu', 'APAC', 0),
('TW', N'Taiwan', 'APAC', 0),
('TZ', N'Tanzania', 'EMEA', 0),
('UA', N'Ukraine', 'EMEA', 0),
('UG', N'Uganda', 'EMEA', 0),
('UM', N'United States Minor Outlying Islands', 'APAC', 0),
('US', N'United States', 'NAM', 0),
('UY', N'Uruguay', 'LATAM', 0),
('UZ', N'Uzbekistan', 'EMEA', 0),
('VA', N'Vatican City', 'EMEA', 0),
('VC', N'Saint Vincent and the Grenadines', 'LATAM', 0),
('VE', N'Venezuela', 'LATAM', 0),
('VG', N'Virgin Islands, British', 'LATAM', 0),
('VI', N'Virgin Islands, U.S.', 'LATAM', 0),
('VN', N'Viet Nam', 'APAC', 0),
('VU', N'Vanuatu', 'APAC', 0),
('WF', N'Wallis and Futuna', 'APAC', 0),
('WS', N'Samoa', 'APAC', 0),
('YE', N'Yemen', 'EMEA', 0),
('YT', N'Mayotte', 'EMEA', 0),
('ZA', N'South Africa', 'EMEA', 0),
('ZM', N'Zambia', 'EMEA', 0),
('ZW', N'Zimbabwe', 'EMEA', 0),
('XK', N'Kosovo', 'EMEA', 0);

-- =============================================================================
-- 3. INSERT COUNTRY PROPERTIES
-- =============================================================================

-- Note: Uses the currently valid property set (state = 'VALID' and within date range).
-- If no valid property set exists, these inserts will fail with a NOT NULL constraint violation.
-- To create a new property set if none exists, uncomment the following:
-- INSERT INTO property_set (start_date, state) VALUES (GETDATE(), 'VALID');
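
-- NOTE (reviewer aid, not part of the generated data): the commented query below is a
-- sketch of how the "currently valid" property set used by every insert in this file can
-- be checked up front; it only uses the table and columns already referenced above.
-- SELECT TOP 1 ps.id, ps.start_date, ps.end_date
-- FROM property_set ps
-- WHERE ps.state = 'VALID'
--   AND ps.start_date <= GETDATE()
--   AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
-- ORDER BY ps.start_date DESC;
-- If this returns no row, create a property set first (see the commented-out INSERT above).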

-- Note: Using current valid property set
-- Customs Union Properties (only for EU countries)
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
    c.id,
    cpt.id,
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID'
       AND ps.start_date <= GETDATE()
       AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC
     OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    CASE
        WHEN c.iso_code IN ('AT', 'BE', 'BG', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'MT', 'NL', 'PL', 'PT', 'RO', 'SE', 'SI', 'SK')
        THEN 'EU'
        ELSE 'NONE'
    END
FROM country c, country_property_type cpt
WHERE cpt.external_mapping_id = 'UNION';

-- Safety Stock Properties
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
    c.id,
    cpt.id,
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID'
       AND ps.start_date <= GETDATE()
       AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC
     OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    CASE c.iso_code
        WHEN 'AD' THEN N'15'
        WHEN 'AE' THEN N'20'
        WHEN 'AF' THEN N'30'
        WHEN 'AG' THEN N'55'
        WHEN 'AI' THEN N'55'
        WHEN 'AL' THEN N'15'
        WHEN 'AM' THEN N'15'
        WHEN 'AO' THEN N'15'
        WHEN 'AQ' THEN N'55'
        WHEN 'AR' THEN N'55'
        WHEN 'AS' THEN N'55'
        WHEN 'AT' THEN N'10'
        WHEN 'AU' THEN N'55'
        WHEN 'AW' THEN N'55'
        WHEN 'AZ' THEN N'15'
        WHEN 'BA' THEN N'15'
        WHEN 'BB' THEN N'55'
        WHEN 'BD' THEN N'55'
        WHEN 'BE' THEN N'10'
        WHEN 'BF' THEN N'30'
        WHEN 'BG' THEN N'10'
        WHEN 'BH' THEN N'20'
        WHEN 'BI' THEN N'30'
        WHEN 'BJ' THEN N'30'
        WHEN 'BL' THEN N'30'
        WHEN 'BM' THEN N'55'
        WHEN 'BN' THEN N'55'
        WHEN 'BO' THEN N'55'
        WHEN 'BQ' THEN N'55'
        WHEN 'BR' THEN N'55'
        WHEN 'BS' THEN N'55'
        WHEN 'BT' THEN N'55'
        WHEN 'BV' THEN N'30'
        WHEN 'BW' THEN N'15'
        WHEN 'BY' THEN N'55'
        WHEN 'BZ' THEN N'55'
        WHEN 'CA' THEN N'55'
        WHEN 'CC' THEN N'55'
        WHEN 'CD' THEN N'30'
        WHEN 'CF' THEN N'30'
        WHEN 'CG' THEN N'30'
        WHEN 'CH' THEN N'10'
        WHEN 'CI' THEN N'30'
        WHEN 'CK' THEN N'30'
        WHEN 'CL' THEN N'55'
        WHEN 'CM' THEN N'30'
        WHEN 'CN' THEN N'55'
        WHEN 'CO' THEN N'55'
        WHEN 'CR' THEN N'55'
        WHEN 'CU' THEN N'55'
        WHEN 'CV' THEN N'30'
        WHEN 'CW' THEN N'30'
        WHEN 'CX' THEN N'55'
        WHEN 'CY' THEN N'10'
        WHEN 'CZ' THEN N'10'
        WHEN 'DE' THEN N'10'
        WHEN 'DJ' THEN N'30'
        WHEN 'DK' THEN N'10'
        WHEN 'DM' THEN N'55'
        WHEN 'DO' THEN N'55'
        WHEN 'DZ' THEN N'10'
        WHEN 'EC' THEN N'55'
        WHEN 'EE' THEN N'10'
        WHEN 'EG' THEN N'30'
        WHEN 'EH' THEN N'30'
        WHEN 'ER' THEN N'30'
        WHEN 'ES' THEN N'10'
        WHEN 'ET' THEN N'30'
        WHEN 'FI' THEN N'10'
        WHEN 'FJ' THEN N'55'
        WHEN 'FK' THEN N'55'
        WHEN 'FM' THEN N'55'
        WHEN 'FO' THEN N'30'
        WHEN 'FR' THEN N'10'
        WHEN 'GA' THEN N'30'
        WHEN 'GB' THEN N'30'
        WHEN 'GD' THEN N'55'
        WHEN 'GE' THEN N'10'
        WHEN 'GF' THEN N'30'
        WHEN 'GG' THEN N'30'
        WHEN 'GH' THEN N'30'
        WHEN 'GI' THEN N'10'
        WHEN 'GL' THEN N'30'
        WHEN 'GM' THEN N'30'
        WHEN 'GN' THEN N'30'
        WHEN 'GP' THEN N'30'
        WHEN 'GQ' THEN N'30'
        WHEN 'GR' THEN N'10'
        WHEN 'GS' THEN N'55'
        WHEN 'GT' THEN N'55'
        WHEN 'GU' THEN N'55'
        WHEN 'GW' THEN N'30'
        WHEN 'GY' THEN N'55'
        WHEN 'HK' THEN N'55'
        WHEN 'HM' THEN N'30'
        WHEN 'HN' THEN N'55'
        WHEN 'HR' THEN N'10'
        WHEN 'HT' THEN N'55'
        WHEN 'HU' THEN N'10'
        WHEN 'ID' THEN N'55'
        WHEN 'IE' THEN N'10'
        WHEN 'IL' THEN N'30'
        WHEN 'IM' THEN N'30'
        WHEN 'IN' THEN N'55'
        WHEN 'IO' THEN N'55'
        WHEN 'IQ' THEN N'30'
        WHEN 'IR' THEN N'30'
        WHEN 'IS' THEN N'20'
        WHEN 'IT' THEN N'10'
        WHEN 'JE' THEN N'30'
        WHEN 'JM' THEN N'55'
        WHEN 'JO' THEN N'30'
        WHEN 'JP' THEN N'55'
        WHEN 'KE' THEN N'30'
        WHEN 'KG' THEN N'30'
        WHEN 'KH' THEN N'55'
        WHEN 'KI' THEN N'55'
        WHEN 'KM' THEN N'30'
        WHEN 'KN' THEN N'55'
        WHEN 'KP' THEN N'55'
        WHEN 'KR' THEN N'55'
        WHEN 'KW' THEN N'30'
        WHEN 'KY' THEN N'55'
        WHEN 'KZ' THEN N'30'
        WHEN 'LA' THEN N'55'
        WHEN 'LB' THEN N'30'
        WHEN 'LC' THEN N'55'
        WHEN 'LI' THEN N'10'
        WHEN 'LK' THEN N'55'
        WHEN 'LR' THEN N'30'
        WHEN 'LS' THEN N'30'
        WHEN 'LT' THEN N'10'
        WHEN 'LU' THEN N'10'
        WHEN 'LV' THEN N'10'
        WHEN 'LY' THEN N'30'
        WHEN 'MA' THEN N'20'
        WHEN 'MC' THEN N'30'
        WHEN 'MD' THEN N'30'
        WHEN 'ME' THEN N'30'
        WHEN 'MF' THEN N'30'
        WHEN 'MG' THEN N'30'
        WHEN 'MH' THEN N'55'
        WHEN 'MK' THEN N'30'
        WHEN 'ML' THEN N'30'
        WHEN 'MM' THEN N'55'
        WHEN 'MN' THEN N'55'
        WHEN 'MO' THEN N'55'
        WHEN 'MP' THEN N'55'
        WHEN 'MQ' THEN N'30'
        WHEN 'MR' THEN N'30'
        WHEN 'MS' THEN N'55'
        WHEN 'MT' THEN N'10'
        WHEN 'MU' THEN N'30'
        WHEN 'MV' THEN N'55'
        WHEN 'MW' THEN N'30'
        WHEN 'MX' THEN N'55'
        WHEN 'MY' THEN N'55'
        WHEN 'MZ' THEN N'30'
        WHEN 'NA' THEN N'30'
        WHEN 'NC' THEN N'30'
        WHEN 'NE' THEN N'30'
        WHEN 'NF' THEN N'55'
        WHEN 'NG' THEN N'30'
        WHEN 'NI' THEN N'55'
        WHEN 'NL' THEN N'10'
        WHEN 'NO' THEN N'10'
        WHEN 'NP' THEN N'55'
        WHEN 'NR' THEN N'55'
        WHEN 'NU' THEN N'55'
        WHEN 'NZ' THEN N'55'
        WHEN 'OM' THEN N'30'
        WHEN 'PA' THEN N'55'
        WHEN 'PE' THEN N'55'
        WHEN 'PF' THEN N'30'
        WHEN 'PG' THEN N'55'
        WHEN 'PH' THEN N'55'
        WHEN 'PK' THEN N'55'
        WHEN 'PL' THEN N'10'
        WHEN 'PM' THEN N'30'
        WHEN 'PN' THEN N'55'
        WHEN 'PR' THEN N'55'
        WHEN 'PS' THEN N'30'
        WHEN 'PT' THEN N'10'
        WHEN 'PW' THEN N'55'
        WHEN 'PY' THEN N'55'
        WHEN 'QA' THEN N'30'
        WHEN 'RE' THEN N'30'
        WHEN 'RO' THEN N'10'
        WHEN 'RS' THEN N'10'
        WHEN 'RU' THEN N'30'
        WHEN 'RW' THEN N'30'
        WHEN 'SA' THEN N'30'
        WHEN 'SB' THEN N'55'
        WHEN 'SC' THEN N'30'
        WHEN 'SD' THEN N'30'
        WHEN 'SE' THEN N'10'
        WHEN 'SG' THEN N'55'
        WHEN 'SH' THEN N'30'
        WHEN 'SI' THEN N'10'
        WHEN 'SJ' THEN N'55'
        WHEN 'SK' THEN N'10'
        WHEN 'SL' THEN N'30'
        WHEN 'SM' THEN N'30'
        WHEN 'SN' THEN N'30'
        WHEN 'SO' THEN N'30'
        WHEN 'SR' THEN N'55'
        WHEN 'SS' THEN N'30'
        WHEN 'ST' THEN N'30'
        WHEN 'SV' THEN N'55'
        WHEN 'SX' THEN N'30'
        WHEN 'SY' THEN N'30'
        WHEN 'SZ' THEN N'30'
        WHEN 'TC' THEN N'55'
        WHEN 'TD' THEN N'30'
        WHEN 'TF' THEN N'30'
        WHEN 'TG' THEN N'30'
        WHEN 'TH' THEN N'55'
        WHEN 'TJ' THEN N'30'
        WHEN 'TK' THEN N'55'
        WHEN 'TL' THEN N'55'
        WHEN 'TM' THEN N'30'
        WHEN 'TN' THEN N'30'
        WHEN 'TO' THEN N'55'
        WHEN 'TR' THEN N'15'
        WHEN 'TT' THEN N'55'
        WHEN 'TV' THEN N'55'
        WHEN 'TW' THEN N'55'
        WHEN 'TZ' THEN N'30'
        WHEN 'UA' THEN N'55'
        WHEN 'UG' THEN N'30'
        WHEN 'UM' THEN N'55'
        WHEN 'US' THEN N'55'
        WHEN 'UY' THEN N'55'
        WHEN 'UZ' THEN N'30'
        WHEN 'VA' THEN N'30'
        WHEN 'VC' THEN N'55'
        WHEN 'VE' THEN N'55'
        WHEN 'VG' THEN N'55'
        WHEN 'VI' THEN N'55'
        WHEN 'VN' THEN N'55'
        WHEN 'VU' THEN N'55'
        WHEN 'WF' THEN N'30'
        WHEN 'WS' THEN N'55'
        WHEN 'YE' THEN N'30'
        WHEN 'YT' THEN N'30'
        WHEN 'ZA' THEN N'30'
        WHEN 'ZM' THEN N'30'
        WHEN 'ZW' THEN N'30'
        WHEN 'XK' THEN N'55'
    END
FROM country c, country_property_type cpt
WHERE cpt.external_mapping_id = 'SAFETY_STOCK';

-- Air Freight Share Properties (0.03 for countries with safety stock 55, otherwise 0)
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
    c.id,
    cpt.id,
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID'
       AND ps.start_date <= GETDATE()
       AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC
     OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    CASE
        WHEN cp_safety.property_value = '55' THEN N'0.03'
        ELSE '0'
    END
FROM country c
CROSS JOIN country_property_type cpt
LEFT JOIN country_property cp_safety
    ON cp_safety.country_id = c.id
    AND cp_safety.country_property_type_id = (
        SELECT id FROM country_property_type
        WHERE external_mapping_id = 'SAFETY_STOCK'
    )
    AND cp_safety.property_set_id = (
        SELECT ps.id FROM property_set ps
        WHERE ps.state = 'VALID'
          AND ps.start_date <= GETDATE()
          AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
        ORDER BY ps.start_date DESC
        OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY)
WHERE cpt.external_mapping_id = 'AIR_SHARE';
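
-- NOTE (reviewer aid, not part of the generated data): a sketch of a cross-check for the
-- rule documented above, pairing each country's air freight share with its safety stock;
-- it uses only tables and mapping ids that appear in this migration.
-- SELECT c.iso_code,
--        MAX(CASE WHEN cpt.external_mapping_id = 'SAFETY_STOCK' THEN cp.property_value END) AS safety_stock,
--        MAX(CASE WHEN cpt.external_mapping_id = 'AIR_SHARE' THEN cp.property_value END) AS air_share
-- FROM country c
-- JOIN country_property cp ON cp.country_id = c.id
-- JOIN country_property_type cpt ON cpt.id = cp.country_property_type_id
-- GROUP BY c.iso_code
-- ORDER BY c.iso_code;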

-- Wage Factor Properties (only for countries with defined values)
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
    c.id,
    cpt.id,
    (SELECT ps.id FROM property_set ps
     WHERE ps.state = 'VALID'
       AND ps.start_date <= GETDATE()
       AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
     ORDER BY ps.start_date DESC
     OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
    CASE c.iso_code
        WHEN 'AT' THEN N'0.99'
        WHEN 'BE' THEN N'1.14'
        WHEN 'BG' THEN N'0.23'
        WHEN 'CZ' THEN N'0.44'
        WHEN 'DE' THEN N'1.00'
        WHEN 'DK' THEN N'1.16'
        WHEN 'EE' THEN N'0.60'
        WHEN 'ES' THEN N'0.90'
        WHEN 'FI' THEN N'1.02'
        WHEN 'FR' THEN N'1.05'
        WHEN 'GR' THEN N'0.35'
        WHEN 'HR' THEN N'0.31'
        WHEN 'HU' THEN N'0.35'
        WHEN 'IE' THEN N'0.97'
        WHEN 'IT' THEN N'0.72'
        WHEN 'LT' THEN N'0.36'
        WHEN 'LU' THEN N'1.31'
        WHEN 'LV' THEN N'0.33'
        WHEN 'MT' THEN N'0.41'
        WHEN 'NL' THEN N'1.05'
        WHEN 'PL' THEN N'0.27'
        WHEN 'PT' THEN N'0.41'
        WHEN 'RO' THEN N'0.27'
        WHEN 'SE' THEN N'0.94'
        WHEN 'SI' THEN N'0.62'
        WHEN 'SK' THEN N'0.42'
        ELSE '1'
    END
FROM country c, country_property_type cpt
WHERE cpt.external_mapping_id = 'WAGE';

-- =============================================================================
-- VERIFICATION QUERIES (Optional - for testing)
-- =============================================================================

-- Verify country count
-- SELECT COUNT(*) as total_countries FROM country;

-- Verify property types
-- SELECT * FROM country_property_type;

-- Verify EU countries with all properties
-- SELECT
--     c.iso_code,
--     c.region_code,
--     MAX(CASE WHEN cpt.name = 'Customs Union' THEN cp.property_value END) as customs_union,
--     MAX(CASE WHEN cpt.name = 'Safety Stock [working days]' THEN cp.property_value END) as safety_stock,
--     MAX(CASE WHEN cpt.name = 'Air Freight Share [%]' THEN cp.property_value END) as air_freight,
--     MAX(CASE WHEN cpt.name = 'Wage Factor [%]' THEN cp.property_value END) as wage_factor
-- FROM country c
-- JOIN country_property cp ON c.id = cp.country_id
-- JOIN country_property_type cpt ON cp.country_property_type_id = cpt.id
-- WHERE c.iso_code IN ('DE', 'FR', 'AT', 'BE', 'NL')
-- GROUP BY c.id, c.iso_code, c.region_code
-- ORDER BY c.iso_code;
1224  src/main/resources/db/migration/mssql/V5__Nodes.sql  (Normal file)
File diff suppressed because it is too large.

804  src/main/resources/db/migration/mssql/V6__Predecessor_Nodes.sql  (Normal file)
@@ -0,0 +1,804 @@
-- Automatically generated SQL statements for node predecessor chains
-- Generated from: node.xlsx
-- Format: multiple chains per node are possible (separated by ;)
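
-- NOTE (reviewer aid, not part of the generated statements): a sketch of how the chains
-- inserted below can be read back per node, ordered by chain and sequence; it assumes only
-- the columns used in these inserts plus the identity column id on node_predecessor_chain.
-- SELECT n.external_mapping_id         AS node,
--        npe.node_predecessor_chain_id AS chain_id,
--        npe.sequence_number,
--        pred.external_mapping_id      AS predecessor
-- FROM node n
-- JOIN node_predecessor_chain npc ON npc.node_id = n.id
-- JOIN node_predecessor_entry npe ON npe.node_predecessor_chain_id = npc.id
-- JOIN node pred ON pred.id = npe.node_id
-- ORDER BY n.external_mapping_id, npe.node_predecessor_chain_id, npe.sequence_number;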
-- Predecessor Chain 1: AB (Chain 1 von 2)
|
||||||
|
-- Predecessors: WH_ULHA
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'AB')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_1 INT;
|
||||||
|
SET @chain_id_1 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_ULHA'),
|
||||||
|
@chain_id_1,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 2: AB (Chain 2 von 2)
|
||||||
|
-- Predecessors: WH_STO
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'AB')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_2 INT;
|
||||||
|
SET @chain_id_2 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO'),
|
||||||
|
@chain_id_2,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 3: HH (Chain 1 von 1)
|
||||||
|
-- Predecessors: WH_HH
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'HH')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_3 INT;
|
||||||
|
SET @chain_id_3 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_HH'),
|
||||||
|
@chain_id_3,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 4: FGG (Chain 1 von 2)
|
||||||
|
-- Predecessors: WH_STO
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'FGG')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_4 INT;
|
||||||
|
SET @chain_id_4 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO'),
|
||||||
|
@chain_id_4,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 5: FGG (Chain 2 von 2)
|
||||||
|
-- Predecessors: BEZEE
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'FGG')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_5 INT;
|
||||||
|
SET @chain_id_5 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'BEZEE'),
|
||||||
|
@chain_id_5,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 6: KWS (Chain 1 von 2)
|
||||||
|
-- Predecessors: WH_STO
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'KWS')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_6 INT;
|
||||||
|
SET @chain_id_6 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO'),
|
||||||
|
@chain_id_6,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 7: KWS (Chain 2 von 2)
|
||||||
|
-- Predecessors: BEZEE
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'KWS')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_7 INT;
|
||||||
|
SET @chain_id_7 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'BEZEE'),
|
||||||
|
@chain_id_7,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 8: EGD (Chain 1 von 2)
|
||||||
|
-- Predecessors: WH_HH
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'EGD')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_8 INT;
|
||||||
|
SET @chain_id_8 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_HH'),
|
||||||
|
@chain_id_8,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 9: EGD (Chain 2 von 2)
|
||||||
|
-- Predecessors: DEHAM
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'EGD')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_9 INT;
|
||||||
|
SET @chain_id_9 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
|
||||||
|
@chain_id_9,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 10: CTT (Chain 1 von 2)
|
||||||
|
-- Predecessors: WH_BAT3
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CTT')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_10 INT;
|
||||||
|
SET @chain_id_10 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_BAT3'),
|
||||||
|
@chain_id_10,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 11: CTT (Chain 2 von 2)
|
||||||
|
-- Predecessors: WH_JEAN
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CTT')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_11 INT;
|
||||||
|
SET @chain_id_11 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_JEAN'),
|
||||||
|
@chain_id_11,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 12: LZZ (Chain 1 von 1)
|
||||||
|
-- Predecessors: WH_ROLO
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'LZZ')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_12 INT;
|
||||||
|
SET @chain_id_12 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_ROLO'),
|
||||||
|
@chain_id_12,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 13: STR (Chain 1 von 1)
|
||||||
|
-- Predecessors: WH_ZBU
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'STR')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_13 INT;
|
||||||
|
SET @chain_id_13 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_ZBU'),
|
||||||
|
@chain_id_13,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 14: VOP (Chain 1 von 1)
|
||||||
|
-- Predecessors: WH_BUD
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'VOP')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_14 INT;
|
||||||
|
SET @chain_id_14 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_BUD'),
|
||||||
|
@chain_id_14,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 15: KOL (Chain 1 von 1)
|
||||||
|
-- Predecessors: DEHAM
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'KOL')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_15 INT;
|
||||||
|
SET @chain_id_15 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
|
||||||
|
@chain_id_15,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 16: LIPO (Chain 1 von 1)
|
||||||
|
-- Predecessors: WH_BUD
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'LIPO')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_16 INT;
|
||||||
|
SET @chain_id_16 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_BUD'),
|
||||||
|
@chain_id_16,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 17: WH_ZBU (Chain 1 von 1)
|
||||||
|
-- Predecessors: DEHAM
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_ZBU')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_17 INT;
|
||||||
|
SET @chain_id_17 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
|
||||||
|
@chain_id_17,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 18: WH_STO (Chain 1 von 1)
|
||||||
|
-- Predecessors: BEZEE
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_18 INT;
|
||||||
|
SET @chain_id_18 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'BEZEE'),
|
||||||
|
@chain_id_18,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 19: WH_HH (Chain 1 von 1)
|
||||||
|
-- Predecessors: DEHAM
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'WH_HH')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_19 INT;
|
||||||
|
SET @chain_id_19 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
|
||||||
|
@chain_id_19,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 20: CNSHA (Chain 1 von 6)
|
||||||
|
-- Predecessors: Shanghai
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_20 INT;
|
||||||
|
SET @chain_id_20 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Shanghai'),
|
||||||
|
@chain_id_20,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 21: CNSHA (Chain 2 von 6)
|
||||||
|
-- Predecessors: Hangzhou
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_21 INT;
|
||||||
|
SET @chain_id_21 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Hangzhou'),
|
||||||
|
@chain_id_21,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 22: CNSHA (Chain 3 von 6)
|
||||||
|
-- Predecessors: Yangzhong
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_22 INT;
|
||||||
|
SET @chain_id_22 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Yangzhong'),
|
||||||
|
@chain_id_22,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 23: CNSHA (Chain 4 von 6)
|
||||||
|
-- Predecessors: Taicang
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_23 INT;
|
||||||
|
SET @chain_id_23 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Taicang'),
|
||||||
|
@chain_id_23,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 24: CNSHA (Chain 5 von 6)
|
||||||
|
-- Predecessors: Jingjiang
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_24 INT;
|
||||||
|
SET @chain_id_24 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Jingjiang'),
|
||||||
|
@chain_id_24,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 25: CNSHA (Chain 6 von 6)
|
||||||
|
-- Predecessors: JJ
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_25 INT;
|
||||||
|
SET @chain_id_25 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'JJ'),
|
||||||
|
@chain_id_25,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 26: CNTAO (Chain 1 von 2)
|
||||||
|
-- Predecessors: Qingdao
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNTAO')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_26 INT;
|
||||||
|
SET @chain_id_26 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Qingdao'),
|
||||||
|
@chain_id_26,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 27: CNTAO (Chain 2 von 2)
|
||||||
|
-- Predecessors: Linfen
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNTAO')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_27 INT;
|
||||||
|
SET @chain_id_27 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Linfen'),
|
||||||
|
@chain_id_27,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 28: CNXMN (Chain 1 von 2)
|
||||||
|
-- Predecessors: Fuqing
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNXMN')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_28 INT;
|
||||||
|
SET @chain_id_28 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Fuqing'),
|
||||||
|
@chain_id_28,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 29: CNXMN (Chain 2 von 2)
|
||||||
|
-- Predecessors: LX
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'CNXMN')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_29 INT;
|
||||||
|
SET @chain_id_29 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'LX'),
|
||||||
|
@chain_id_29,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
-- Predecessor Chain 30: INNSA (Chain 1 von 2)
|
||||||
|
-- Predecessors: Pune
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'INNSA')
|
||||||
|
);
|
||||||
|
|
||||||
|
DECLARE @chain_id_30 INT;
|
||||||
|
SET @chain_id_30 = SCOPE_IDENTITY();
|
||||||
|
|
||||||
|
INSERT INTO node_predecessor_entry (
|
||||||
|
node_id,
|
||||||
|
node_predecessor_chain_id,
|
||||||
|
sequence_number
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'Pune'),
|
||||||
|
@chain_id_30,
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Predecessor Chain 31: INNSA (Chain 2 von 2)
|
||||||
|
-- Predecessors: Aurangabad
|
||||||
|
INSERT INTO node_predecessor_chain (
|
||||||
|
node_id
|
||||||
|
) VALUES (
|
||||||
|
(SELECT id FROM node WHERE external_mapping_id = 'INNSA')
|
||||||
|
);
|
||||||
|
DECLARE @chain_id_31 INT;
SET @chain_id_31 = SCOPE_IDENTITY();

INSERT INTO node_predecessor_entry (
    node_id,
    node_predecessor_chain_id,
    sequence_number
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'Aurangabad'),
    @chain_id_31,
    1
);

-- Predecessor Chain 32: INMAA (Chain 1 of 1)
-- Predecessors: Bangalore
INSERT INTO node_predecessor_chain (
    node_id
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'INMAA')
);

DECLARE @chain_id_32 INT;
SET @chain_id_32 = SCOPE_IDENTITY();

INSERT INTO node_predecessor_entry (
    node_id,
    node_predecessor_chain_id,
    sequence_number
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'Bangalore'),
    @chain_id_32,
    1
);

-- Predecessor Chain 33: CNSZX (Chain 1 of 1)
-- Predecessors: Shenzhen
INSERT INTO node_predecessor_chain (
    node_id
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'CNSZX')
);

DECLARE @chain_id_33 INT;
SET @chain_id_33 = SCOPE_IDENTITY();

INSERT INTO node_predecessor_entry (
    node_id,
    node_predecessor_chain_id,
    sequence_number
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'Shenzhen'),
    @chain_id_33,
    1
);

-- Predecessor Chain 34: WH_BAT3 (Chain 1 of 1)
-- Predecessors: FRLEH
INSERT INTO node_predecessor_chain (
    node_id
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'WH_BAT3')
);

DECLARE @chain_id_34 INT;
SET @chain_id_34 = SCOPE_IDENTITY();

INSERT INTO node_predecessor_entry (
    node_id,
    node_predecessor_chain_id,
    sequence_number
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'FRLEH'),
    @chain_id_34,
    1
);

-- Predecessor Chain 35: WH_JEAN (Chain 1 of 1)
-- Predecessors: FRLEH
INSERT INTO node_predecessor_chain (
    node_id
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'WH_JEAN')
);

DECLARE @chain_id_35 INT;
SET @chain_id_35 = SCOPE_IDENTITY();

INSERT INTO node_predecessor_entry (
    node_id,
    node_predecessor_chain_id,
    sequence_number
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'FRLEH'),
    @chain_id_35,
    1
);

-- Predecessor Chain 36: WH_ROLO (Chain 1 of 1)
-- Predecessors: ITGOA
INSERT INTO node_predecessor_chain (
    node_id
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'WH_ROLO')
);

DECLARE @chain_id_36 INT;
SET @chain_id_36 = SCOPE_IDENTITY();

INSERT INTO node_predecessor_entry (
    node_id,
    node_predecessor_chain_id,
    sequence_number
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'ITGOA'),
    @chain_id_36,
    1
);

-- Predecessor Chain 37: WH_BUD (Chain 1 of 1)
-- Predecessors: DEHAM
INSERT INTO node_predecessor_chain (
    node_id
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'WH_BUD')
);

DECLARE @chain_id_37 INT;
SET @chain_id_37 = SCOPE_IDENTITY();

INSERT INTO node_predecessor_entry (
    node_id,
    node_predecessor_chain_id,
    sequence_number
) VALUES (
    (SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
    @chain_id_37,
    1
);
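The chains above are keyed via SCOPE_IDENTITY(), so each chain row gets an identity value that the entry rows reference. As a hedged sketch (not part of the diff; it assumes the chain table's identity column is named id), the inserted data could be read back for verification with Spring's JdbcTemplate like this:

// Hedged sketch, not part of the diff: read back the predecessor chains
// inserted above, joining each chain to its predecessor nodes in sequence order.
import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.JdbcTemplate;

class PredecessorChainReader {

    private final JdbcTemplate jdbcTemplate;

    PredecessorChainReader(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Returns one row per (chain node, predecessor node, sequence_number).
    List<Map<String, Object>> readChains() {
        return jdbcTemplate.queryForList("""
                SELECT chain_node.external_mapping_id AS node,
                       pred_node.external_mapping_id  AS predecessor,
                       npe.sequence_number
                FROM node_predecessor_chain npc
                JOIN node chain_node ON chain_node.id = npc.node_id
                JOIN node_predecessor_entry npe ON npe.node_predecessor_chain_id = npc.id
                JOIN node pred_node ON pred_node.id = npe.node_id
                ORDER BY npc.id, npe.sequence_number
                """);
    }
}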
1087 src/main/resources/db/migration/mssql/V7__Data_Containerrate.sql Normal file
File diff suppressed because it is too large.
23310 src/main/resources/db/migration/mssql/V8__Data_Countrymatrixrate.sql Normal file
File diff suppressed because it is too large.
20 src/main/resources/db/migration/mssql/V9__Groups.sql Normal file
@@ -0,0 +1,20 @@
INSERT INTO sys_group(group_name, group_description)
VALUES (N'none', N'no rights');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'basic', N'can generate reports');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'calculation', N'can generate reports, do calculations');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'freight', N'manage freight rates');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'packaging', N'manage packaging data');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'material', N'manage material data');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'super',
        N'can generate reports, do calculations, manage freight rates, manage packaging data, manage material data, manage general system settings');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'service', N'register external applications');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'right-management',
        N'add users, manage user groups');
@@ -0,0 +1,51 @@
package de.avatic.lcc.config;

import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Profile;
import org.testcontainers.containers.MSSQLServerContainer;
import org.testcontainers.containers.MySQLContainer;
import org.testcontainers.utility.DockerImageName;

/**
 * TestContainers configuration for multi-database integration testing.
 * <p>
 * Automatically starts the correct database container based on the active Spring profile.
 * Uses @ServiceConnection to automatically configure the Spring DataSource.
 * <p>
 * Usage:
 * <pre>
 * mvn test -Dspring.profiles.active=test,mysql -Dtest=DatabaseConfigurationSmokeTest
 * mvn test -Dspring.profiles.active=test,mssql -Dtest=DatabaseConfigurationSmokeTest
 * </pre>
 */
@TestConfiguration
public class DatabaseTestConfiguration {

    @Bean
    @ServiceConnection
    @Profile("mysql")
    public MySQLContainer<?> mysqlContainer() {
        System.out.println("DatabaseTestConfiguration: Creating MySQL container bean...");
        MySQLContainer<?> container = new MySQLContainer<>(DockerImageName.parse("mysql:8.0"))
                .withDatabaseName("lcc_test")
                .withUsername("test")
                .withPassword("test");
        System.out.println("DatabaseTestConfiguration: MySQL container bean created");
        return container;
    }

    @Bean
    @ServiceConnection
    @Profile("mssql")
    public MSSQLServerContainer<?> mssqlContainer() {
        System.out.println("DatabaseTestConfiguration: Creating MSSQL container bean...");
        MSSQLServerContainer<?> container = new MSSQLServerContainer<>(
                DockerImageName.parse("mcr.microsoft.com/mssql/server:2022-latest"))
                .acceptLicense()
                .withPassword("YourStrong!Passw0rd123");
        System.out.println("DatabaseTestConfiguration: MSSQL container bean created");
        return container;
    }
}
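For context, a minimal sketch of how this configuration is typically consumed. The Javadoc above names DatabaseConfigurationSmokeTest as the real entry point; that test is not shown here, so the class below is a hypothetical stand-in and only the annotations reflect the documented usage.

// Minimal sketch, assuming a test that imports the configuration above.
// The test name and assertion are illustrative, not the project's actual smoke test.
import javax.sql.DataSource;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.ActiveProfiles;

import static org.junit.jupiter.api.Assertions.assertNotNull;

@SpringBootTest
@Import(DatabaseTestConfiguration.class)
@ActiveProfiles({"test", "mysql"})   // or {"test", "mssql"} to start the SQL Server container
class ContainerSmokeTestExample {

    @Autowired
    private DataSource dataSource;   // wired by @ServiceConnection from the running container

    @Test
    void dataSourceIsConfiguredFromTheContainer() {
        assertNotNull(dataSource);
    }
}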
49 src/test/java/de/avatic/lcc/config/RepositoryTestConfig.java Normal file
@@ -0,0 +1,49 @@
package de.avatic.lcc.config;

import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

import javax.sql.DataSource;

/**
 * Test configuration that provides only the beans needed for repository tests.
 * Does NOT load the full LccApplication context.
 *
 * Uses @SpringBootConfiguration to prevent Spring Boot from searching for and loading LccApplication.
 *
 * Excludes repositories with external dependencies (transformers/services) since we're only testing the JDBC layer.
 */
@SpringBootConfiguration
@EnableAutoConfiguration
@ComponentScan(
        basePackages = {
                "de.avatic.lcc.repositories",
                "de.avatic.lcc.database.dialect"
        },
        excludeFilters = @ComponentScan.Filter(
                type = FilterType.ASSIGNABLE_TYPE,
                classes = {
                        de.avatic.lcc.repositories.error.DumpRepository.class
                }
        )
)
public class RepositoryTestConfig {

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }

    @Bean
    public NamedParameterJdbcTemplate namedParameterJdbcTemplate(DataSource dataSource) {
        return new NamedParameterJdbcTemplate(dataSource);
    }

    // SqlDialectProvider beans are now provided by @Component annotations in
    // MySQLDialectProvider and MSSQLDialectProvider classes
}
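A hedged sketch of how a repository slice test could combine the two test configurations above; the test class and query below are hypothetical and only show the wiring, not an actual test from this branch.

// Hedged sketch: repository slice test using RepositoryTestConfig as the context
// and the Testcontainers configuration for the database.
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ActiveProfiles;

import static org.junit.jupiter.api.Assertions.assertEquals;

@SpringBootTest(classes = RepositoryTestConfig.class)
@Import(DatabaseTestConfiguration.class)
@ActiveProfiles({"test", "mssql"})
class ExampleRepositoryIT {

    @Autowired
    private JdbcTemplate jdbcTemplate;   // provided by RepositoryTestConfig

    @Test
    void canTalkToTheDatabase() {
        Integer one = jdbcTemplate.queryForObject("SELECT 1", Integer.class);
        assertEquals(1, one);
    }
}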
@@ -0,0 +1,301 @@
package de.avatic.lcc.database.dialect;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.List;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Unit tests for {@link MSSQLDialectProvider}.
 */
@DisplayName("MSSQLDialectProvider Tests")
class MSSQLDialectProviderTest {

    private MSSQLDialectProvider provider;

    @BeforeEach
    void setUp() {
        provider = new MSSQLDialectProvider();
    }

    @Nested
    @DisplayName("Metadata Tests")
    class MetadataTests {

        @Test
        @DisplayName("Should return correct dialect name")
        void shouldReturnCorrectDialectName() {
            assertEquals("Microsoft SQL Server", provider.getDialectName());
        }

        @Test
        @DisplayName("Should return correct driver class name")
        void shouldReturnCorrectDriverClassName() {
            assertEquals("com.microsoft.sqlserver.jdbc.SQLServerDriver", provider.getDriverClassName());
        }
    }

    @Nested
    @DisplayName("Pagination Tests")
    class PaginationTests {

        @Test
        @DisplayName("Should build correct pagination clause with OFFSET/FETCH")
        void shouldBuildCorrectPaginationClause() {
            String result = provider.buildPaginationClause(10, 20);
            assertEquals("OFFSET ? ROWS FETCH NEXT ? ROWS ONLY", result);
        }

        @Test
        @DisplayName("Should return pagination parameters in correct order (offset, limit)")
        void shouldReturnPaginationParametersInCorrectOrder() {
            Object[] params = provider.getPaginationParameters(10, 20);
            // MSSQL: offset first, then limit (reversed from MySQL)
            assertArrayEquals(new Object[]{20, 10}, params);
        }
    }

    @Nested
    @DisplayName("Upsert Operation Tests")
    class UpsertOperationTests {

        @Test
        @DisplayName("Should build correct MERGE statement")
        void shouldBuildCorrectMergeStatement() {
            List<String> uniqueCols = Arrays.asList("id", "user_id");
            List<String> insertCols = Arrays.asList("id", "user_id", "name", "value");
            List<String> updateCols = Arrays.asList("name", "value");

            String result = provider.buildUpsertStatement("test_table", uniqueCols, insertCols, updateCols);

            assertTrue(result.contains("MERGE INTO test_table AS target"));
            assertTrue(result.contains("USING (SELECT"));
            assertTrue(result.contains("ON target.id = source.id AND target.user_id = source.user_id"));
            assertTrue(result.contains("WHEN MATCHED THEN UPDATE SET"));
            assertTrue(result.contains("WHEN NOT MATCHED THEN INSERT"));
            assertTrue(result.contains("name = source.name"));
            assertTrue(result.contains("value = source.value"));
        }

        @Test
        @DisplayName("Should build correct conditional INSERT statement")
        void shouldBuildCorrectInsertIgnoreStatement() {
            List<String> columns = Arrays.asList("user_id", "group_id");
            List<String> uniqueCols = Arrays.asList("user_id", "group_id");

            String result = provider.buildInsertIgnoreStatement("mapping_table", columns, uniqueCols);

            assertTrue(result.contains("IF NOT EXISTS"));
            assertTrue(result.contains("SELECT 1 FROM mapping_table"));
            assertTrue(result.contains("WHERE user_id = ? AND group_id = ?"));
            assertTrue(result.contains("INSERT INTO mapping_table (user_id, group_id) VALUES (?, ?)"));
        }
    }

    @Nested
    @DisplayName("Locking Strategy Tests")
    class LockingStrategyTests {

        @Test
        @DisplayName("Should build WITH (UPDLOCK, READPAST) for SKIP LOCKED equivalent")
        void shouldBuildSelectForUpdateSkipLocked() {
            String baseQuery = "SELECT * FROM calculation_job WHERE state = 'CREATED'";
            String result = provider.buildSelectForUpdateSkipLocked(baseQuery);

            assertTrue(result.contains("WITH (UPDLOCK, READPAST)"));
            assertTrue(result.contains("FROM calculation_job WITH (UPDLOCK, READPAST)"));
        }

        @Test
        @DisplayName("Should build WITH (UPDLOCK, ROWLOCK) for standard locking")
        void shouldBuildSelectForUpdate() {
            String baseQuery = "SELECT * FROM calculation_job WHERE id = ?";
            String result = provider.buildSelectForUpdate(baseQuery);

            assertTrue(result.contains("WITH (UPDLOCK, ROWLOCK)"));
            assertTrue(result.contains("FROM calculation_job WITH (UPDLOCK, ROWLOCK)"));
            assertFalse(result.contains("READPAST"));
        }
    }

    @Nested
    @DisplayName("Date/Time Function Tests")
    class DateTimeFunctionTests {

        @Test
        @DisplayName("Should return GETDATE() for current timestamp")
        void shouldReturnGetDateForCurrentTimestamp() {
            assertEquals("GETDATE()", provider.getCurrentTimestamp());
        }

        @Test
        @DisplayName("Should build date subtraction with GETDATE() using DATEADD")
        void shouldBuildDateSubtractionWithGetDate() {
            String result = provider.buildDateSubtraction(null, "3", SqlDialectProvider.DateUnit.DAY);
            assertEquals("DATEADD(DAY, -3, GETDATE())", result);
        }

        @Test
        @DisplayName("Should build date subtraction with custom base date")
        void shouldBuildDateSubtractionWithCustomBaseDate() {
            String result = provider.buildDateSubtraction("calculation_date", "60", SqlDialectProvider.DateUnit.MINUTE);
            assertEquals("DATEADD(MINUTE, -60, calculation_date)", result);
        }

        @Test
        @DisplayName("Should build date addition with GETDATE() using DATEADD")
        void shouldBuildDateAdditionWithGetDate() {
            String result = provider.buildDateAddition(null, "7", SqlDialectProvider.DateUnit.DAY);
            assertEquals("DATEADD(DAY, 7, GETDATE())", result);
        }

        @Test
        @DisplayName("Should build date addition with custom base date")
        void shouldBuildDateAdditionWithCustomBaseDate() {
            String result = provider.buildDateAddition("start_date", "1", SqlDialectProvider.DateUnit.MONTH);
            assertEquals("DATEADD(MONTH, 1, start_date)", result);
        }

        @Test
        @DisplayName("Should extract date from column using CAST")
        void shouldExtractDateFromColumn() {
            String result = provider.extractDate("created_at");
            assertEquals("CAST(created_at AS DATE)", result);
        }

        @Test
        @DisplayName("Should extract date from expression using CAST")
        void shouldExtractDateFromExpression() {
            String result = provider.extractDate("GETDATE()");
            assertEquals("CAST(GETDATE() AS DATE)", result);
        }
    }

    @Nested
    @DisplayName("Auto-increment Reset Tests")
    class AutoIncrementResetTests {

        @Test
        @DisplayName("Should build DBCC CHECKIDENT reset statement")
        void shouldBuildAutoIncrementResetStatement() {
            String result = provider.buildAutoIncrementReset("test_table");
            assertEquals("DBCC CHECKIDENT ('test_table', RESEED, 0)", result);
        }
    }

    @Nested
    @DisplayName("Geospatial Distance Tests")
    class GeospatialDistanceTests {

        @Test
        @DisplayName("Should build Haversine distance calculation in kilometers")
        void shouldBuildHaversineDistanceCalculation() {
            String result = provider.buildHaversineDistance("50.1", "8.6", "node.geo_lat", "node.geo_lng");

            // MSSQL uses 6371 km (not 6371000 m like MySQL)
            assertTrue(result.contains("6371"));
            assertFalse(result.contains("6371000")); // Should NOT be in meters
            assertTrue(result.contains("ACOS"));
            assertTrue(result.contains("COS"));
            assertTrue(result.contains("SIN"));
            assertTrue(result.contains("RADIANS"));
            assertTrue(result.contains("50.1"));
            assertTrue(result.contains("8.6"));
            assertTrue(result.contains("node.geo_lat"));
            assertTrue(result.contains("node.geo_lng"));
        }
    }

    @Nested
    @DisplayName("String/Type Function Tests")
    class StringTypeFunctionTests {

        @Test
        @DisplayName("Should build CONCAT with multiple expressions")
        void shouldBuildConcatWithMultipleExpressions() {
            String result = provider.buildConcat("first_name", "' '", "last_name");
            assertEquals("CONCAT(first_name, ' ', last_name)", result);
        }

        @Test
        @DisplayName("Should build CONCAT with single expression")
        void shouldBuildConcatWithSingleExpression() {
            String result = provider.buildConcat("column_name");
            assertEquals("CONCAT(column_name)", result);
        }

        @Test
        @DisplayName("Should cast to string using VARCHAR")
        void shouldCastToString() {
            String result = provider.castToString("user_id");
            assertEquals("CAST(user_id AS VARCHAR(MAX))", result);
        }
    }

    @Nested
    @DisplayName("Bulk Operation Tests")
    class BulkOperationTests {

        @Test
        @DisplayName("Should return INT max value for MSSQL")
        void shouldReturnMSSQLIntMaxValue() {
            // MSSQL returns INT max value (not BIGINT)
            assertEquals("2147483647", provider.getMaxLimitValue());
        }

        @Test
        @DisplayName("Should support RETURNING clause via OUTPUT")
        void shouldSupportReturningClause() {
            assertTrue(provider.supportsReturningClause());
        }

        @Test
        @DisplayName("Should build OUTPUT clause for RETURNING")
        void shouldBuildOutputClause() {
            String result = provider.buildReturningClause("id", "name", "created_at");

            assertEquals("OUTPUT INSERTED.id, INSERTED.name, INSERTED.created_at", result);
        }
    }

    @Nested
    @DisplayName("Schema/DDL Tests")
    class SchemaDDLTests {

        @Test
        @DisplayName("Should return IDENTITY definition")
        void shouldReturnIdentityDefinition() {
            String result = provider.getAutoIncrementDefinition();
            assertEquals("IDENTITY(1,1)", result);
        }

        @Test
        @DisplayName("Should return DATETIME2 with default for timestamp")
        void shouldReturnDateTimeWithDefaultDefinition() {
            String result = provider.getTimestampDefinition();
            assertEquals("DATETIME2 DEFAULT GETDATE()", result);
        }
    }

    @Nested
    @DisplayName("Boolean Literal Tests")
    class BooleanLiteralTests {

        @Test
        @DisplayName("Should return '1' for boolean true")
        void shouldReturnOneForBooleanTrue() {
            assertEquals("1", provider.getBooleanTrue());
        }

        @Test
        @DisplayName("Should return '0' for boolean false")
        void shouldReturnZeroForBooleanFalse() {
            assertEquals("0", provider.getBooleanFalse());
        }
    }
}
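The pagination tests above pin down both the clause text and the parameter order, which differ between the two dialects. As a rough illustration of why that matters to callers, here is a sketch of a dialect-agnostic page read; the class and query are hypothetical, and only methods exercised by these tests are assumed on SqlDialectProvider.

// Illustrative sketch (not from the diff): dialect-aware pagination in a repository.
import java.util.List;
import org.springframework.jdbc.core.JdbcTemplate;
import de.avatic.lcc.database.dialect.SqlDialectProvider;

class NodePageReader {

    private final JdbcTemplate jdbcTemplate;
    private final SqlDialectProvider dialect; // MySQLDialectProvider or MSSQLDialectProvider

    NodePageReader(JdbcTemplate jdbcTemplate, SqlDialectProvider dialect) {
        this.jdbcTemplate = jdbcTemplate;
        this.dialect = dialect;
    }

    // Reads one page of external_mapping_ids; the clause and parameter order
    // differ per dialect (LIMIT ? OFFSET ? vs. OFFSET ? ROWS FETCH NEXT ? ROWS ONLY).
    List<String> readPage(int limit, int offset) {
        String sql = "SELECT external_mapping_id FROM node ORDER BY id "
                + dialect.buildPaginationClause(limit, offset);
        return jdbcTemplate.queryForList(sql, String.class, dialect.getPaginationParameters(limit, offset));
    }
}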
@@ -0,0 +1,281 @@
package de.avatic.lcc.database.dialect;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.List;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Unit tests for {@link MySQLDialectProvider}.
 */
@DisplayName("MySQLDialectProvider Tests")
class MySQLDialectProviderTest {

    private MySQLDialectProvider provider;

    @BeforeEach
    void setUp() {
        provider = new MySQLDialectProvider();
    }

    @Nested
    @DisplayName("Metadata Tests")
    class MetadataTests {

        @Test
        @DisplayName("Should return correct dialect name")
        void shouldReturnCorrectDialectName() {
            assertEquals("MySQL", provider.getDialectName());
        }

        @Test
        @DisplayName("Should return correct driver class name")
        void shouldReturnCorrectDriverClassName() {
            assertEquals("com.mysql.cj.jdbc.Driver", provider.getDriverClassName());
        }
    }

    @Nested
    @DisplayName("Pagination Tests")
    class PaginationTests {

        @Test
        @DisplayName("Should build correct pagination clause")
        void shouldBuildCorrectPaginationClause() {
            String result = provider.buildPaginationClause(10, 20);
            assertEquals("LIMIT ? OFFSET ?", result);
        }

        @Test
        @DisplayName("Should return pagination parameters in correct order")
        void shouldReturnPaginationParametersInCorrectOrder() {
            Object[] params = provider.getPaginationParameters(10, 20);
            assertArrayEquals(new Object[]{10, 20}, params);
        }
    }

    @Nested
    @DisplayName("Upsert Operation Tests")
    class UpsertOperationTests {

        @Test
        @DisplayName("Should build correct upsert statement")
        void shouldBuildCorrectUpsertStatement() {
            List<String> uniqueCols = Arrays.asList("id", "user_id");
            List<String> insertCols = Arrays.asList("id", "user_id", "name", "value");
            List<String> updateCols = Arrays.asList("name", "value");

            String result = provider.buildUpsertStatement("test_table", uniqueCols, insertCols, updateCols);

            assertTrue(result.contains("INSERT INTO test_table"));
            assertTrue(result.contains("(id, user_id, name, value)"));
            assertTrue(result.contains("VALUES (?, ?, ?, ?)"));
            assertTrue(result.contains("ON DUPLICATE KEY UPDATE"));
            assertTrue(result.contains("name = VALUES(name)"));
            assertTrue(result.contains("value = VALUES(value)"));
        }

        @Test
        @DisplayName("Should build correct insert ignore statement")
        void shouldBuildCorrectInsertIgnoreStatement() {
            List<String> columns = Arrays.asList("user_id", "group_id");
            List<String> uniqueCols = Arrays.asList("user_id", "group_id");

            String result = provider.buildInsertIgnoreStatement("mapping_table", columns, uniqueCols);

            assertEquals("INSERT IGNORE INTO mapping_table (user_id, group_id) VALUES (?, ?)", result);
        }
    }

    @Nested
    @DisplayName("Locking Strategy Tests")
    class LockingStrategyTests {

        @Test
        @DisplayName("Should build SELECT FOR UPDATE SKIP LOCKED")
        void shouldBuildSelectForUpdateSkipLocked() {
            String baseQuery = "SELECT * FROM calculation_job WHERE state = 'CREATED'";
            String result = provider.buildSelectForUpdateSkipLocked(baseQuery);

            assertTrue(result.endsWith("FOR UPDATE SKIP LOCKED"));
            assertTrue(result.startsWith("SELECT * FROM calculation_job"));
        }

        @Test
        @DisplayName("Should build SELECT FOR UPDATE")
        void shouldBuildSelectForUpdate() {
            String baseQuery = "SELECT * FROM calculation_job WHERE id = ?";
            String result = provider.buildSelectForUpdate(baseQuery);

            assertTrue(result.endsWith("FOR UPDATE"));
            assertFalse(result.contains("SKIP LOCKED"));
        }
    }

    @Nested
    @DisplayName("Date/Time Function Tests")
    class DateTimeFunctionTests {

        @Test
        @DisplayName("Should return NOW() for current timestamp")
        void shouldReturnNowForCurrentTimestamp() {
            assertEquals("NOW()", provider.getCurrentTimestamp());
        }

        @Test
        @DisplayName("Should build date subtraction with NOW()")
        void shouldBuildDateSubtractionWithNow() {
            String result = provider.buildDateSubtraction(null, "3", SqlDialectProvider.DateUnit.DAY);
            assertEquals("DATE_SUB(NOW(), INTERVAL 3 DAY)", result);
        }

        @Test
        @DisplayName("Should build date subtraction with custom base date")
        void shouldBuildDateSubtractionWithCustomBaseDate() {
            String result = provider.buildDateSubtraction("calculation_date", "60", SqlDialectProvider.DateUnit.MINUTE);
            assertEquals("DATE_SUB(calculation_date, INTERVAL 60 MINUTE)", result);
        }

        @Test
        @DisplayName("Should build date addition with NOW()")
        void shouldBuildDateAdditionWithNow() {
            String result = provider.buildDateAddition(null, "7", SqlDialectProvider.DateUnit.DAY);
            assertEquals("DATE_ADD(NOW(), INTERVAL 7 DAY)", result);
        }

        @Test
        @DisplayName("Should build date addition with custom base date")
        void shouldBuildDateAdditionWithCustomBaseDate() {
            String result = provider.buildDateAddition("start_date", "1", SqlDialectProvider.DateUnit.MONTH);
            assertEquals("DATE_ADD(start_date, INTERVAL 1 MONTH)", result);
        }

        @Test
        @DisplayName("Should extract date from column")
        void shouldExtractDateFromColumn() {
            String result = provider.extractDate("created_at");
            assertEquals("DATE(created_at)", result);
        }

        @Test
        @DisplayName("Should extract date from expression")
        void shouldExtractDateFromExpression() {
            String result = provider.extractDate("NOW()");
            assertEquals("DATE(NOW())", result);
        }
    }

    @Nested
    @DisplayName("Auto-increment Reset Tests")
    class AutoIncrementResetTests {

        @Test
        @DisplayName("Should build auto-increment reset statement")
        void shouldBuildAutoIncrementResetStatement() {
            String result = provider.buildAutoIncrementReset("test_table");
            assertEquals("ALTER TABLE test_table AUTO_INCREMENT = 1", result);
        }
    }

    @Nested
    @DisplayName("Geospatial Distance Tests")
    class GeospatialDistanceTests {

        @Test
        @DisplayName("Should build Haversine distance calculation in kilometers")
        void shouldBuildHaversineDistanceCalculation() {
            String result = provider.buildHaversineDistance("50.1", "8.6", "node.geo_lat", "node.geo_lng");

            // MySQL now uses 6371 km (not 6371000 m) for consistency with MSSQL
            assertTrue(result.contains("6371"));
            assertFalse(result.contains("6371000")); // Should NOT be in meters
            assertTrue(result.contains("ACOS"));
            assertTrue(result.contains("COS"));
            assertTrue(result.contains("SIN"));
            assertTrue(result.contains("RADIANS"));
            assertTrue(result.contains("50.1"));
            assertTrue(result.contains("8.6"));
            assertTrue(result.contains("node.geo_lat"));
            assertTrue(result.contains("node.geo_lng"));
        }
    }

    @Nested
    @DisplayName("String/Type Function Tests")
    class StringTypeFunctionTests {

        @Test
        @DisplayName("Should build CONCAT with multiple expressions")
        void shouldBuildConcatWithMultipleExpressions() {
            String result = provider.buildConcat("first_name", "' '", "last_name");
            assertEquals("CONCAT(first_name, ' ', last_name)", result);
        }

        @Test
        @DisplayName("Should build CONCAT with single expression")
        void shouldBuildConcatWithSingleExpression() {
            String result = provider.buildConcat("column_name");
            assertEquals("CONCAT(column_name)", result);
        }

        @Test
        @DisplayName("Should cast to string")
        void shouldCastToString() {
            String result = provider.castToString("user_id");
            assertEquals("CAST(user_id AS CHAR)", result);
        }
    }

    @Nested
    @DisplayName("Bulk Operation Tests")
    class BulkOperationTests {

        @Test
        @DisplayName("Should return MySQL BIGINT UNSIGNED max value")
        void shouldReturnMySQLBigIntUnsignedMaxValue() {
            assertEquals("18446744073709551615", provider.getMaxLimitValue());
        }

        @Test
        @DisplayName("Should not support RETURNING clause")
        void shouldNotSupportReturningClause() {
            assertFalse(provider.supportsReturningClause());
        }

        @Test
        @DisplayName("Should throw exception when building RETURNING clause")
        void shouldThrowExceptionWhenBuildingReturningClause() {
            UnsupportedOperationException exception = assertThrows(
                    UnsupportedOperationException.class,
                    () -> provider.buildReturningClause("id", "name")
            );

            assertTrue(exception.getMessage().contains("MySQL does not support RETURNING"));
            assertTrue(exception.getMessage().contains("LAST_INSERT_ID"));
        }
    }

    @Nested
    @DisplayName("Schema/DDL Tests")
    class SchemaDDLTests {

        @Test
        @DisplayName("Should return AUTO_INCREMENT definition")
        void shouldReturnAutoIncrementDefinition() {
            String result = provider.getAutoIncrementDefinition();
            assertEquals("INT NOT NULL AUTO_INCREMENT", result);
        }

        @Test
        @DisplayName("Should return TIMESTAMP with ON UPDATE definition")
        void shouldReturnTimestampWithOnUpdateDefinition() {
            String result = provider.getTimestampDefinition();
            assertEquals("TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP", result);
        }
    }
}
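Both dialect tests assert the same geospatial building blocks (ACOS, COS, SIN, RADIANS and an Earth radius of 6371 km). The generated SQL therefore appears to implement the spherical-law-of-cosines form of the great-circle distance (despite the "Haversine" naming in the method), which in kilometres is:

% Great-circle distance via the spherical law of cosines; R = 6371 km.
% (phi = latitude, lambda = longitude, both in radians.)
d = R \cdot \arccos\!\bigl(\sin\varphi_1 \sin\varphi_2
      + \cos\varphi_1 \cos\varphi_2 \cos(\lambda_2 - \lambda_1)\bigr),
\qquad R \approx 6371\ \mathrm{km}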
@@ -0,0 +1,150 @@
package de.avatic.lcc.e2e.config;

import com.microsoft.playwright.Browser;
import com.microsoft.playwright.BrowserContext;
import com.microsoft.playwright.BrowserType;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.Playwright;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.logging.Logger;

/**
 * Configuration and factory class for Playwright browser instances.
 * Provides centralized configuration for E2E tests.
 */
public class PlaywrightTestConfiguration {

    private static final Logger logger = Logger.getLogger(PlaywrightTestConfiguration.class.getName());

    // Default configuration values
    public static final String DEFAULT_BASE_URL = "http://localhost:5173";
    public static final boolean DEFAULT_HEADLESS = true;
    public static final int DEFAULT_VIEWPORT_WIDTH = 1920;
    public static final int DEFAULT_VIEWPORT_HEIGHT = 1080;
    public static final double DEFAULT_TOLERANCE = 0.01; // 1%
    public static final Path SCREENSHOTS_DIR = Paths.get("target/screenshots");
    public static final Path TRACES_DIR = Paths.get("target/traces");

    private Playwright playwright;
    private Browser browser;
    private final boolean headless;
    private final String baseUrl;
    private final int viewportWidth;
    private final int viewportHeight;

    public PlaywrightTestConfiguration() {
        this(
                System.getProperty("e2e.baseUrl", DEFAULT_BASE_URL),
                Boolean.parseBoolean(System.getProperty("playwright.headless", String.valueOf(DEFAULT_HEADLESS))),
                Integer.parseInt(System.getProperty("playwright.viewport.width", String.valueOf(DEFAULT_VIEWPORT_WIDTH))),
                Integer.parseInt(System.getProperty("playwright.viewport.height", String.valueOf(DEFAULT_VIEWPORT_HEIGHT)))
        );
    }

    public PlaywrightTestConfiguration(String baseUrl, boolean headless, int viewportWidth, int viewportHeight) {
        this.baseUrl = baseUrl;
        this.headless = headless;
        this.viewportWidth = viewportWidth;
        this.viewportHeight = viewportHeight;
    }

    /**
     * Initializes Playwright and launches the browser.
     * Must be called before creating pages.
     */
    public void initialize() {
        logger.info("Initializing Playwright");

        playwright = Playwright.create();
        browser = playwright.chromium().launch(
                new BrowserType.LaunchOptions()
                        .setHeadless(headless)
                        .setSlowMo(headless ? 0 : 100)
        );

        logger.info(() -> String.format(
                "Playwright initialized. Headless: %s, Base URL: %s, Viewport: %dx%d",
                headless, baseUrl, viewportWidth, viewportHeight
        ));
    }

    /**
     * Creates a new browser context with default settings.
     */
    public BrowserContext createContext() {
        return browser.newContext(new Browser.NewContextOptions()
                .setViewportSize(viewportWidth, viewportHeight)
        );
    }

    /**
     * Creates a new browser context with tracing enabled.
     */
    public BrowserContext createContextWithTracing(String traceName) {
        BrowserContext context = createContext();
        context.tracing().start(new com.microsoft.playwright.Tracing.StartOptions()
                .setScreenshots(true)
                .setSnapshots(true)
                .setSources(true)
        );
        return context;
    }

    /**
     * Stops tracing and saves it to a file.
     */
    public void stopTracing(BrowserContext context, String traceName) {
        context.tracing().stop(new com.microsoft.playwright.Tracing.StopOptions()
                .setPath(TRACES_DIR.resolve(traceName + ".zip"))
        );
    }

    /**
     * Creates a new page in a new context.
     */
    public Page createPage() {
        BrowserContext context = createContext();
        return context.newPage();
    }

    /**
     * Closes the browser and Playwright instance.
     */
    public void close() {
        if (browser != null) {
            browser.close();
        }
        if (playwright != null) {
            playwright.close();
        }
        logger.info("Playwright closed");
    }

    // Getters

    public String getBaseUrl() {
        return baseUrl;
    }

    public boolean isHeadless() {
        return headless;
    }

    public int getViewportWidth() {
        return viewportWidth;
    }

    public int getViewportHeight() {
        return viewportHeight;
    }

    public Browser getBrowser() {
        return browser;
    }

    public Playwright getPlaywright() {
        return playwright;
    }
}
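A minimal sketch of the intended lifecycle of this factory in a JUnit 5 E2E test, assuming the defaults above; the test class itself is hypothetical and only calls methods defined in the file.

// Illustrative sketch (not part of the diff): initialize once per class, create a
// page per test, close everything afterwards.
import com.microsoft.playwright.Page;
import org.junit.jupiter.api.*;

class PlaywrightLifecycleExampleTest {

    private static PlaywrightTestConfiguration config;

    @BeforeAll
    static void startBrowser() {
        config = new PlaywrightTestConfiguration();  // reads e2e.baseUrl / playwright.* system properties
        config.initialize();                         // launches Chromium (headless by default)
    }

    @AfterAll
    static void stopBrowser() {
        config.close();
    }

    @Test
    void opensTheAssistant() {
        Page page = config.createPage();             // new context + page
        page.navigate(config.getBaseUrl() + "/assistant");
        Assertions.assertTrue(page.url().contains("/assistant"));
    }
}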
123 src/test/java/de/avatic/lcc/e2e/config/TestAutoLoginFilter.java Normal file
@@ -0,0 +1,123 @@
package de.avatic.lcc.e2e.config;

import de.avatic.lcc.config.LccOidcUser;
import de.avatic.lcc.config.filter.DevUserEmulationFilter;
import de.avatic.lcc.model.db.users.User;
import de.avatic.lcc.repositories.users.UserRepository;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.oauth2.core.oidc.OidcIdToken;
import org.springframework.security.oauth2.core.oidc.OidcUserInfo;
import org.springframework.security.web.authentication.preauth.PreAuthenticatedAuthenticationToken;
import org.springframework.web.filter.OncePerRequestFilter;

import java.io.IOException;
import java.time.Instant;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Filter that automatically logs in a test user when running E2E tests.
 * This bypasses the need to manually select a user on the /dev page.
 */
public class TestAutoLoginFilter extends OncePerRequestFilter {

    private static final Logger log = LoggerFactory.getLogger(TestAutoLoginFilter.class);
    private static final String TEST_USER_EMAIL = "john.doe@test.com";
    private static final String DEV_USER_ID_SESSION_KEY = "dev.emulated.user.id";

    private final UserRepository userRepository;

    public TestAutoLoginFilter(UserRepository userRepository) {
        this.userRepository = userRepository;
    }

    @Override
    protected void doFilterInternal(@NotNull HttpServletRequest request,
                                    @NotNull HttpServletResponse response,
                                    @NotNull FilterChain filterChain) throws ServletException, IOException {

        HttpSession session = request.getSession(true);
        Integer emulatedUserId = (Integer) session.getAttribute(DEV_USER_ID_SESSION_KEY);

        // If no user is selected, auto-login the test user
        if (emulatedUserId == null) {
            try {
                User testUser = userRepository.getByEmail(TEST_USER_EMAIL);
                if (testUser != null) {
                    log.debug("TestAutoLoginFilter - Auto-logging in test user: {}", TEST_USER_EMAIL);
                    session.setAttribute(DEV_USER_ID_SESSION_KEY, testUser.getId());
                    setEmulatedUser(testUser);
                } else {
                    log.warn("TestAutoLoginFilter - Test user {} not found", TEST_USER_EMAIL);
                }
            } catch (Exception e) {
                log.debug("TestAutoLoginFilter - Could not auto-login: {}", e.getMessage());
            }
        } else {
            // User is already selected, set authentication
            User user = userRepository.getById(emulatedUserId);
            if (user != null) {
                setEmulatedUser(user);
            }
        }

        filterChain.doFilter(request, response);
    }

    private void setEmulatedUser(User user) {
        Set<GrantedAuthority> authorities = new HashSet<>();
        user.getGroups().forEach(group ->
                authorities.add(new SimpleGrantedAuthority("ROLE_" + group.getName().toUpperCase()))
        );

        // Create a mock OIDC user
        Map<String, Object> claims = new HashMap<>();
        claims.put("sub", user.getId().toString());
        claims.put("email", user.getEmail());
        claims.put("preferred_username", user.getEmail());
        claims.put("name", user.getFirstName() + " " + user.getLastName());
        if (user.getWorkdayId() != null) {
            claims.put("workday_id", user.getWorkdayId());
        }

        OidcIdToken idToken = new OidcIdToken(
                "mock-token",
                Instant.now(),
                Instant.now().plusSeconds(3600),
                claims
        );

        OidcUserInfo userInfo = new OidcUserInfo(claims);

        LccOidcUser oidcUser = new LccOidcUser(
                authorities,
                idToken,
                userInfo,
                "preferred_username",
                user.getId()
        );

        var authentication = new PreAuthenticatedAuthenticationToken(
                oidcUser,
                null,
                authorities
        );

        SecurityContext context = SecurityContextHolder.createEmptyContext();
        context.setAuthentication(authentication);
        SecurityContextHolder.setContext(context);
    }
}
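The filter only populates the SecurityContext; how it is registered is not shown in this diff. One way it could be wired up for the test profile is sketched below. This is an assumption, not the project's actual security configuration: the config class, the permitAll rule, and the insertion point before AuthorizationFilter are all illustrative.

// Hedged sketch (assumption): registering TestAutoLoginFilter for the "test" profile
// so every request carries the emulated user before authorization decisions are made.
import de.avatic.lcc.repositories.users.UserRepository;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Profile;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.web.SecurityFilterChain;
import org.springframework.security.web.access.intercept.AuthorizationFilter;

@TestConfiguration
@Profile("test")
class TestSecurityConfigSketch {

    @Bean
    SecurityFilterChain testFilterChain(HttpSecurity http, UserRepository userRepository) throws Exception {
        // Run the auto-login filter before the authorization filter.
        http.addFilterBefore(new TestAutoLoginFilter(userRepository), AuthorizationFilter.class);
        http.authorizeHttpRequests(auth -> auth.anyRequest().permitAll());
        return http.build();
    }
}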
@@ -0,0 +1,44 @@
package de.avatic.lcc.e2e.config;

import org.jetbrains.annotations.NotNull;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.resource.PathResourceResolver;

import java.io.IOException;

/**
 * Frontend configuration for E2E tests.
 * Serves index.html for Vue Router to handle SPA routes.
 */
@Configuration
@Profile("test")
public class TestFrontendConfig implements WebMvcConfigurer {

    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        // Handle all requests by serving index.html for non-existent resources
        // This allows Vue Router to handle SPA routes like /dev
        registry.addResourceHandler("/**")
                .addResourceLocations("classpath:/static/")
                .resourceChain(true)
                .addResolver(new PathResourceResolver() {
                    @Override
                    protected Resource getResource(@NotNull String resourcePath, @NotNull Resource location) throws IOException {
                        Resource requestedResource = location.createRelative(resourcePath);

                        // If the resource exists, serve it
                        if (requestedResource.exists() && requestedResource.isReadable()) {
                            return requestedResource;
                        }

                        // Otherwise, serve index.html for Vue Router to handle
                        return new ClassPathResource("static/index.html");
                    }
                });
    }
}
221 src/test/java/de/avatic/lcc/e2e/pages/AssistantPage.java Normal file
@@ -0,0 +1,221 @@
package de.avatic.lcc.e2e.pages;

import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.AriaRole;
import com.microsoft.playwright.options.WaitForSelectorState;
import de.avatic.lcc.e2e.testdata.TestCaseInput;

import java.util.logging.Logger;

/**
 * Page Object for the calculation assistant page.
 * Handles part number entry, supplier selection, and calculation creation.
 */
public class AssistantPage extends BasePage {

    private static final Logger logger = Logger.getLogger(AssistantPage.class.getName());

    // Selectors - using more robust selectors
    private static final String PART_NUMBER_INPUT = "textarea"; // simplified - typically only one textarea on the page
    private static final String ANALYZE_BUTTON_TEXT = "Analyze input";
    private static final String SUPPLIER_SEARCH_INPUT = "input[type='text']"; // fallback, may need refinement
    private static final String LOAD_FROM_PREVIOUS_CHECKBOX = ".checkbox-item";
    private static final String CREATE_CALCULATION_BUTTON_TEXT = "Create";
    private static final String DELETE_SUPPLIER_BUTTON = ".icon-btn";

    public AssistantPage(Page page) {
        super(page);
    }

    /**
     * Navigates to the assistant page.
     * The part number modal opens automatically by design.
     */
    public void navigate(String baseUrl) {
        page.navigate(baseUrl + "/assistant");
        waitForSpaNavigation("/assistant");

        // Wait for the part number modal to appear (it opens automatically)
        Locator modal = page.locator(".part-number-modal-container");
        try {
            modal.waitFor(new Locator.WaitForOptions()
                    .setState(WaitForSelectorState.VISIBLE)
                    .setTimeout(5000));
            logger.info("Part number modal opened automatically");
        } catch (Exception e) {
            logger.info("Modal did not open automatically, will be opened manually when needed");
        }

        // Debug screenshot after navigation
        page.screenshot(new com.microsoft.playwright.Page.ScreenshotOptions()
                .setPath(java.nio.file.Paths.get("target/screenshots/debug_after_navigate.png")));

        logger.info("Navigated to assistant page");
    }

    /**
     * Enters part numbers and clicks analyze.
     * Works with modal whether it's already open or needs to be opened.
     */
    public void searchPartNumbers(String partNumber) {
        // Check if modal is already visible
        Locator modal = page.locator(".part-number-modal-container");
        boolean modalVisible = false;
        try {
            modalVisible = modal.isVisible();
        } catch (Exception e) {
            modalVisible = false;
        }

        if (!modalVisible) {
            // Modal not open, click "Drop part numbers" button to open it
            logger.info("Modal not visible, clicking 'Drop part numbers' button");
            Locator dropButton = page.locator("button:has-text('Drop part numbers')");
            dropButton.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
            dropButton.click();

            // Wait for modal to appear
            modal.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        } else {
            logger.info("Modal already visible, proceeding with part number entry");
        }

        // Find and fill textarea inside modal - click first to focus, then type
        Locator textarea = modal.locator("textarea");
        textarea.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        textarea.click();
        page.waitForTimeout(200);
        textarea.fill(partNumber);

        // Debug screenshot after filling
        page.screenshot(new com.microsoft.playwright.Page.ScreenshotOptions()
                .setPath(java.nio.file.Paths.get("target/screenshots/debug_after_fill.png")));

        logger.info(() -> "Filled textarea with: " + partNumber);

        // Click Analyze input button inside modal
        Locator analyzeButton = modal.locator("button:has-text('Analyze input')");
        analyzeButton.click();

        logger.info("Clicked Analyze input button");

        // Wait for modal to close after API response
        page.waitForTimeout(2000); // Wait for API response

        // Check if modal is still visible and wait for it to close
        try {
            Locator modalOverlay = page.locator(".modal-overlay");
            if (modalOverlay.isVisible()) {
                modalOverlay.waitFor(new Locator.WaitForOptions()
                        .setState(WaitForSelectorState.HIDDEN)
                        .setTimeout(10000));
            }
        } catch (Exception e) {
            logger.warning("Modal overlay check failed: " + e.getMessage());
        }

        // Wait for the part number to appear in the material list (not anywhere on page)
        // The part number appears in: .item-list-element .supplier-item-address
        try {
            Locator partNumberInList = page.locator(".item-list-element .supplier-item-address:has-text('" + partNumber + "')");
            partNumberInList.waitFor(new Locator.WaitForOptions()
                    .setState(WaitForSelectorState.VISIBLE)
                    .setTimeout(10000));
            logger.info(() -> "Part number " + partNumber + " appeared in the material list");
        } catch (Exception e) {
            logger.warning(() -> "Part number " + partNumber + " not found in material list: " + e.getMessage());
            // Take a screenshot to debug
            page.screenshot(new com.microsoft.playwright.Page.ScreenshotOptions()
                    .setPath(java.nio.file.Paths.get("target/screenshots/debug_no_materials.png")));

            // Log what materials are visible
            int itemCount = page.locator(".item-list-element").count();
            logger.info(() -> "Found " + itemCount + " item-list-elements on page");
        }

        logger.info(() -> "Searched for part number: " + partNumber);
    }

    /**
     * Deletes all pre-selected suppliers.
     * Uses specific selector to target only supplier items, not material items.
     * SupplierItem has .supplier-content class with flag, MaterialItem has .material-item-text.
     */
    public void deletePreselectedSuppliers() {
        while (true) {
            try {
                // Target only delete buttons within supplier items (which have .supplier-content)
                // This avoids deleting material items by mistake
                Locator deleteButton = page.locator(".item-list-element:has(.supplier-content) .icon-btn").first();
                deleteButton.waitFor(new Locator.WaitForOptions()
                        .setState(WaitForSelectorState.VISIBLE)
                        .setTimeout(1000));
                deleteButton.click();
                page.waitForTimeout(200);
            } catch (Exception e) {
                // No more supplier delete buttons
                break;
            }
        }
        logger.info("Deleted all pre-selected suppliers");
    }

    /**
     * Selects a supplier by name using autosuggest.
     */
    public void selectSupplier(String supplierName) {
        // Find the search input - look for placeholder text or input near supplier section
        Locator searchInput = page.locator("input[placeholder*='Search'], input[placeholder*='search'], .search-input").first();
        searchInput.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        searchInput.clear();
        searchInput.fill(supplierName);
        page.waitForTimeout(1000);

        // Click the first suggestion
        Locator suggestion = page.locator(".suggestion-item, .autocomplete-item, [role='option']").first();
        try {
            suggestion.waitFor(new Locator.WaitForOptions()
                    .setState(WaitForSelectorState.VISIBLE)
                    .setTimeout(3000));
            suggestion.click();
        } catch (Exception e) {
            // Try clicking text that matches the supplier name
            page.getByText(supplierName).first().click();
        }
        page.waitForTimeout(500);
        logger.info(() -> "Selected supplier: " + supplierName);
    }

    /**
     * Sets the "load from previous" checkbox and creates the calculation.
     */
    public void createCalculation(boolean loadFromPrevious) {
        // Try to set checkbox if visible
        try {
            setCheckbox(LOAD_FROM_PREVIOUS_CHECKBOX, loadFromPrevious);
        } catch (Exception e) {
            logger.warning("Could not find load from previous checkbox, continuing...");
        }

        // Use specific role-based selector to avoid matching "Create Calculation" heading
        // and "Create a new supplier" button
        Locator createButton = page.getByRole(AriaRole.BUTTON,
                new Page.GetByRoleOptions().setName("Create").setExact(true));
        createButton.click();

        page.waitForTimeout(500);
        logger.info(() -> "Created calculation with loadFromPrevious: " + loadFromPrevious);
    }

    /**
     * Performs the complete assistant workflow for a test case.
     */
    public void completeAssistantWorkflow(String baseUrl, TestCaseInput input) {
        navigate(baseUrl);
        searchPartNumbers(input.partNumber());
        deletePreselectedSuppliers();
        selectSupplier(input.supplierName());
        createCalculation(input.loadFromPrevious());
    }
}
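A short usage sketch of the page object above. It assumes TestCaseInput is a record whose components match its accessors (partNumber, supplierName, loadFromPrevious); the constructor argument order and the sample data are hypothetical.

// Illustrative sketch: driving the full assistant workflow from a test helper.
import com.microsoft.playwright.Page;
import de.avatic.lcc.e2e.testdata.TestCaseInput;

class AssistantWorkflowExample {

    void runWorkflow(Page page, String baseUrl) {
        // Hypothetical test data; component order assumed from the accessors used above.
        TestCaseInput input = new TestCaseInput("4711-0815", "ACME GmbH", true);
        AssistantPage assistant = new AssistantPage(page);
        // Navigates, enters the part number, swaps the supplier, and creates the calculation.
        assistant.completeAssistantWorkflow(baseUrl, input);
    }
}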
209 src/test/java/de/avatic/lcc/e2e/pages/BasePage.java Normal file
@@ -0,0 +1,209 @@
package de.avatic.lcc.e2e.pages;

import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.LoadState;
import com.microsoft.playwright.options.WaitForSelectorState;

import java.util.logging.Logger;

/**
 * Base class for all Playwright Page Objects.
 * Provides common interaction methods for UI elements.
 */
public abstract class BasePage {

    private static final Logger logger = Logger.getLogger(BasePage.class.getName());

    protected final Page page;

    protected BasePage(Page page) {
        this.page = page;
    }

    /**
     * Waits until the SPA navigates to a route containing the expected part.
     */
    protected void waitForSpaNavigation(String expectedRoutePart) {
        page.waitForURL("**" + expectedRoutePart + "**");
        page.waitForLoadState(LoadState.NETWORKIDLE);
    }

    /**
     * Waits for an element to be visible.
     */
    protected Locator waitForElement(String selector) {
        Locator locator = page.locator(selector);
        locator.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        return locator;
    }

    /**
     * Waits for an element to be visible with a custom timeout.
     */
    protected Locator waitForElement(String selector, double timeoutMs) {
        Locator locator = page.locator(selector);
        locator.waitFor(new Locator.WaitForOptions()
                .setState(WaitForSelectorState.VISIBLE)
                .setTimeout(timeoutMs));
        return locator;
    }

    /**
     * Clears and fills an input field.
     */
    protected void fillInput(Locator locator, String text) {
        locator.clear();
        locator.fill(text);
        logger.info(() -> "Filled input with: " + text);
    }

    /**
     * Clears and fills an input field by selector.
     */
    protected void fillInput(String selector, String text) {
        Locator locator = waitForElement(selector);
        fillInput(locator, text);
    }

    /**
     * Fills an input field if it exists, returns false if element not found.
     */
    protected boolean fillInputIfExists(String selector, String text, double timeoutMs) {
        try {
            Locator locator = page.locator(selector);
            locator.waitFor(new Locator.WaitForOptions()
                    .setState(WaitForSelectorState.VISIBLE)
                    .setTimeout(timeoutMs));
            fillInput(locator, text);
            return true;
        } catch (Exception e) {
            logger.warning(() -> "Element not found, skipping: " + selector);
            return false;
        }
    }

    /**
     * Clicks a button by selector.
     */
    protected void clickButton(String selector) {
        Locator button = waitForElement(selector);
        button.click();
        logger.info(() -> "Clicked button: " + selector);
    }

    /**
     * Clicks a button by its visible text.
     */
    protected void clickButtonByText(String buttonText) {
        Locator button = page.getByText(buttonText);
        button.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        button.click();
        logger.info(() -> "Clicked button with text: " + buttonText);
    }

    /**
     * Clicks a button by its visible text with custom timeout.
     */
    protected void clickButtonByText(String buttonText, double timeoutMs) {
        Locator button = page.getByText(buttonText);
        button.waitFor(new Locator.WaitForOptions()
                .setState(WaitForSelectorState.VISIBLE)
                .setTimeout(timeoutMs));
        button.click();
        logger.info(() -> "Clicked button with text: " + buttonText);
    }

    /**
     * Sets a checkbox to the desired state.
     */
    protected void setCheckbox(String labelSelector, boolean checked) {
        Locator label = page.locator(labelSelector);
        label.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));

        Locator checkbox = label.locator("input[type='checkbox']");
        boolean isChecked = checkbox.isChecked();

        if (isChecked != checked) {
            label.click();
            page.waitForTimeout(300);
            logger.info(() -> "Toggled checkbox to: " + checked);
        }
    }

    /**
     * Selects an option from a dropdown menu.
     */
    protected void selectDropdownOption(String triggerSelector, String optionText) {
        Locator dropdownTrigger = waitForElement(triggerSelector);
|
||||||
|
|
||||||
|
// Check if already has the correct value
|
||||||
|
try {
|
||||||
|
String currentValue = dropdownTrigger.locator("span.dropdown-trigger-text").textContent();
|
||||||
|
if (optionText.equals(currentValue)) {
|
||||||
|
logger.info(() -> "Dropdown already has value: " + optionText);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} catch (Exception ignored) {
|
||||||
|
// Continue to open dropdown
|
||||||
|
}
|
||||||
|
|
||||||
|
dropdownTrigger.click();
|
||||||
|
logger.info("Opened dropdown");
|
||||||
|
|
||||||
|
Locator menu = page.locator("ul.dropdown-menu");
|
||||||
|
menu.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
|
||||||
|
|
||||||
|
String optionXPath = String.format(
|
||||||
|
"//li[contains(@class, 'dropdown-option')][normalize-space(text())='%s']",
|
||||||
|
optionText
|
||||||
|
);
|
||||||
|
Locator option = page.locator(optionXPath);
|
||||||
|
option.click();
|
||||||
|
|
||||||
|
logger.info(() -> "Selected dropdown option: " + optionText);
|
||||||
|
page.waitForTimeout(200);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches in an autosuggest input and selects the first suggestion.
|
||||||
|
*/
|
||||||
|
protected void searchAndSelectAutosuggest(String inputSelector, String searchText) {
|
||||||
|
searchAndSelectAutosuggest(inputSelector, searchText, ".suggestion-item");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches in an autosuggest input and selects from suggestions.
|
||||||
|
*/
|
||||||
|
protected void searchAndSelectAutosuggest(String inputSelector, String searchText, String suggestionSelector) {
|
||||||
|
Locator input = waitForElement(inputSelector);
|
||||||
|
input.clear();
|
||||||
|
input.fill(searchText);
|
||||||
|
|
||||||
|
page.waitForTimeout(1000);
|
||||||
|
|
||||||
|
Locator suggestion = page.locator(suggestionSelector).first();
|
||||||
|
suggestion.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
|
||||||
|
suggestion.click();
|
||||||
|
|
||||||
|
page.waitForTimeout(500);
|
||||||
|
logger.info(() -> "Selected autosuggest for: " + searchText);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Waits for a modal to close.
|
||||||
|
*/
|
||||||
|
protected void waitForModalToClose() {
|
||||||
|
page.locator("div.modal-container").waitFor(
|
||||||
|
new Locator.WaitForOptions().setState(WaitForSelectorState.HIDDEN)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Takes a screenshot for debugging purposes.
|
||||||
|
*/
|
||||||
|
protected void takeScreenshot(String name) {
|
||||||
|
page.screenshot(new Page.ScreenshotOptions()
|
||||||
|
.setPath(java.nio.file.Paths.get("target/screenshots/" + name + ".png")));
|
||||||
|
}
|
||||||
|
}
|
||||||
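For context, a minimal sketch of how a concrete page object composes the BasePage helpers above. The class, the selectors, and the field name are hypothetical and not part of this change set; only the inherited helper calls come from BasePage:

// Hypothetical example only - not part of this PR.
package de.avatic.lcc.e2e.pages;

import com.microsoft.playwright.Page;

public class ExamplePage extends BasePage {

    // Hypothetical selectors for illustration; real pages define their own constants.
    private static final String TITLE_INPUT = "input[name='title']";
    private static final String SAVE_BUTTON = "button.save";

    public ExamplePage(Page page) {
        super(page);
    }

    public void saveWithTitle(String title) {
        // fillInput waits for visibility, then clears and fills the field
        fillInput(TITLE_INPUT, title);
        // clickButton waits for the selector before clicking
        clickButton(SAVE_BUTTON);
        // waitForModalToClose blocks until div.modal-container is hidden
        waitForModalToClose();
    }
}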
679 src/test/java/de/avatic/lcc/e2e/pages/CalculationEditPage.java Normal file

@@ -0,0 +1,679 @@
package de.avatic.lcc.e2e.pages;

import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.WaitForSelectorState;
import de.avatic.lcc.e2e.testdata.DestinationInput;
import de.avatic.lcc.e2e.testdata.TestCaseInput;

import java.util.logging.Logger;

/**
 * Page Object for the calculation edit page.
 * Handles form filling for materials, packaging, pricing, and destinations.
 */
public class CalculationEditPage extends BasePage {

    private static final Logger logger = Logger.getLogger(CalculationEditPage.class.getName());

    // Screenshot settings
    private String screenshotPrefix = null;
    private int destinationCounter = 0;

    // Material section selectors (first master-data-item box)
    // Note: Use [1] after following-sibling::div to get only the first following sibling
    private static final String HS_CODE_INPUT = "//div[contains(@class, 'master-data-item')][1]//div[contains(@class, 'caption-column')][text()='HS code']/following-sibling::div[1]//input[@class='input-field']";
    private static final String TARIFF_RATE_INPUT = "//div[contains(@class, 'master-data-item')][1]//div[contains(@class, 'caption-column')][contains(., 'Tariff rate')]/following-sibling::div[1]//input[@class='input-field']";

    // Price section selectors (second master-data-item box)
    // Note: Labels are "MEK_A [EUR]", "Overseas share [%]" (spelling: OverSeas, not OverSea)
    private static final String PRICE_INPUT = "//div[contains(@class, 'master-data-item')][2]//div[contains(@class, 'caption-column')][contains(., 'MEK_A')]/following-sibling::div[1]//input[@class='input-field']";
    private static final String OVERSEA_SHARE_INPUT = "//div[contains(@class, 'master-data-item')][2]//div[contains(@class, 'caption-column')][contains(., 'Overseas share')]/following-sibling::div[1]//input[@class='input-field']";
    private static final String FCA_FEE_CHECKBOX = "//div[contains(@class, 'master-data-item')][2]//div[contains(@class, 'caption-column')][contains(., 'FCA')]/following-sibling::div[1]//label[contains(@class, 'checkbox-item')]";

    // Handling Unit section selectors (third master-data-item box)
    private static final String LENGTH_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU length']/following-sibling::div[1]//input[@class='input-field']";
    private static final String WIDTH_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU width']/following-sibling::div[1]//input[@class='input-field']";
    private static final String HEIGHT_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU height']/following-sibling::div[1]//input[@class='input-field']";
    private static final String WEIGHT_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU weight']/following-sibling::div[1]//input[@class='input-field']";
    private static final String PIECES_PER_UNIT_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='Pieces per HU']/following-sibling::div[1]//input[@class='input-field']";

    // Dropdown selectors
    private static final String DIMENSION_UNIT_DROPDOWN = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='Dimension unit']/following-sibling::div[1]//button[contains(@class, 'dropdown-trigger')]";
    private static final String WEIGHT_UNIT_DROPDOWN = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='Weight unit']/following-sibling::div[1]//button[contains(@class, 'dropdown-trigger')]";

    // Checkbox selectors
    private static final String MIXED_CHECKBOX = "//label[contains(@class, 'checkbox-item')][.//span[contains(@class, 'checkbox-label')][text()='Mixable']]";
    private static final String STACKED_CHECKBOX = "//label[contains(@class, 'checkbox-item')][.//span[contains(@class, 'checkbox-label')][text()='Stackable']]";

    // Destination selectors
    // Note: Use contains(., 'text') instead of contains(text(), 'text') when text is inside nested elements like tooltips
    private static final String DESTINATION_NAME_INPUT = "//input[@placeholder='Add new Destination ...']";
    private static final String DESTINATION_QUANTITY_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Annual quantity')]/following-sibling::div[1]//input[@class='input-field']";
    // Radio buttons are hidden and styled via label - click the label text instead
    private static final String ROUTING_RADIO = "//label[contains(@class, 'radio-item')]//span[contains(@class, 'radio-label')][contains(., 'standard routing')]";
    private static final String D2D_RADIO = "//label[contains(@class, 'radio-item')]//span[contains(@class, 'radio-label')][contains(., 'individual rate')]";
    // Note: D2D fields use "D2D Rate [EUR]" and "Lead time [days]" as labels in the UI
    private static final String D2D_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'D2D Rate')]/following-sibling::div[1]//input[@class='input-field']";
    private static final String D2D_DURATION_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Lead time')]/following-sibling::div[1]//input[@class='input-field']";
    private static final String HANDLING_TAB = "//button[contains(@class, 'tab-header')][contains(., 'Handling')]";
    private static final String CUSTOM_HANDLING_CHECKBOX = "//div[contains(@class, 'destination-edit-handling-cost')]//label[contains(@class, 'checkbox-item')]";
    private static final String HANDLING_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Handling cost')]/following-sibling::div[1]//input[@class='input-field']";
    private static final String REPACKING_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Repackaging cost')]/following-sibling::div[1]//input[@class='input-field']";
    private static final String DISPOSAL_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Disposal cost')]/following-sibling::div[1]//input[@class='input-field']";

    // Buttons
    private static final String CALCULATE_AND_CLOSE_BUTTON = "//button[contains(., 'Calculate & close')]";
    private static final String CLOSE_BUTTON = "//button[contains(., 'Close') and not(contains(., 'Calculate'))]";

    public CalculationEditPage(Page page) {
        super(page);
    }

    /**
     * Enables screenshot mode with a test case prefix.
     * Screenshots will be saved at key points during form filling.
     */
    public void enableScreenshots(String testCaseId) {
        this.screenshotPrefix = testCaseId;
        this.destinationCounter = 0;
    }

    /**
     * Takes a screenshot if screenshot mode is enabled.
     */
    private void captureScreenshot(String suffix) {
        if (screenshotPrefix != null) {
            String filename = screenshotPrefix + "_" + suffix;
            java.nio.file.Path screenshotPath = java.nio.file.Paths.get("target/screenshots/" + filename + ".png");
            page.screenshot(new Page.ScreenshotOptions().setPath(screenshotPath).setFullPage(true));
            logger.info(() -> "Screenshot saved: " + screenshotPath);
        }
    }

    /**
     * Takes a screenshot of the current page state before calculation.
     */
    public void screenshotBeforeCalculate() {
        captureScreenshot("before_calculate");
    }

    /**
     * Fills the main calculation form with input data.
     */
    public void fillForm(TestCaseInput input) {
        logger.info("Filling calculation form");

        // Material section (if HS code input exists)
        fillInputByXPath(HS_CODE_INPUT, String.valueOf(input.hsCode()), true);
        fillInputByXPath(TARIFF_RATE_INPUT, String.valueOf(input.tariffRate()), true);

        // Price section
        fillInputByXPath(PRICE_INPUT, String.valueOf(input.price()), false);
        fillInputByXPath(OVERSEA_SHARE_INPUT, String.valueOf(input.overseaShare()), false);
        setCheckboxByXPath(FCA_FEE_CHECKBOX, input.fcaFee());

        // Handling Unit section
        fillInputByXPath(LENGTH_INPUT, String.valueOf(input.length()), false);
        fillInputByXPath(WIDTH_INPUT, String.valueOf(input.width()), false);
        fillInputByXPath(HEIGHT_INPUT, String.valueOf(input.height()), false);
        fillInputByXPath(WEIGHT_INPUT, String.valueOf(input.weight()), false);
        fillInputByXPath(PIECES_PER_UNIT_INPUT, String.valueOf(input.piecesPerUnit()), false);

        // Dropdowns
        selectDropdownByXPath(DIMENSION_UNIT_DROPDOWN, input.dimensionUnit());
        selectDropdownByXPath(WEIGHT_UNIT_DROPDOWN, input.weightUnit());

        // Checkboxes
        setCheckboxByXPath(STACKED_CHECKBOX, input.stacked());
        setCheckboxByXPath(MIXED_CHECKBOX, input.mixed());

        logger.info("Calculation form filled successfully");
    }

    /**
     * Adds a new destination by name.
     */
    public void addDestination(DestinationInput destination) {
        searchAndSelectAutosuggestByXPath(DESTINATION_NAME_INPUT, destination.name());
        page.waitForTimeout(500);
        logger.info(() -> "Added destination: " + destination.name());
    }

    /**
     * Fills destination-specific fields.
     */
    public void fillDestination(DestinationInput destination) {
        destinationCounter++;
        String destNum = String.valueOf(destinationCounter);

        // First, ensure no modal is currently open
        try {
            Locator existingModal = page.locator(".modal-overlay");
            if (existingModal.count() > 0 && existingModal.isVisible()) {
                logger.info("Closing existing modal before opening destination edit");
                // Press Escape to close any open modal
                page.keyboard().press("Escape");
                page.waitForTimeout(500);
            }
        } catch (Exception e) {
            // No modal open, continue
        }

        // Click on the destination item's edit button to open the modal
        // The destination item shows the name, so we find it and click the pencil icon
        String destinationName = destination.name();
        Locator destinationRow = page.locator(".destination-item-row:has-text('" + destinationName + "')");

        if (destinationRow.count() > 0) {
            logger.info(() -> "Found destination row for: " + destinationName);
            Locator editButton = destinationRow.locator("button:has([class*='pencil'])");
            if (editButton.count() == 0) {
                // Try alternative selector for icon button
                editButton = destinationRow.locator(".destination-item-action button").first();
            }
            if (editButton.count() > 0) {
                logger.info("Clicking edit button to open destination modal");
                editButton.click();
                page.waitForTimeout(1000); // Wait for modal to open
            }
        }

        // Wait for destination edit modal to be visible
        Locator quantityInput = page.locator("xpath=" + DESTINATION_QUANTITY_INPUT);
        quantityInput.waitFor(new Locator.WaitForOptions()
                .setState(WaitForSelectorState.VISIBLE)
                .setTimeout(10000));

        // Wait extra time for Vue component to fully initialize
        // This is critical for subsequent destinations
        page.waitForTimeout(1000);

        // Fill quantity
        fillInputByXPath(DESTINATION_QUANTITY_INPUT, String.valueOf(destination.quantity()), false);

        // Select transport mode
        if (destination.d2d()) {
            page.locator("xpath=" + D2D_RADIO).click();
            page.waitForTimeout(300);

            // Fill D2D specific fields if individual rate (custom cost/duration)
            if (destination.d2dCost() != null) {
                fillInputByXPath(D2D_COST_INPUT, String.valueOf(destination.d2dCost()), true);
            }
            if (destination.d2dDuration() != null) {
                fillInputByXPath(D2D_DURATION_INPUT, String.valueOf(destination.d2dDuration()), true);
            }

            // Note: D2D mode does NOT show route selection UI - routes are determined by the D2D provider
            // If using standard routing (no cost specified), the system uses database D2D rates
            if (destination.d2dCost() == null) {
                logger.info("D2D with standard routing - D2D rates will be loaded from database");
            }
        } else {
            page.locator("xpath=" + ROUTING_RADIO).click();
            page.waitForTimeout(300);

            // Select route - if not specified, select first available route
            selectRoute(destination.route());
        }

        // Take screenshot of Routes tab (with route selection or D2D fields)
        captureScreenshot("dest" + destNum + "_routes_tab");

        // Handle custom handling costs
        if (destination.customHandling()) {
            // Click handling tab
            try {
                Locator handlingTab = page.locator("xpath=" + HANDLING_TAB);
                if (handlingTab.isVisible()) {
                    handlingTab.click();
                    page.waitForTimeout(300);
                }
            } catch (Exception e) {
                // Tab might not exist or already selected
            }

            setCheckboxByXPath(CUSTOM_HANDLING_CHECKBOX, true);
            page.waitForTimeout(300);

            if (destination.handlingCost() != null) {
                fillInputByXPath(HANDLING_COST_INPUT, String.valueOf(destination.handlingCost()), true);
            }
            if (destination.repackingCost() != null) {
                fillInputByXPath(REPACKING_COST_INPUT, String.valueOf(destination.repackingCost()), true);
            }
            if (destination.disposalCost() != null) {
                fillInputByXPath(DISPOSAL_COST_INPUT, String.valueOf(destination.disposalCost()), true);
            }

            // Take screenshot of Handling tab
            captureScreenshot("dest" + destNum + "_handling_tab");
        } else {
            // For destinations without custom handling, also take a screenshot of the handling tab for verification
            try {
                Locator handlingTab = page.locator("xpath=" + HANDLING_TAB);
                if (handlingTab.isVisible()) {
                    handlingTab.click();
                    page.waitForTimeout(300);
                    captureScreenshot("dest" + destNum + "_handling_tab");
                    // Go back to routes tab
                    Locator routesTab = page.locator("//button[contains(@class, 'tab-header')][contains(., 'Routes')]");
                    if (routesTab.count() > 0 && routesTab.isVisible()) {
                        routesTab.click();
                        page.waitForTimeout(200);
                    }
                }
            } catch (Exception e) {
                // Tab might not exist
            }
        }

        // Close the destination edit modal by clicking OK
        Locator okButton = page.locator("button:has-text('OK')");
        okButton.click();
        page.waitForTimeout(500);

        // Wait for modal and overlay to fully close
        try {
            page.locator(".destination-edit-modal-container").waitFor(
                    new Locator.WaitForOptions()
                            .setState(WaitForSelectorState.HIDDEN)
                            .setTimeout(5000));
        } catch (Exception e) {
            logger.warning("Destination edit modal might not have closed: " + e.getMessage());
        }

        // Also wait for any modal overlay to disappear
        try {
            page.locator(".modal-overlay").waitFor(
                    new Locator.WaitForOptions()
                            .setState(WaitForSelectorState.HIDDEN)
                            .setTimeout(3000));
        } catch (Exception e) {
            // Overlay might not exist or already hidden
        }

        // Extra wait to ensure DOM is stable
        page.waitForTimeout(500);

        logger.info(() -> "Filled destination: " + destination.name());
    }

    /**
     * Selects a route from the available routes.
     * Routes are displayed as clickable elements in the destination edit modal.
     * Each route shows external_mapping_id values like "HH", "WH HH", etc.
     *
     * The Vue component (DestinationEditRoutes) uses a Pinia store for route selection.
     * When a route is clicked, selectRoute(id) sets route.is_selected = true.
     *
     * IMPORTANT: Standard DOM clicks don't reliably trigger Vue's event system.
     * We need to find the Vue component and call its methods directly.
     */
    private void selectRoute(String route) {
        // Wait for routes to fully load
        page.waitForTimeout(500);

        // Wait for routes to be visible
        try {
            page.locator(".destination-route-container").first().waitFor(
                    new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE).setTimeout(5000));
        } catch (Exception e) {
            logger.info("No routes visible yet, continuing anyway");
        }

        // Check for "no routes available" warning
        Locator routeWarning = page.locator(".destination-edit-route-warning");
        if (routeWarning.count() > 0 && routeWarning.isVisible()) {
            String warningText = routeWarning.textContent();
            logger.warning(() -> "Route warning displayed: " + warningText);
            logger.info("No routes available - route selection skipped.");
            return;
        }

        // Get routes from DOM and find the Vue component
        Locator allRoutes = page.locator(".destination-route-container");
        int routeCount = allRoutes.count();
        logger.info(() -> "Found " + routeCount + " routes in DOM");

        if (routeCount == 0) {
            logger.warning("No routes found");
            return;
        }

        // Log available routes
        for (int i = 0; i < routeCount; i++) {
            final int idx = i;
            String routeText = allRoutes.nth(i).textContent();
            logger.info(() -> "  Route " + idx + ": " + routeText.trim());
        }

        // Find best matching route index
        int routeIndexToSelect = findBestMatchingRouteIndexFromDom(allRoutes, route);
        logger.info(() -> "Will select route at index " + routeIndexToSelect);

        // Try to find and call the Vue component's selectRoute method
        // The component is mounted on the modal's routes container
        Object result = page.evaluate("(routeIndex) => { " +
                "try { " +
                // Find the route element
                " const routeElements = document.querySelectorAll('.destination-route-container'); " +
                " if (!routeElements || routeElements.length === 0) return 'no_routes_in_dom'; " +
                " if (routeIndex >= routeElements.length) return 'index_out_of_bounds'; " +
                // Find the Vue component that handles routes - it's the parent of the routes container
                " const routesCell = document.querySelector('.destination-edit-cell-routes'); " +
                " if (!routesCell) return 'no_routes_cell'; " +
                // Walk up to find the component with selectRoute method
                " let vueComponent = null; " +
                " let el = routesCell; " +
                " for (let i = 0; i < 10 && el; i++) { " +
                "   if (el.__vueParentComponent) { " +
                "     let comp = el.__vueParentComponent; " +
                "     while (comp) { " +
                "       if (comp.ctx && typeof comp.ctx.selectRoute === 'function') { " +
                "         vueComponent = comp; " +
                "         break; " +
                "       } " +
                "       comp = comp.parent; " +
                "     } " +
                "     if (vueComponent) break; " +
                "   } " +
                "   el = el.parentElement; " +
                " } " +
                " if (!vueComponent) { " +
                // Alternative: try to access pinia via window or through component
                "   const routeEl = routeElements[routeIndex]; " +
                "   let compEl = routeEl; " +
                "   for (let i = 0; i < 5 && compEl; i++) { " +
                "     if (compEl.__vueParentComponent?.ctx?.destination?.routes) { " +
                "       const routes = compEl.__vueParentComponent.ctx.destination.routes; " +
                "       if (Array.isArray(routes) && routes.length > routeIndex) { " +
                "         routes.forEach((r, idx) => { r.is_selected = (idx === routeIndex); }); " +
                "         return 'set_via_ctx_destination'; " +
                "       } " +
                "     } " +
                "     compEl = compEl.parentElement; " +
                "   } " +
                "   return 'no_vue_component'; " +
                " } " +
                // Get the route id from the component's destination.routes
                " const routes = vueComponent.ctx.destination?.routes; " +
                " if (!routes || routes.length === 0) return 'no_routes_in_ctx'; " +
                " if (routeIndex >= routes.length) return 'route_index_exceeds_ctx'; " +
                " const routeId = routes[routeIndex].id; " +
                // Call the selectRoute method
                " vueComponent.ctx.selectRoute(routeId); " +
                " return 'called_selectRoute:' + routeId; " +
                "} catch (e) { return 'error:' + e.message; } " +
                "}", routeIndexToSelect);

        final Object vueResult = result;
        logger.info(() -> "Vue component route selection result: " + vueResult);

        // Always try click simulation as the primary method - it's most reliable
        logger.info("Using click simulation to select route");
        Locator routeToClick = allRoutes.nth(routeIndexToSelect);
        simulateRobustClick(routeToClick);

        // Wait for UI update
        page.waitForTimeout(500);

        // Verify selection worked
        boolean selected = verifyRouteSelectionVisual(allRoutes.nth(routeIndexToSelect));

        // If click didn't work, try Pinia as fallback
        if (!selected) {
            logger.info("Click simulation didn't select route, trying Pinia direct access");
            Object piniaResult = tryPiniaDirectAccess(routeIndexToSelect);
            final Object piniaResultFinal = piniaResult;
            logger.info(() -> "Pinia direct access result: " + piniaResultFinal);
            page.waitForTimeout(300);
            selected = verifyRouteSelectionVisual(allRoutes.nth(routeIndexToSelect));
        }

        if (!selected) {
            logger.warning(() -> "Route selection may have failed for index " + routeIndexToSelect);
        }
    }

    /**
     * Try direct Pinia store access through various paths.
     */
    private Object tryPiniaDirectAccess(int routeIndex) {
        return page.evaluate("(routeIndex) => { " +
                "try { " +
                // Try different ways to find Pinia
                " let pinia = null; " +
                // Method 1: Through app provides
                " const app = document.querySelector('#app')?.__vue_app__; " +
                " if (app?._context?.provides?.pinia) { " +
                "   pinia = app._context.provides.pinia; " +
                " } " +
                // Method 2: Through window (if exposed)
                " if (!pinia && window.__pinia) { " +
                "   pinia = window.__pinia; " +
                " } " +
                // Method 3: Walk through app's config
                " if (!pinia && app?.config?.globalProperties?.$pinia) { " +
                "   pinia = app.config.globalProperties.$pinia; " +
                " } " +
                " if (!pinia) return 'pinia_not_found'; " +
                // Access the store
                " const storeState = pinia.state?.value?.['destinationSingleEdit']; " +
                " if (!storeState?.destination?.routes) return 'store_not_found'; " +
                " const routes = storeState.destination.routes; " +
                " if (routeIndex >= routes.length) return 'index_out_of_range'; " +
                // Set selection
                " routes.forEach((r, idx) => { r.is_selected = (idx === routeIndex); }); " +
                " return 'pinia_success'; " +
                "} catch (e) { return 'pinia_error:' + e.message; } " +
                "}", routeIndex);
    }

    /**
     * Simulate a robust click that Vue should recognize.
     */
    private void simulateRobustClick(Locator element) {
        try {
            // First, scroll into view
            element.scrollIntoViewIfNeeded();
            page.waitForTimeout(100);

            // Try to trigger via native Playwright click
            element.click(new Locator.ClickOptions().setForce(true));
            page.waitForTimeout(100);

            // Also dispatch events manually
            element.evaluate("el => { " +
                    "const mousedown = new MouseEvent('mousedown', { bubbles: true, cancelable: true, view: window }); " +
                    "const mouseup = new MouseEvent('mouseup', { bubbles: true, cancelable: true, view: window }); " +
                    "const click = new MouseEvent('click', { bubbles: true, cancelable: true, view: window }); " +
                    "el.dispatchEvent(mousedown); " +
                    "el.dispatchEvent(mouseup); " +
                    "el.dispatchEvent(click); " +
                    "}");

            logger.info("Simulated robust click on route element");
        } catch (Exception e) {
            logger.warning(() -> "Robust click simulation failed: " + e.getMessage());
        }
    }

    /**
     * Verify route selection is visible in the DOM.
     * @return true if the route appears selected, false otherwise
     */
    private boolean verifyRouteSelectionVisual(Locator routeElement) {
        try {
            Locator innerContainer = routeElement.locator(".destination-route-inner-container");
            if (innerContainer.count() > 0) {
                String classes = innerContainer.getAttribute("class");
                boolean selected = classes != null && classes.contains("selected");
                logger.info(() -> "Route visual verification - classes: " + classes + ", selected: " + selected);
                return selected;
            }
        } catch (Exception e) {
            logger.warning(() -> "Could not verify route selection: " + e.getMessage());
        }
        return false;
    }

    /**
     * Find exact matching route from DOM elements.
     * The route must contain all spec segments in order, and the route text
     * (when normalized) should match the concatenated spec segments.
     *
     * @throws IllegalStateException if no exact match is found
     */
    private int findBestMatchingRouteIndexFromDom(Locator allRoutes, String routeSpec) {
        int routeCount = allRoutes.count();
        if (routeSpec == null || routeSpec.isEmpty()) {
            return 0; // No route specified, use first available
        }
        if (routeCount == 0) {
            throw new IllegalStateException("No routes available, but route spec was: " + routeSpec);
        }

        String[] specSegments = routeSpec.split(",");

        // Build expected route text by concatenating segments (routes display without separators)
        StringBuilder expectedBuilder = new StringBuilder();
        for (String segment : specSegments) {
            expectedBuilder.append(segment.trim().toLowerCase().replace("_", " "));
        }
        String expectedRouteText = expectedBuilder.toString();

        // Find exact match
        for (int i = 0; i < routeCount; i++) {
            String routeText = allRoutes.nth(i).textContent().toLowerCase().trim();
            // Remove common whitespace/separator variations
            String normalizedRouteText = routeText.replaceAll("\\s+", "").replace(">", "");
            String normalizedExpected = expectedRouteText.replaceAll("\\s+", "");

            if (normalizedRouteText.equals(normalizedExpected)) {
                final int matchedIndex = i;
                final String matchedRoute = routeText;
                logger.info(() -> "Exact route match found at index " + matchedIndex + ": " + matchedRoute);
                return i;
            }
        }

        // No exact match found - log available routes and fail
        StringBuilder availableRoutes = new StringBuilder("Available routes:\n");
        for (int i = 0; i < routeCount; i++) {
            availableRoutes.append("  ").append(i).append(": ").append(allRoutes.nth(i).textContent().trim()).append("\n");
        }

        throw new IllegalStateException(
                "No exact route match found for spec: '" + routeSpec + "' (expected: '" + expectedRouteText + "')\n" +
                availableRoutes.toString()
        );
    }


    /**
     * Clicks the "Calculate & close" button.
     */
    public void calculateAndClose() {
        page.locator("xpath=" + CALCULATE_AND_CLOSE_BUTTON).click();
        page.waitForTimeout(2000);
        logger.info("Clicked Calculate & close");
    }

    /**
     * Clicks the "Close" button.
     */
    public void close() {
        page.locator("xpath=" + CLOSE_BUTTON).click();
        logger.info("Clicked Close");
    }

    // Helper methods for XPath-based operations

    private void fillInputByXPath(String xpath, String value, boolean optional) {
        try {
            Locator locator = page.locator("xpath=" + xpath);
            if (optional) {
                locator.waitFor(new Locator.WaitForOptions()
                        .setState(WaitForSelectorState.VISIBLE)
                        .setTimeout(2000));
            } else {
                locator.waitFor(new Locator.WaitForOptions()
                        .setState(WaitForSelectorState.VISIBLE));
            }
            locator.clear();
            locator.fill(value);
            logger.fine(() -> "Filled XPath input: " + xpath + " with value: " + value);
        } catch (Exception e) {
            if (!optional) {
                throw e;
            }
            logger.warning(() -> "Optional field not found: " + xpath);
        }
    }

    private void setCheckboxByXPath(String xpath, boolean checked) {
        try {
            Locator label = page.locator("xpath=" + xpath);
            label.waitFor(new Locator.WaitForOptions()
                    .setState(WaitForSelectorState.VISIBLE)
                    .setTimeout(2000));

            Locator checkbox = label.locator("input[type='checkbox']");
            boolean isChecked = checkbox.isChecked();

            if (isChecked != checked) {
                label.click();
                page.waitForTimeout(300);
            }
        } catch (Exception e) {
            logger.warning(() -> "Could not set checkbox: " + xpath);
        }
    }

    private void selectDropdownByXPath(String xpath, String optionText) {
        try {
            Locator dropdown = page.locator("xpath=" + xpath);
            dropdown.waitFor(new Locator.WaitForOptions()
                    .setState(WaitForSelectorState.VISIBLE)
                    .setTimeout(2000));

            // Check current value
            try {
                String currentValue = dropdown.locator("span.dropdown-trigger-text").textContent();
                if (optionText.equals(currentValue)) {
                    return;
                }
            } catch (Exception ignored) {
            }

            dropdown.click();

            Locator menu = page.locator("ul.dropdown-menu");
            menu.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));

            String optionXPath = String.format(
                    "//li[contains(@class, 'dropdown-option')][normalize-space(text())='%s']",
                    optionText
            );
            page.locator("xpath=" + optionXPath).click();
            page.waitForTimeout(200);
        } catch (Exception e) {
            logger.warning(() -> "Could not select dropdown option: " + optionText);
        }
    }

    private void searchAndSelectAutosuggestByXPath(String xpath, String searchText) {
        Locator input = page.locator("xpath=" + xpath);
        input.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        input.clear();
        input.fill(searchText);
        page.waitForTimeout(1000);

        Locator suggestion = page.locator(".suggestion-item").first();
        suggestion.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        suggestion.click();
        page.waitForTimeout(500);
    }
}
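A rough usage sketch of the public API above. The test-data objects and the surrounding fixture are assumptions; only the CalculationEditPage calls come from this file:

// Hypothetical usage sketch - not part of this PR. Assumes a Playwright Page and
// TestCaseInput/DestinationInput instances are supplied by the test fixture.
package de.avatic.lcc.e2e.pages;

import com.microsoft.playwright.Page;
import de.avatic.lcc.e2e.testdata.DestinationInput;
import de.avatic.lcc.e2e.testdata.TestCaseInput;

class CalculationEditFlowSketch {

    void editAndCalculate(Page page, TestCaseInput input, DestinationInput destination) {
        CalculationEditPage editPage = new CalculationEditPage(page);
        editPage.enableScreenshots("case_01");   // optional per-step screenshots
        editPage.fillForm(input);                // material, price and handling-unit sections
        editPage.addDestination(destination);    // autosuggest by destination name
        editPage.fillDestination(destination);   // quantity, routing or D2D, optional handling costs
        editPage.screenshotBeforeCalculate();
        editPage.calculateAndClose();            // triggers the calculation and closes the editor
    }
}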
86 src/test/java/de/avatic/lcc/e2e/pages/DevLoginPage.java Normal file

@@ -0,0 +1,86 @@
package de.avatic.lcc.e2e.pages;

import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.WaitForSelectorState;

import java.util.logging.Logger;

/**
 * Page Object for the dev login page (/dev).
 * Allows selecting a user from the dev user table for testing.
 */
public class DevLoginPage extends BasePage {

    private static final Logger logger = Logger.getLogger(DevLoginPage.class.getName());

    private static final String MODAL_YES_BUTTON = "div.modal-dialog-actions button.btn--primary";
    private static final String MODAL_CONTAINER = "div.modal-container";

    public DevLoginPage(Page page) {
        super(page);
    }

    /**
     * Navigates to the dev login page and logs in as the specified user.
     *
     * @param baseUrl  The base URL of the application
     * @param userName The first name of the user to log in as (e.g., "John")
     */
    public void login(String baseUrl, String userName) {
        page.navigate(baseUrl + "/dev");

        // Wait for the page to load
        page.waitForLoadState();

        // The /dev page has two tables. We need the first one (User control tab).
        // Use .first() to get the first table
        Locator userTable = page.locator("table.data-table").first();
        userTable.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));

        // Wait for table rows to appear (API might take time to load data)
        Locator rows = userTable.locator("tbody tr.table-row");
        try {
            rows.first().waitFor(new Locator.WaitForOptions()
                    .setState(WaitForSelectorState.VISIBLE)
                    .setTimeout(10000));
        } catch (Exception e) {
            logger.warning("No table rows found after waiting. Page content: " +
                    page.content().substring(0, Math.min(1000, page.content().length())));
            throw new RuntimeException("No users found in dev user table. Is the API working?", e);
        }

        int rowCount = rows.count();
        logger.info(() -> "Found " + rowCount + " user rows");

        boolean userFound = false;
        for (int i = 0; i < rowCount; i++) {
            Locator row = rows.nth(i);
            Locator firstCell = row.locator("td").first();
            String firstName = firstCell.textContent();

            if (firstName != null && firstName.contains(userName)) {
                row.click();
                userFound = true;
                logger.info(() -> "Selected user: " + userName);
                break;
            }
        }

        if (!userFound) {
            throw new RuntimeException("User '" + userName + "' not found in dev user table");
        }

        // Confirm the login in the modal
        Locator yesButton = page.locator(MODAL_YES_BUTTON);
        yesButton.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
        yesButton.click();

        // Wait for modal to close
        page.locator(MODAL_CONTAINER).waitFor(
                new Locator.WaitForOptions().setState(WaitForSelectorState.HIDDEN)
        );

        logger.info(() -> "Successfully logged in as: " + userName);
    }
}
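A minimal sketch of how this login step is typically driven from a test. The JUnit harness, browser setup, base URL, and user name below are assumptions, not part of this change; only the DevLoginPage call is taken from the file above:

// Hypothetical JUnit 5 sketch - assumes Playwright for Java and the app under test are available.
package de.avatic.lcc.e2e;

import com.microsoft.playwright.Browser;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.Playwright;
import de.avatic.lcc.e2e.pages.DevLoginPage;
import org.junit.jupiter.api.Test;

class DevLoginSketchTest {

    @Test
    void logsInViaDevUserTable() {
        try (Playwright playwright = Playwright.create()) {
            Browser browser = playwright.chromium().launch();
            Page page = browser.newPage();

            // Picks the first row whose first name contains "John" and confirms the modal.
            DevLoginPage devLogin = new DevLoginPage(page);
            devLogin.login("http://localhost:8080", "John");

            browser.close();
        }
    }
}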
620
src/test/java/de/avatic/lcc/e2e/pages/ResultsPage.java
Normal file
620
src/test/java/de/avatic/lcc/e2e/pages/ResultsPage.java
Normal file
|
|
@ -0,0 +1,620 @@
|
||||||
|
package de.avatic.lcc.e2e.pages;
|
||||||
|
|
||||||
|
import com.microsoft.playwright.Locator;
|
||||||
|
import com.microsoft.playwright.Page;
|
||||||
|
import com.microsoft.playwright.options.AriaRole;
|
||||||
|
import com.microsoft.playwright.options.WaitForSelectorState;
|
||||||
|
import de.avatic.lcc.e2e.testdata.DestinationExpected;
|
||||||
|
import de.avatic.lcc.e2e.testdata.TestCaseExpected;
|
||||||
|
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.logging.Logger;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Page Object for the calculation results/report page.
|
||||||
|
* Handles navigating to reports and reading calculation results.
|
||||||
|
*/
|
||||||
|
public class ResultsPage extends BasePage {
|
||||||
|
|
||||||
|
private static final Logger logger = Logger.getLogger(ResultsPage.class.getName());
|
||||||
|
|
||||||
|
// Report page selectors based on Report.vue structure
|
||||||
|
private static final String REPORT_CONTAINER = ".report-container";
|
||||||
|
private static final String CREATE_REPORT_BUTTON = "button:has-text('Create report')";
|
||||||
|
private static final String REPORT_BOX = ".box"; // Reports are shown inside Box components
|
||||||
|
|
||||||
|
public ResultsPage(Page page) {
|
||||||
|
super(page);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Navigates to the reports page and creates a report for the given material/supplier.
|
||||||
|
*/
|
||||||
|
public void navigateToReports(String baseUrl, String partNumber, String supplierName) {
|
||||||
|
// Navigate to reports page
|
||||||
|
page.navigate(baseUrl + "/reports");
|
||||||
|
page.waitForLoadState();
|
||||||
|
logger.info("Navigated to reports page");
|
||||||
|
|
||||||
|
// Click "Create report" button
|
||||||
|
Locator createReportBtn = page.locator(CREATE_REPORT_BUTTON);
|
||||||
|
createReportBtn.waitFor(new Locator.WaitForOptions()
|
||||||
|
.setState(WaitForSelectorState.VISIBLE)
|
||||||
|
.setTimeout(10000));
|
||||||
|
createReportBtn.click();
|
||||||
|
logger.info("Clicked Create report button");
|
||||||
|
|
||||||
|
// Wait for the modal to fully open
|
||||||
|
page.waitForTimeout(1000);
|
||||||
|
|
||||||
|
// The modal has an autosuggest search bar with specific placeholder
|
||||||
|
// Use the placeholder text to find the correct input inside the modal
|
||||||
|
Locator searchInput = page.locator("input[placeholder='Select material for reporting']");
|
||||||
|
searchInput.waitFor(new Locator.WaitForOptions()
|
||||||
|
.setState(WaitForSelectorState.VISIBLE)
|
||||||
|
.setTimeout(5000));
|
||||||
|
searchInput.click();
|
||||||
|
searchInput.fill(partNumber);
|
||||||
|
logger.info("Entered part number in search: " + partNumber);
|
||||||
|
page.waitForTimeout(1500);
|
||||||
|
|
||||||
|
// Wait for and select the material from suggestions
|
||||||
|
Locator suggestion = page.locator(".suggestion-item").first();
|
||||||
|
try {
|
||||||
|
suggestion.waitFor(new Locator.WaitForOptions()
|
||||||
|
.setState(WaitForSelectorState.VISIBLE)
|
||||||
|
.setTimeout(5000));
|
||||||
|
suggestion.click();
|
||||||
|
logger.info("Selected material from suggestions");
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.warning("Could not select material from suggestions: " + e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for suppliers list to load
|
||||||
|
page.waitForTimeout(1500);
|
||||||
|
|
||||||
|
// Select the supplier by clicking on its item-list-element
|
||||||
|
// The supplier name is inside a supplier-item component
|
||||||
|
try {
|
||||||
|
Locator supplierElement = page.locator(".item-list-element")
|
||||||
|
.filter(new Locator.FilterOptions().setHasText(supplierName))
|
||||||
|
.first();
|
||||||
|
|
||||||
|
if (supplierElement.count() > 0) {
|
||||||
|
supplierElement.click();
|
||||||
|
logger.info("Selected supplier: " + supplierName);
|
||||||
|
page.waitForTimeout(500);
|
||||||
|
} else {
|
||||||
|
logger.warning("Supplier not found: " + supplierName);
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.warning("Could not select supplier: " + e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Click OK button inside the modal footer
|
||||||
|
Locator okButton = page.locator(".footer button:has-text('OK')");
|
||||||
|
try {
|
||||||
|
okButton.waitFor(new Locator.WaitForOptions()
|
||||||
|
.setState(WaitForSelectorState.VISIBLE)
|
||||||
|
.setTimeout(5000));
|
||||||
|
okButton.click();
|
||||||
|
logger.info("Clicked OK button");
|
||||||
|
} catch (Exception e) {
|
||||||
|
// Fallback: try to find any OK button
|
||||||
|
page.locator("button:has-text('OK')").first().click();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for the report to load
|
||||||
|
waitForResults();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Waits for the results to be loaded.
|
||||||
|
*/
|
||||||
|
public void waitForResults() {
|
||||||
|
// Wait for any "Prepare report" modal to disappear
|
||||||
|
try {
|
||||||
|
Locator prepareReportModal = page.locator(".modal-overlay, .modal-container, .modal-dialog");
|
||||||
|
if (prepareReportModal.count() > 0 && prepareReportModal.first().isVisible()) {
|
||||||
|
logger.info("Waiting for modal to close...");
|
||||||
|
prepareReportModal.first().waitFor(new Locator.WaitForOptions()
|
||||||
|
.setState(WaitForSelectorState.HIDDEN)
|
||||||
|
.setTimeout(30000));
|
||||||
|
logger.info("Modal closed");
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
// Modal might not be present or already closed
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Wait for report container or spinner to disappear
|
||||||
|
page.locator(".report-spinner, .spinner").waitFor(new Locator.WaitForOptions()
|
||||||
|
.setState(WaitForSelectorState.HIDDEN)
|
||||||
|
.setTimeout(30000));
|
||||||
|
} catch (Exception e) {
|
||||||
|
// Spinner might not be present
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
page.locator(REPORT_CONTAINER).waitFor(new Locator.WaitForOptions()
|
||||||
|
.setState(WaitForSelectorState.VISIBLE)
|
||||||
|
.setTimeout(30000));
|
||||||
|
page.waitForLoadState();
|
||||||
|
logger.info("Results loaded");
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.warning("Results container not found, continuing...");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Expands all collapsible boxes on the report page.
|
||||||
|
* The Vue CollapsibleBox component uses:
|
||||||
|
* - .box-content.collapsed for hidden content
|
||||||
|
* - .collapse-button in the header to toggle
|
||||||
|
* - The outer box element gets class "collapsible" when collapsed and clickable
|
||||||
|
*/
|
||||||
|
public void expandAllCollapsibleBoxes() {
|
||||||
|
try {
|
||||||
|
// Strategy: Keep clicking on collapsed boxes until none remain
|
||||||
|
// After each click, re-query the DOM since it changes
|
||||||
|
int maxIterations = 20; // Safety limit
|
||||||
|
int totalExpanded = 0;
|
||||||
|
|
||||||
|
for (int iteration = 0; iteration < maxIterations; iteration++) {
|
||||||
|
// Find collapsed content sections
|
||||||
|
Locator collapsedContent = page.locator(".box-content.collapsed");
|
||||||
|
int collapsedCount = collapsedContent.count();
|
||||||
|
|
||||||
|
if (collapsedCount == 0) {
|
||||||
|
break; // All expanded
|
||||||
|
}
|
||||||
|
|
||||||
|
final int iterNum = iteration + 1;
|
||||||
|
final int remaining = collapsedCount;
|
||||||
|
logger.info(() -> "Iteration " + iterNum + ": Found " + remaining + " collapsed boxes");
|
||||||
|
|
||||||
|
// Try to expand the first collapsed box
|
||||||
|
try {
|
||||||
|
Locator firstCollapsed = collapsedContent.first();
|
||||||
|
|
||||||
|
// Navigate up to find the clickable header span (the title)
|
||||||
|
// Structure: box > div > div.box-header > span (clickable)
|
||||||
|
Locator headerSpan = firstCollapsed.locator("xpath=preceding-sibling::div[contains(@class, 'box-header')]//span").first();
|
||||||
|
|
||||||
|
if (headerSpan.count() > 0 && headerSpan.isVisible()) {
|
||||||
|
headerSpan.click();
|
||||||
|
page.waitForTimeout(400); // Wait for animation
|
||||||
|
totalExpanded++;
|
||||||
|
logger.info(() -> "Expanded box via header span");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
                // Alternative: Try clicking the collapse button
                Locator collapseButton = firstCollapsed.locator("xpath=preceding-sibling::div[contains(@class, 'box-header')]//button[contains(@class, 'collapse-button')]").first();

                if (collapseButton.count() > 0 && collapseButton.isVisible()) {
                    collapseButton.click();
                    page.waitForTimeout(400);
                    totalExpanded++;
                    logger.info(() -> "Expanded box via collapse button");
                    continue;
                }

                // Alternative: Click on the parent box element which also has a click handler
                Locator parentBox = firstCollapsed.locator("xpath=ancestor::*[contains(@class, 'collapsible')]").first();

                if (parentBox.count() > 0 && parentBox.isVisible()) {
                    parentBox.click();
                    page.waitForTimeout(400);
                    totalExpanded++;
                    logger.info(() -> "Expanded box via parent collapsible element");
                    continue;
                }

                // If nothing worked, log and try next
                logger.warning("Could not find clickable element for collapsed box");

            } catch (Exception e) {
                final String errorMsg = e.getMessage();
                logger.warning(() -> "Error expanding box: " + errorMsg);
            }
        }

        // Final check
        int finalCollapsed = page.locator(".box-content.collapsed").count();
        final int expanded = totalExpanded;
        final int stillCollapsedFinal = finalCollapsed;
        logger.info(() -> "Expanded " + expanded + " boxes, " + stillCollapsedFinal + " still collapsed");

        page.waitForTimeout(500); // Wait for all animations to complete
    } catch (Exception e) {
        logger.warning("Could not expand all boxes: " + e.getMessage());
    }
}

    /**
     * Takes a full page screenshot with all content visible.
     * @param filename The filename without path (will be saved to target/screenshots/)
     */
    public void takeFullPageScreenshot(String filename) {
        try {
            // First expand all collapsible sections
            expandAllCollapsibleBoxes();

            // Wait a moment for any animations to complete
            page.waitForTimeout(500);

            // Take full page screenshot
            String path = "target/screenshots/" + filename + ".png";
            page.screenshot(new Page.ScreenshotOptions()
                    .setPath(Paths.get(path))
                    .setFullPage(true));

            logger.info(() -> "Full page screenshot saved: " + path);
        } catch (Exception e) {
            logger.warning("Could not take full page screenshot: " + e.getMessage());
        }
    }

    /**
     * Reads all result values from the page.
     * Based on Report.vue structure with .report-content-row elements.
     */
    public Map<String, Object> readResults() {
        waitForResults();

        // Expand all collapsible boxes to ensure all content is visible
        expandAllCollapsibleBoxes();

        Map<String, Object> results = new HashMap<>();

        // Read values from the "Summary" section (first 3-col grid)
        // Structure: <div class="report-content-row"><div>Label</div><div class="report-content-data-cell">Value €</div>...</div>
        results.put("mekA", readValueByLabel("MEK A"));
        results.put("logisticCost", readValueByLabel("Logistics cost"));
        results.put("mekB", readValueByLabel("MEK B"));

        // Read values from the "Weighted cost breakdown" section
        results.put("fcaFee", readValueByLabel("FCA fee"));
        results.put("transportation", readValueByLabel("Transportation costs"));
        results.put("d2d", readValueByLabel("Door 2 door costs"));
        results.put("airFreight", readValueByLabel("Air freight costs"));
        results.put("custom", readValueByLabel("Custom costs"));
        results.put("repackaging", readValueByLabel("Repackaging"));
        results.put("handling", readValueByLabel("Handling"));
        results.put("disposal", readValueByLabel("Disposal costs"));
        results.put("space", readValueByLabel("Space costs"));
        results.put("capital", readValueByLabel("Capital costs"));

        // Read safety stock from material section
        results.put("safetyStock", readIntValueByLabel("Safety stock"));

        // Read destination results
        results.put("destinations", readDestinationResults());

        return results;
    }
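    // Illustrative usage note: the returned map mixes Double values (cost fields), an Integer
    // ("safetyStock") and a List<Map<String, Object>> ("destinations"), so callers have to cast.
    // A hypothetical caller (the "reportPage" name is an assumption for illustration) might do:
    //
    //     Map<String, Object> results = reportPage.readResults();
    //     Double mekB = (Double) results.get("mekB");
    //     List<Map<String, Object>> destinations =
    //             (List<Map<String, Object>>) results.get("destinations");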
    /**
     * Reads a numeric value by finding the label in a report-content-row.
     * The structure is: label | value | percentage
     */
    private Double readValueByLabel(String label) {
        try {
            // Find the row containing the label, then get the first data cell
            String xpath = String.format(
                    "//div[contains(@class, 'report-content-row')]/div[contains(text(), '%s')]/following-sibling::div[contains(@class, 'report-content-data-cell')][1]",
                    label
            );
            Locator locator = page.locator("xpath=" + xpath).first();

            if (locator.count() == 0) {
                // Try alternative: text might be in a child element
                xpath = String.format(
                        "//div[contains(@class, 'report-content-row')]/div[contains(., '%s')]/following-sibling::div[contains(@class, 'report-content-data-cell')][1]",
                        label
                );
                locator = page.locator("xpath=" + xpath).first();
            }

            if (locator.count() == 0) {
                logger.warning(() -> "Field not found by label: " + label);
                return null;
            }

            String text = locator.textContent();
            if (text == null || text.isEmpty()) {
                return null;
            }

            // Remove currency symbols, percentage signs, and whitespace
            text = text.replaceAll("[€$%\\s]", "").trim();

            // Normalize the number format: a lone comma is treated as a German decimal
            // separator (1234,56); if both '.' and ',' occur, the value is assumed to be
            // the German "1.234,56" form with a thousands dot and a decimal comma.
            if (text.contains(",") && !text.contains(".")) {
                text = text.replace(",", ".");
            } else if (text.contains(",") && text.contains(".")) {
                // 1.234,56 format - remove the thousands separator, then swap the decimal comma
                text = text.replace(".", "").replace(",", ".");
            }

            return Double.parseDouble(text);
        } catch (Exception e) {
            logger.warning(() -> "Could not read numeric value for label: " + label + " - " + e.getMessage());
            return null;
        }
    }

    /**
     * Reads an integer value by label.
     */
    private Integer readIntValueByLabel(String label) {
        Double value = readValueByLabel(label);
        return value != null ? value.intValue() : null;
    }
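    // Worked example of the normalization above (illustrative values): a cell rendered as
    // "1.234,56 €" becomes "1.234,56" after stripping €/%/whitespace, then "1234.56" after
    // removing the thousands dot and swapping the decimal comma, and parses to 1234.56.
    // A plain "0,13 €" becomes "0.13".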
    /**
     * Reads results for all destinations from the report.
     * Destinations are in collapsible boxes with class containing destination info.
     */
    private List<Map<String, Object>> readDestinationResults() {
        List<Map<String, Object>> destinations = new ArrayList<>();

        try {
            // Each destination is in a collapsible-box with the destination name as title
            // Look for boxes that have destination-related content
            Locator destinationBoxes = page.locator(".box-gap:has(.report-content-container--2-col)");
            int count = destinationBoxes.count();

            logger.info(() -> "Found " + count + " potential destination boxes");

            // The first boxes hold the summary, cost breakdown, and material sections;
            // only boxes that contain destination-specific rows are read as destinations.
            for (int i = 0; i < count; i++) {
                Locator box = destinationBoxes.nth(i);

                // Check if this box has destination-specific content (Transit time [days], Container type)
                if (box.locator("div:has-text('Transit time')").count() > 0) {
                    Map<String, Object> destResult = new HashMap<>();

                    destResult.put("transitTime", readValueInBox(box, "Transit time [days]"));
                    destResult.put("stackedLayers", readValueInBox(box, "Stacked layers"));
                    destResult.put("containerUnitCount", readValueInBox(box, "Container unit count"));
                    destResult.put("containerType", readStringInBox(box, "Container type"));
                    destResult.put("limitingFactor", readStringInBox(box, "Limiting factor"));

                    destinations.add(destResult);
                    final int destCount = destinations.size();
                    logger.info(() -> "Read destination " + destCount + " results: " + destResult);
                }
            }
        } catch (Exception e) {
            logger.warning("Could not read destination results: " + e.getMessage());
        }

        return destinations;
    }

    private Double readValueInBox(Locator box, String label) {
        try {
            // Filter rows by the label text, then take the first data cell of the matching row
            Locator cell = box.locator(".report-content-row")
                    .filter(new Locator.FilterOptions().setHasText(label))
                    .locator(".report-content-data-cell")
                    .first();

            if (cell.count() == 0) {
                logger.warning(() -> "Could not find cell for label: " + label);
                return null;
            }

            String text = cell.textContent().replaceAll("[^0-9.,\\-]", "").trim();
            final String logText = text;
            logger.info(() -> "Read value for '" + label + "': " + logText);

            if (text.isEmpty() || text.equals("-")) {
                return null;
            }
            // Handle German decimal format
            if (text.contains(",") && !text.contains(".")) {
                text = text.replace(",", ".");
            }
            return Double.parseDouble(text);
        } catch (Exception e) {
            logger.warning(() -> "Error reading value for label '" + label + "': " + e.getMessage());
            return null;
        }
    }

    private String readStringInBox(Locator box, String label) {
        try {
            Locator cell = box.locator(".report-content-row")
                    .filter(new Locator.FilterOptions().setHasText(label))
                    .locator(".report-content-data-cell")
                    .first();

            if (cell.count() == 0) {
                logger.warning(() -> "Could not find string cell for label: " + label);
                return null;
            }

            String text = cell.textContent().trim();
            logger.info(() -> "Read string for '" + label + "': " + text);
            return text;
        } catch (Exception e) {
            logger.warning(() -> "Error reading string for label '" + label + "': " + e.getMessage());
            return null;
        }
    }
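    // Note on the row lookup in readValueInBox/readStringInBox: Locator.filter with setHasText
    // matches a substring anywhere inside the element, so filtering for "Transit time" also
    // matches a row labelled "Transit time [days]". Labels passed in therefore only need to be
    // unambiguous substrings within a single destination box.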
    /**
     * Verifies that results match expected values.
     */
    public void verifyResults(TestCaseExpected expected, double tolerance) {
        Map<String, Object> actual = readResults();

        // Log all actual values for debugging
        logger.info("======== ACTUAL VALUES FROM REPORT ========");
        logger.info(() -> "MEK A: " + actual.get("mekA"));
        logger.info(() -> "Logistics cost: " + actual.get("logisticCost"));
        logger.info(() -> "MEK B: " + actual.get("mekB"));
        logger.info(() -> "FCA fee: " + actual.get("fcaFee"));
        logger.info(() -> "Transportation: " + actual.get("transportation"));
        logger.info(() -> "D2D: " + actual.get("d2d"));
        logger.info(() -> "Air freight: " + actual.get("airFreight"));
        logger.info(() -> "Custom: " + actual.get("custom"));
        logger.info(() -> "Repackaging: " + actual.get("repackaging"));
        logger.info(() -> "Handling: " + actual.get("handling"));
        logger.info(() -> "Disposal: " + actual.get("disposal"));
        logger.info(() -> "Space: " + actual.get("space"));
        logger.info(() -> "Capital: " + actual.get("capital"));
        logger.info(() -> "Safety stock: " + actual.get("safetyStock"));
        logger.info("======== EXPECTED VALUES ========");
        logger.info(() -> "MEK A: " + expected.mekA());
        logger.info(() -> "Logistics cost: " + expected.logisticCost());
        logger.info(() -> "MEK B: " + expected.mekB());
        logger.info(() -> "FCA fee: " + expected.fcaFee());
        logger.info(() -> "Transportation: " + expected.transportation());
        logger.info(() -> "D2D: " + expected.d2d());
        logger.info(() -> "Air freight: " + expected.airFreight());
        logger.info(() -> "Custom: " + expected.custom());
        logger.info(() -> "Repackaging: " + expected.repackaging());
        logger.info(() -> "Handling: " + expected.handling());
        logger.info(() -> "Disposal: " + expected.disposal());
        logger.info(() -> "Space: " + expected.space());
        logger.info(() -> "Capital: " + expected.capital());
        logger.info(() -> "Safety stock: " + expected.safetyStock());
        logger.info("============================================");

        verifyNumericResult("MEK_A", expected.mekA(), (Double) actual.get("mekA"), tolerance);
        verifyNumericResult("LOGISTIC_COST", expected.logisticCost(), (Double) actual.get("logisticCost"), tolerance);
        verifyNumericResult("MEK_B", expected.mekB(), (Double) actual.get("mekB"), tolerance);
        verifyNumericResult("FCA_FEE", expected.fcaFee(), (Double) actual.get("fcaFee"), tolerance);
        verifyNumericResult("TRANSPORTATION", expected.transportation(), (Double) actual.get("transportation"), tolerance);
        verifyNumericResult("D2D", expected.d2d(), (Double) actual.get("d2d"), tolerance);
        verifyNumericResult("AIR_FREIGHT", expected.airFreight(), (Double) actual.get("airFreight"), tolerance);
        verifyNumericResult("CUSTOM", expected.custom(), (Double) actual.get("custom"), tolerance);
        verifyNumericResult("REPACKAGING", expected.repackaging(), (Double) actual.get("repackaging"), tolerance);
        verifyNumericResult("HANDLING", expected.handling(), (Double) actual.get("handling"), tolerance);
        verifyNumericResult("DISPOSAL", expected.disposal(), (Double) actual.get("disposal"), tolerance);
        verifyNumericResult("SPACE", expected.space(), (Double) actual.get("space"), tolerance);
        verifyNumericResult("CAPITAL", expected.capital(), (Double) actual.get("capital"), tolerance);

        // Verify destinations
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> actualDestinations = (List<Map<String, Object>>) actual.get("destinations");
        List<DestinationExpected> expectedDestinations = expected.destinations();

        if (expectedDestinations.size() != actualDestinations.size()) {
            throw new AssertionError(String.format(
                    "Destination count mismatch: expected %d, got %d",
                    expectedDestinations.size(), actualDestinations.size()
            ));
        }

        for (int i = 0; i < expectedDestinations.size(); i++) {
            DestinationExpected expDest = expectedDestinations.get(i);
            Map<String, Object> actDest = actualDestinations.get(i);

            String prefix = "Destination " + (i + 1) + " ";

            // Verify transit time (always expected to have a value)
            if (expDest.transitTime() != null) {
                verifyNumericResult(prefix + "TRANSIT_TIME",
                        expDest.transitTime().doubleValue(),
                        (Double) actDest.get("transitTime"), tolerance);
            }

            // Verify stacked layers (null expected = "-" in UI)
            verifyNullableNumericResult(prefix + "STACKED_LAYERS",
                    expDest.stackedLayers(),
                    (Double) actDest.get("stackedLayers"), tolerance);

            // Verify container unit count (null expected = "-" in UI)
            verifyNullableNumericResult(prefix + "CONTAINER_UNIT_COUNT",
                    expDest.containerUnitCount(),
                    (Double) actDest.get("containerUnitCount"), tolerance);

            // Verify container type (null or "-" expected = "-" in UI)
            String expContainerType = expDest.containerType();
            String actContainerType = (String) actDest.get("containerType");
            verifyStringResult(prefix + "CONTAINER_TYPE", expContainerType, actContainerType);

            // Verify limiting factor (null or "-" expected = "-" in UI)
            String expLimitingFactor = expDest.limitingFactor();
            String actLimitingFactor = (String) actDest.get("limitingFactor");
            verifyStringResult(prefix + "LIMITING_FACTOR", expLimitingFactor, actLimitingFactor);
        }

        logger.info("All results verified successfully");
    }

    private void verifyNumericResult(String fieldName, double expected, Double actual, double tolerance) {
        // If actual is null and expected is effectively zero, treat as pass
        // (some fields may not be displayed in the UI when their value is 0)
        if (actual == null) {
            if (Math.abs(expected) < 0.001) {
                logger.info(() -> "Field '" + fieldName + "': actual is null, expected ~0 - treating as pass");
                return;
            }
            throw new AssertionError(String.format(
                    "Field '%s': actual value is null, expected %f",
                    fieldName, expected
            ));
        }

        double diff = Math.abs(expected - actual);
        double relativeDiff = expected != 0 ? diff / Math.abs(expected) : diff;

        if (relativeDiff > tolerance) {
            throw new AssertionError(String.format(
                    "Field '%s': expected %f, got %f (diff: %.4f, tolerance: %.2f%%)",
                    fieldName, expected, actual, relativeDiff * 100, tolerance * 100
            ));
        }
    }
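    // Worked example for the relative-tolerance check above (illustrative numbers): with
    // expected = 33.76 and actual = 33.80, diff = 0.04 and relativeDiff = 0.04 / 33.76,
    // which is about 0.0012, so the field passes at a 1% tolerance (0.01) but fails at
    // 0.1% (0.001). When expected is 0, the absolute difference itself is compared
    // against the tolerance.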
    /**
     * Verifies a nullable numeric result. If expected is null, actual should also be null.
     */
    private void verifyNullableNumericResult(String fieldName, Integer expected, Double actual, double tolerance) {
        if (expected == null) {
            // Expected null means UI shows "-"
            if (actual != null) {
                throw new AssertionError(String.format(
                        "Field '%s': expected null (UI shows '-'), got %f",
                        fieldName, actual
                ));
            }
            return;
        }

        // Expected has a value, verify it
        verifyNumericResult(fieldName, expected.doubleValue(), actual, tolerance);
    }

    /**
     * Verifies a string result. Handles null/"-" as equivalent.
     */
    private void verifyStringResult(String fieldName, String expected, String actual) {
        // Normalize "-" to null for comparison
        String normExpected = (expected == null || "-".equals(expected)) ? null : expected;
        String normActual = (actual == null || "-".equals(actual)) ? null : actual;

        if (normExpected == null && normActual == null) {
            return; // Both null/"-" = match
        }

        if (normExpected == null || normActual == null || !normExpected.equals(normActual)) {
            throw new AssertionError(String.format(
                    "Field '%s': expected '%s', got '%s'",
                    fieldName, expected, actual
            ));
        }
    }
}
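Taken together, the report page object above is driven from a test roughly as follows. This is a minimal sketch only: the ReportPage type name, the reportPage parameter and the 1% tolerance are assumptions for illustration, not code from this branch.

import de.avatic.lcc.e2e.testdata.TestCases;

class Case1ReportSketch {
    void verifyCase1(ReportPage reportPage) {
        // Expand everything, capture evidence, then compare against the expected values
        reportPage.takeFullPageScreenshot("case_1_report");
        reportPage.verifyResults(TestCases.CASE_1.expected(), 0.01); // 1% relative tolerance
    }
}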
56
src/test/java/de/avatic/lcc/e2e/testdata/DestinationExpected.java
vendored
Normal file
56
src/test/java/de/avatic/lcc/e2e/testdata/DestinationExpected.java
vendored
Normal file
|
|
@ -0,0 +1,56 @@
package de.avatic.lcc.e2e.testdata;

/**
 * Expected output values for a single destination in a test case.
 * Nullable fields (Integer, String) indicate the UI shows "-" when no main run/D2D is configured.
 */
public record DestinationExpected(
        Integer transitTime,
        Integer stackedLayers,
        Integer containerUnitCount,
        String containerType,
        String limitingFactor
) {
    public static Builder builder() {
        return new Builder();
    }

    public static class Builder {
        private Integer transitTime;
        private Integer stackedLayers;
        private Integer containerUnitCount;
        private String containerType;
        private String limitingFactor;

        public Builder transitTime(Integer transitTime) {
            this.transitTime = transitTime;
            return this;
        }

        public Builder stackedLayers(Integer stackedLayers) {
            this.stackedLayers = stackedLayers;
            return this;
        }

        public Builder containerUnitCount(Integer containerUnitCount) {
            this.containerUnitCount = containerUnitCount;
            return this;
        }

        public Builder containerType(String containerType) {
            this.containerType = containerType;
            return this;
        }

        public Builder limitingFactor(String limitingFactor) {
            this.limitingFactor = limitingFactor;
            return this;
        }

        public DestinationExpected build() {
            return new DestinationExpected(
                    transitTime, stackedLayers, containerUnitCount, containerType, limitingFactor
            );
        }
    }
}
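The builder keeps the nullable fields explicit. A destination with no container leg, as used in test case 1 further below, reads like this (values mirror that case and are otherwise illustrative):

DestinationExpected matrixOnly = DestinationExpected.builder()
        .transitTime(3)           // transit time in days
        .stackedLayers(null)      // UI shows "-" when no container is planned
        .containerUnitCount(null)
        .containerType(null)
        .limitingFactor(null)
        .build();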
91
src/test/java/de/avatic/lcc/e2e/testdata/DestinationInput.java
vendored
Normal file
91
src/test/java/de/avatic/lcc/e2e/testdata/DestinationInput.java
vendored
Normal file
|
|
@ -0,0 +1,91 @@
|
||||||
|
package de.avatic.lcc.e2e.testdata;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Input data for a single destination in a test case.
|
||||||
|
*/
|
||||||
|
public record DestinationInput(
|
||||||
|
String name,
|
||||||
|
int quantity,
|
||||||
|
boolean d2d,
|
||||||
|
String route,
|
||||||
|
Double d2dCost,
|
||||||
|
Integer d2dDuration,
|
||||||
|
Double handlingCost,
|
||||||
|
Double repackingCost,
|
||||||
|
Double disposalCost,
|
||||||
|
boolean customHandling
|
||||||
|
) {
|
||||||
|
public static Builder builder() {
|
||||||
|
return new Builder();
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Builder {
|
||||||
|
private String name;
|
||||||
|
private int quantity;
|
||||||
|
private boolean d2d;
|
||||||
|
private String route;
|
||||||
|
private Double d2dCost;
|
||||||
|
private Integer d2dDuration;
|
||||||
|
private Double handlingCost;
|
||||||
|
private Double repackingCost;
|
||||||
|
private Double disposalCost;
|
||||||
|
private boolean customHandling;
|
||||||
|
|
||||||
|
public Builder name(String name) {
|
||||||
|
this.name = name;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder quantity(int quantity) {
|
||||||
|
this.quantity = quantity;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder d2d(boolean d2d) {
|
||||||
|
this.d2d = d2d;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder route(String route) {
|
||||||
|
this.route = route;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder d2dCost(Double d2dCost) {
|
||||||
|
this.d2dCost = d2dCost;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder d2dDuration(Integer d2dDuration) {
|
||||||
|
this.d2dDuration = d2dDuration;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder handlingCost(Double handlingCost) {
|
||||||
|
this.handlingCost = handlingCost;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder repackingCost(Double repackingCost) {
|
||||||
|
this.repackingCost = repackingCost;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder disposalCost(Double disposalCost) {
|
||||||
|
this.disposalCost = disposalCost;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder customHandling(boolean customHandling) {
|
||||||
|
this.customHandling = customHandling;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public DestinationInput build() {
|
||||||
|
return new DestinationInput(
|
||||||
|
name, quantity, d2d, route, d2dCost, d2dDuration,
|
||||||
|
handlingCost, repackingCost, disposalCost, customHandling
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
12
src/test/java/de/avatic/lcc/e2e/testdata/TestCase.java
vendored
Normal file
12
src/test/java/de/avatic/lcc/e2e/testdata/TestCase.java
vendored
Normal file
|
|
@ -0,0 +1,12 @@
|
package de.avatic.lcc.e2e.testdata;

/**
 * Represents a complete E2E test case with input data and expected output.
 */
public record TestCase(
        String id,
        String name,
        TestCaseInput input,
        TestCaseExpected expected
) {
}
128
src/test/java/de/avatic/lcc/e2e/testdata/TestCaseExpected.java
vendored
Normal file
128
src/test/java/de/avatic/lcc/e2e/testdata/TestCaseExpected.java
vendored
Normal file
|
|
@ -0,0 +1,128 @@
|
||||||
|
package de.avatic.lcc.e2e.testdata;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Expected output values for a test case containing all calculated results.
|
||||||
|
*/
|
||||||
|
public record TestCaseExpected(
|
||||||
|
double mekA,
|
||||||
|
double logisticCost,
|
||||||
|
double mekB,
|
||||||
|
double fcaFee,
|
||||||
|
double transportation,
|
||||||
|
double d2d,
|
||||||
|
double airFreight,
|
||||||
|
double custom,
|
||||||
|
double repackaging,
|
||||||
|
double handling,
|
||||||
|
double disposal,
|
||||||
|
double space,
|
||||||
|
double capital,
|
||||||
|
int safetyStock,
|
||||||
|
List<DestinationExpected> destinations
|
||||||
|
) {
|
||||||
|
public static Builder builder() {
|
||||||
|
return new Builder();
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Builder {
|
||||||
|
private double mekA;
|
||||||
|
private double logisticCost;
|
||||||
|
private double mekB;
|
||||||
|
private double fcaFee;
|
||||||
|
private double transportation;
|
||||||
|
private double d2d;
|
||||||
|
private double airFreight;
|
||||||
|
private double custom;
|
||||||
|
private double repackaging;
|
||||||
|
private double handling;
|
||||||
|
private double disposal;
|
||||||
|
private double space;
|
||||||
|
private double capital;
|
||||||
|
private int safetyStock;
|
||||||
|
private List<DestinationExpected> destinations = List.of();
|
||||||
|
|
||||||
|
public Builder mekA(double mekA) {
|
||||||
|
this.mekA = mekA;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder logisticCost(double logisticCost) {
|
||||||
|
this.logisticCost = logisticCost;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder mekB(double mekB) {
|
||||||
|
this.mekB = mekB;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder fcaFee(double fcaFee) {
|
||||||
|
this.fcaFee = fcaFee;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder transportation(double transportation) {
|
||||||
|
this.transportation = transportation;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder d2d(Double d2d) {
|
||||||
|
this.d2d = d2d;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder airFreight(double airFreight) {
|
||||||
|
this.airFreight = airFreight;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder custom(double custom) {
|
||||||
|
this.custom = custom;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder repackaging(double repackaging) {
|
||||||
|
this.repackaging = repackaging;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder handling(double handling) {
|
||||||
|
this.handling = handling;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder disposal(double disposal) {
|
||||||
|
this.disposal = disposal;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder space(double space) {
|
||||||
|
this.space = space;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder capital(double capital) {
|
||||||
|
this.capital = capital;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder safetyStock(int safetyStock) {
|
||||||
|
this.safetyStock = safetyStock;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder destinations(List<DestinationExpected> destinations) {
|
||||||
|
this.destinations = destinations;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public TestCaseExpected build() {
|
||||||
|
return new TestCaseExpected(
|
||||||
|
mekA, logisticCost, mekB, fcaFee, transportation, d2d, airFreight,
|
||||||
|
custom, repackaging, handling, disposal, space, capital, safetyStock, destinations
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
150
src/test/java/de/avatic/lcc/e2e/testdata/TestCaseInput.java
vendored
Normal file
150
src/test/java/de/avatic/lcc/e2e/testdata/TestCaseInput.java
vendored
Normal file
|
|
@ -0,0 +1,150 @@
|
||||||
|
package de.avatic.lcc.e2e.testdata;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Input data for a test case containing all form values to be entered.
|
||||||
|
*/
|
||||||
|
public record TestCaseInput(
|
||||||
|
String partNumber,
|
||||||
|
String supplierName,
|
||||||
|
boolean loadFromPrevious,
|
||||||
|
Integer hsCode,
|
||||||
|
double tariffRate,
|
||||||
|
double price,
|
||||||
|
double overseaShare,
|
||||||
|
boolean fcaFee,
|
||||||
|
int length,
|
||||||
|
int width,
|
||||||
|
int height,
|
||||||
|
String dimensionUnit,
|
||||||
|
int weight,
|
||||||
|
String weightUnit,
|
||||||
|
int piecesPerUnit,
|
||||||
|
boolean stacked,
|
||||||
|
boolean mixed,
|
||||||
|
List<DestinationInput> destinations
|
||||||
|
) {
|
||||||
|
public static Builder builder() {
|
||||||
|
return new Builder();
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Builder {
|
||||||
|
private String partNumber;
|
||||||
|
private String supplierName;
|
||||||
|
private boolean loadFromPrevious;
|
||||||
|
private Integer hsCode;
|
||||||
|
private double tariffRate;
|
||||||
|
private double price;
|
||||||
|
private double overseaShare;
|
||||||
|
private boolean fcaFee;
|
||||||
|
private int length;
|
||||||
|
private int width;
|
||||||
|
private int height;
|
||||||
|
private String dimensionUnit = "cm";
|
||||||
|
private int weight;
|
||||||
|
private String weightUnit = "kg";
|
||||||
|
private int piecesPerUnit;
|
||||||
|
private boolean stacked;
|
||||||
|
private boolean mixed;
|
||||||
|
private List<DestinationInput> destinations = List.of();
|
||||||
|
|
||||||
|
public Builder partNumber(String partNumber) {
|
||||||
|
this.partNumber = partNumber;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder supplierName(String supplierName) {
|
||||||
|
this.supplierName = supplierName;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder loadFromPrevious(boolean loadFromPrevious) {
|
||||||
|
this.loadFromPrevious = loadFromPrevious;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder hsCode(Integer hsCode) {
|
||||||
|
this.hsCode = hsCode;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder tariffRate(double tariffRate) {
|
||||||
|
this.tariffRate = tariffRate;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder price(double price) {
|
||||||
|
this.price = price;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder overseaShare(double overseaShare) {
|
||||||
|
this.overseaShare = overseaShare;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder fcaFee(boolean fcaFee) {
|
||||||
|
this.fcaFee = fcaFee;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder length(int length) {
|
||||||
|
this.length = length;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder width(int width) {
|
||||||
|
this.width = width;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder height(int height) {
|
||||||
|
this.height = height;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder dimensionUnit(String dimensionUnit) {
|
||||||
|
this.dimensionUnit = dimensionUnit;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder weight(int weight) {
|
||||||
|
this.weight = weight;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder weightUnit(String weightUnit) {
|
||||||
|
this.weightUnit = weightUnit;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder piecesPerUnit(int piecesPerUnit) {
|
||||||
|
this.piecesPerUnit = piecesPerUnit;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder stacked(boolean stacked) {
|
||||||
|
this.stacked = stacked;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder mixed(boolean mixed) {
|
||||||
|
this.mixed = mixed;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder destinations(List<DestinationInput> destinations) {
|
||||||
|
this.destinations = destinations;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public TestCaseInput build() {
|
||||||
|
return new TestCaseInput(
|
||||||
|
partNumber, supplierName, loadFromPrevious, hsCode, tariffRate, price,
|
||||||
|
overseaShare, fcaFee, length, width, height, dimensionUnit, weight,
|
||||||
|
weightUnit, piecesPerUnit, stacked, mixed, destinations
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
901
src/test/java/de/avatic/lcc/e2e/testdata/TestCases.java
vendored
Normal file
901
src/test/java/de/avatic/lcc/e2e/testdata/TestCases.java
vendored
Normal file
|
|
@ -0,0 +1,901 @@
|
||||||
|
package de.avatic.lcc.e2e.testdata;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Static test case definitions extracted from Testfälle.xlsx.
|
||||||
|
* These test cases cover various logistics calculation scenarios including:
|
||||||
|
* - EU and Non-EU suppliers
|
||||||
|
* - Matrix, D2D, and Container transport modes
|
||||||
|
* - Different packaging configurations
|
||||||
|
* - Single and multiple destinations
|
||||||
|
*/
|
||||||
|
public final class TestCases {
|
||||||
|
|
||||||
|
private TestCases() {
|
||||||
|
// Utility class
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 1: EU Supplier, user - Matrix - Direkt
|
||||||
|
* Single destination, no FCA fee, standard packaging
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_1 = new TestCase(
|
||||||
|
"1",
|
||||||
|
"EU Supplier, user - Matrix - Direkt",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("3064540201")
|
||||||
|
.supplierName("Ireland supplier")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(0.0)
|
||||||
|
.price(8.0)
|
||||||
|
.overseaShare(0.0)
|
||||||
|
.fcaFee(false)
|
||||||
|
.length(114)
|
||||||
|
.width(114)
|
||||||
|
.height(95)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(850)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(20)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(true)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Hamburg (KION plant)")
|
||||||
|
.quantity(5)
|
||||||
|
.d2d(false)
|
||||||
|
.route("IE SUP,HH")
|
||||||
|
.customHandling(false)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(8.0)
|
||||||
|
.logisticCost(33.76)
|
||||||
|
.mekB(41.76)
|
||||||
|
.fcaFee(0.0)
|
||||||
|
.transportation(4.18)
|
||||||
|
.d2d(0.0)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(0.0)
|
||||||
|
.repackaging(0.0)
|
||||||
|
.handling(4.392)
|
||||||
|
.disposal(0.0)
|
||||||
|
.space(24.95)
|
||||||
|
.capital(0.13)
|
||||||
|
.safetyStock(10)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(3)
|
||||||
|
.stackedLayers(null)
|
||||||
|
.containerUnitCount(null)
|
||||||
|
.containerType(null)
|
||||||
|
.limitingFactor(null)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 2: EU-supplier, standard - Matrix - Über Hop
|
||||||
|
* Two destinations, with FCA fee, individual packaging
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_2 = new TestCase(
|
||||||
|
"2",
|
||||||
|
"EU-supplier, standard - Matrix - Über Hop",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("4222640104")
|
||||||
|
.supplierName("Hamburg (KION plant)")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(0.0)
|
||||||
|
.price(230.0)
|
||||||
|
.overseaShare(0.0)
|
||||||
|
.fcaFee(true)
|
||||||
|
.length(120)
|
||||||
|
.width(80)
|
||||||
|
.height(95)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(1200)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(2000)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(true)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Geisa (KION plant)")
|
||||||
|
.quantity(3500)
|
||||||
|
.d2d(false)
|
||||||
|
.route("HH,WH STO,FGG")
|
||||||
|
.handlingCost(3.5)
|
||||||
|
.repackingCost(2.7)
|
||||||
|
.disposalCost(6.5)
|
||||||
|
.customHandling(true)
|
||||||
|
.build(),
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Aschaffenburg (KION plant)")
|
||||||
|
.quantity(25000)
|
||||||
|
.d2d(false)
|
||||||
|
.route("HH,WH ULHA,AB")
|
||||||
|
.handlingCost(3.0)
|
||||||
|
.repackingCost(3.3)
|
||||||
|
.disposalCost(8.0)
|
||||||
|
.customHandling(true)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(230.0)
|
||||||
|
.logisticCost(1.50)
|
||||||
|
.mekB(231.50)
|
||||||
|
.fcaFee(0.46)
|
||||||
|
.transportation(0.02)
|
||||||
|
.d2d(0.0)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(0.0)
|
||||||
|
.repackaging(0.00)
|
||||||
|
.handling(0.00)
|
||||||
|
.disposal(0.00)
|
||||||
|
.space(0.01)
|
||||||
|
.capital(1.00)
|
||||||
|
.safetyStock(10)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(6)
|
||||||
|
.stackedLayers(null)
|
||||||
|
.containerUnitCount(null)
|
||||||
|
.containerType(null)
|
||||||
|
.limitingFactor(null)
|
||||||
|
.build(),
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(6)
|
||||||
|
.stackedLayers(null)
|
||||||
|
.containerUnitCount(null)
|
||||||
|
.containerType(null)
|
||||||
|
.limitingFactor(null)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 3: Non-EU supplier, user - Matrix - Direkt
|
||||||
|
* Three destinations, with customs
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_3 = new TestCase(
|
||||||
|
"3",
|
||||||
|
"Non-EU supplier, user - Matrix - Direkt",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("4222640803")
|
||||||
|
.supplierName("Turkey supplier")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(1.7)
|
||||||
|
.price(11.0)
|
||||||
|
.overseaShare(0.0)
|
||||||
|
.fcaFee(true)
|
||||||
|
.length(120)
|
||||||
|
.width(100)
|
||||||
|
.height(80)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(570)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(2000)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(true)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Hamburg (KION plant)")
|
||||||
|
.quantity(60000)
|
||||||
|
.d2d(false)
|
||||||
|
.route("Turkey sup ...,WH HH,HH")
|
||||||
|
.customHandling(false)
|
||||||
|
.build(),
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Aschaffenburg (KION plant)")
|
||||||
|
.quantity(80000)
|
||||||
|
.d2d(false)
|
||||||
|
.route("Turkey sup ...,WH ULHA,AB")
|
||||||
|
.handlingCost(6.0)
|
||||||
|
.repackingCost(6.0)
|
||||||
|
.disposalCost(6.0)
|
||||||
|
.customHandling(true)
|
||||||
|
.build(),
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Luzzara (KION plant)")
|
||||||
|
.quantity(30000)
|
||||||
|
.d2d(false)
|
||||||
|
.route("Turkey sup ...,LZZ")
|
||||||
|
.customHandling(false)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(11.0)
|
||||||
|
.logisticCost(0.33)
|
||||||
|
.mekB(11.33)
|
||||||
|
.fcaFee(0.02)
|
||||||
|
.transportation(0.06)
|
||||||
|
.d2d(0.0)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(0.21)
|
||||||
|
.repackaging(0.00)
|
||||||
|
.handling(0.00)
|
||||||
|
.disposal(0.00)
|
||||||
|
.space(0.00)
|
||||||
|
.capital(0.03)
|
||||||
|
.safetyStock(10)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(6)
|
||||||
|
.stackedLayers(null)
|
||||||
|
.containerUnitCount(null)
|
||||||
|
.containerType(null)
|
||||||
|
.limitingFactor(null)
|
||||||
|
.build(),
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(6)
|
||||||
|
.stackedLayers(null)
|
||||||
|
.containerUnitCount(null)
|
||||||
|
.containerType(null)
|
||||||
|
.limitingFactor(null)
|
||||||
|
.build(),
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(3)
|
||||||
|
.stackedLayers(null)
|
||||||
|
.containerUnitCount(null)
|
||||||
|
.containerType(null)
|
||||||
|
.limitingFactor(null)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 3b: Non-EU supplier, standard - Matrix - Direkt
|
||||||
|
* Variation of case 3 with standard packaging
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_3B = new TestCase(
|
||||||
|
"3b",
|
||||||
|
"Non-EU supplier, standard - Matrix - Direkt",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("4222640805")
|
||||||
|
.supplierName("Turkey supplier")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(1.7)
|
||||||
|
.price(11.0)
|
||||||
|
.overseaShare(0.0)
|
||||||
|
.fcaFee(true)
|
||||||
|
.length(120)
|
||||||
|
.width(100)
|
||||||
|
.height(80)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(570)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(2000)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(true)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Hamburg (KION plant)")
|
||||||
|
.quantity(60000)
|
||||||
|
.d2d(false)
|
||||||
|
.route("Turkey sup ...,WH HH,HH")
|
||||||
|
.customHandling(false)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(11.0)
|
||||||
|
.logisticCost(0.33)
|
||||||
|
.mekB(11.33)
|
||||||
|
.fcaFee(0.02)
|
||||||
|
.transportation(0.06)
|
||||||
|
.d2d(0.0)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(0.21)
|
||||||
|
.repackaging(0.0)
|
||||||
|
.handling(0.01)
|
||||||
|
.disposal(0.0)
|
||||||
|
.space(0.01)
|
||||||
|
.capital(0.03)
|
||||||
|
.safetyStock(10)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(6)
|
||||||
|
.stackedLayers(null)
|
||||||
|
.containerUnitCount(null)
|
||||||
|
.containerType(null)
|
||||||
|
.limitingFactor(null)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 4: Non-EU supplier, standard - D2D - Über Hop
|
||||||
|
* D2D transport with customs, large volume
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_4 = new TestCase(
|
||||||
|
"4",
|
||||||
|
"Non-EU supplier, standard - D2D - Über Hop",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("5512640106")
|
||||||
|
.supplierName("Turkey supplier")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(3.0)
|
||||||
|
.price(56.87)
|
||||||
|
.overseaShare(100.0)
|
||||||
|
.fcaFee(false)
|
||||||
|
.length(114)
|
||||||
|
.width(114)
|
||||||
|
.height(95)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(850)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(10000)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(true)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Hamburg (KION plant)")
|
||||||
|
.quantity(1200000)
|
||||||
|
.d2d(true)
|
||||||
|
.route("Turkey sup ...,WH HH,HH")
|
||||||
|
.d2dCost(6500.0)
|
||||||
|
.d2dDuration(47)
|
||||||
|
.customHandling(false)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(56.87)
|
||||||
|
.logisticCost(2.61)
|
||||||
|
.mekB(59.48)
|
||||||
|
.fcaFee(0.0)
|
||||||
|
.transportation(0.0)
|
||||||
|
.d2d(0.03)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(1.71)
|
||||||
|
.repackaging(0.0)
|
||||||
|
.handling(0.00)
|
||||||
|
.disposal(0.00)
|
||||||
|
.space(0.00)
|
||||||
|
.capital(0.87)
|
||||||
|
.safetyStock(10)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(47)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(240000)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Weight")
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 5: EU Supplier, user - D2D - Über Hop
|
||||||
|
* D2D transport with custom handling costs
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_5 = new TestCase(
|
||||||
|
"5",
|
||||||
|
"EU Supplier, user - D2D - Über Hop",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("8212640113")
|
||||||
|
.supplierName("Ireland supplier")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(0.0)
|
||||||
|
.price(8.0)
|
||||||
|
.overseaShare(75.0)
|
||||||
|
.fcaFee(true)
|
||||||
|
.length(114)
|
||||||
|
.width(114)
|
||||||
|
.height(95)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(850)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(2000)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(false)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Hamburg (KION plant)")
|
||||||
|
.quantity(500)
|
||||||
|
.d2d(true)
|
||||||
|
.route("IE SUP,WH HH,HH")
|
||||||
|
.d2dCost(2500.0)
|
||||||
|
.d2dDuration(12)
|
||||||
|
.handlingCost(120.0)
|
||||||
|
.repackingCost(230.0)
|
||||||
|
.disposalCost(5.0)
|
||||||
|
.customHandling(true)
|
||||||
|
.build(),
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Aschaffenburg (KION plant)")
|
||||||
|
.quantity(1000)
|
||||||
|
.d2d(true)
|
||||||
|
.route("IE SUP,WH ULHA,AB")
|
||||||
|
.d2dCost(1500.0)
|
||||||
|
.d2dDuration(10)
|
||||||
|
.handlingCost(2.5)
|
||||||
|
.repackingCost(5.0)
|
||||||
|
.disposalCost(6.0)
|
||||||
|
.customHandling(true)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(8.0)
|
||||||
|
.logisticCost(8.61)
|
||||||
|
.mekB(16.61)
|
||||||
|
.fcaFee(0.02)
|
||||||
|
.transportation(0.0)
|
||||||
|
.d2d(8.0)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(0.0)
|
||||||
|
.repackaging(0.04)
|
||||||
|
.handling(0.24)
|
||||||
|
.disposal(0.00)
|
||||||
|
.space(0.17)
|
||||||
|
.capital(0.16)
|
||||||
|
.safetyStock(10)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(12)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(48000)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Weight")
|
||||||
|
.build(),
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(10)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(48000)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Weight")
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 6: EU-supplier, standard - D2D - Über Hop
|
||||||
|
* D2D transport with custom handling, three destinations
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_6 = new TestCase(
|
||||||
|
"6",
|
||||||
|
"EU-supplier, standard - D2D - Über Hop",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("8212640827")
|
||||||
|
.supplierName("Hamburg (KION plant)")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(100.0)
|
||||||
|
.price(18.2)
|
||||||
|
.overseaShare(0.0)
|
||||||
|
.fcaFee(false)
|
||||||
|
.length(1140)
|
||||||
|
.width(1140)
|
||||||
|
.height(950)
|
||||||
|
.dimensionUnit("mm")
|
||||||
|
.weight(99000)
|
||||||
|
.weightUnit("g")
|
||||||
|
.piecesPerUnit(2000)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(false)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Hamburg (KION plant)")
|
||||||
|
.quantity(4000)
|
||||||
|
.d2d(true)
|
||||||
|
.d2dCost(0.01)
|
||||||
|
.d2dDuration(1)
|
||||||
|
.handlingCost(0.0)
|
||||||
|
.repackingCost(0.0)
|
||||||
|
.disposalCost(0.0)
|
||||||
|
.customHandling(true)
|
||||||
|
.build(),
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Aschaffenburg (KION plant)")
|
||||||
|
.quantity(6000)
|
||||||
|
.d2d(true)
|
||||||
|
.d2dCost(100.0)
|
||||||
|
.d2dDuration(2)
|
||||||
|
.customHandling(false)
|
||||||
|
.build(),
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Luzzara (KION plant)")
|
||||||
|
.quantity(3000)
|
||||||
|
.d2d(true)
|
||||||
|
.d2dCost(200.0)
|
||||||
|
.d2dDuration(3)
|
||||||
|
.handlingCost(20.0)
|
||||||
|
.repackingCost(7.0)
|
||||||
|
.disposalCost(11.0)
|
||||||
|
.customHandling(true)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(18.2)
|
||||||
|
.logisticCost(0.41)
|
||||||
|
.mekB(18.61)
|
||||||
|
.fcaFee(0.0)
|
||||||
|
.transportation(0.0)
|
||||||
|
.d2d(0.07)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(0.0)
|
||||||
|
.repackaging(0.00)
|
||||||
|
.handling(0.01)
|
||||||
|
.disposal(0.00)
|
||||||
|
.space(0.03)
|
||||||
|
.capital(0.30)
|
||||||
|
.safetyStock(10)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(1)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(80000)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Volume")
|
||||||
|
.build(),
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(2)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(80000)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Volume")
|
||||||
|
.build(),
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(3)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(80000)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Volume")
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 7: Non-EU supplier, user - D2D - Über Hop
|
||||||
|
* D2D transport from China with customs and air freight
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_7 = new TestCase(
|
||||||
|
"7",
|
||||||
|
"Non-EU supplier, user - D2D - Über Hop",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("8222640822")
|
||||||
|
.supplierName("Yantian supplier")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(3.0)
|
||||||
|
.price(56.87)
|
||||||
|
.overseaShare(100.0)
|
||||||
|
.fcaFee(true)
|
||||||
|
.length(114)
|
||||||
|
.width(114)
|
||||||
|
.height(95)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(850)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(10000)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(false)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Stříbro (KION plant)")
|
||||||
|
.quantity(50000)
|
||||||
|
.d2d(true)
|
||||||
|
.route("Yantian s ...,CNSZX,DEHAM,WH ZBU,STR")
|
||||||
|
.d2dCost(6500.0)
|
||||||
|
.d2dDuration(47)
|
||||||
|
.customHandling(false)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(56.87)
|
||||||
|
.logisticCost(5.48)
|
||||||
|
.mekB(62.35)
|
||||||
|
.fcaFee(0.11)
|
||||||
|
.transportation(0.0)
|
||||||
|
.d2d(0.39)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(1.72)
|
||||||
|
.repackaging(0.00)
|
||||||
|
.handling(0.00)
|
||||||
|
.disposal(0.00)
|
||||||
|
.space(0.01)
|
||||||
|
.capital(3.25)
|
||||||
|
.safetyStock(100)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(47)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(240000)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Weight")
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 8: Non-EU supplier, standard - D2D - Über Hop
|
||||||
|
* D2D from China (Baoli) with container transport
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_8 = new TestCase(
|
||||||
|
"8",
|
||||||
|
"Non-EU supplier, standard - D2D - Über Hop",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("8212640827")
|
||||||
|
.supplierName("KION Baoli (Jiangsu) Forklift Co., Ltd. (KION plant)")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(3.0)
|
||||||
|
.price(18.2)
|
||||||
|
.overseaShare(0.0)
|
||||||
|
.fcaFee(false)
|
||||||
|
.length(120)
|
||||||
|
.width(100)
|
||||||
|
.height(87)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(99000)
|
||||||
|
.weightUnit("g")
|
||||||
|
.piecesPerUnit(150)
|
||||||
|
.stacked(true)
|
||||||
|
.mixed(false)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Aschaffenburg (KION plant)")
|
||||||
|
.quantity(15000)
|
||||||
|
.d2d(true)
|
||||||
|
.route("JJ,CNSHA,DEHAM,WH STO,WH ULHA,AB")
|
||||||
|
.d2dCost(4500.0)
|
||||||
|
.d2dDuration(47)
|
||||||
|
.customHandling(false)
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build(),
|
||||||
|
TestCaseExpected.builder()
|
||||||
|
.mekA(18.2)
|
||||||
|
.logisticCost(2.99)
|
||||||
|
.mekB(21.19)
|
||||||
|
.fcaFee(0.0)
|
||||||
|
.transportation(0.0)
|
||||||
|
.d2d(0.9)
|
||||||
|
.airFreight(0.0)
|
||||||
|
.custom(0.58)
|
||||||
|
.repackaging(0.05)
|
||||||
|
.handling(0.05)
|
||||||
|
.disposal(0.04)
|
||||||
|
.space(0.33)
|
||||||
|
.capital(1.04)
|
||||||
|
.safetyStock(55)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationExpected.builder()
|
||||||
|
.transitTime(47)
|
||||||
|
.stackedLayers(2)
|
||||||
|
.containerUnitCount(6300)
|
||||||
|
.containerType("40 ft. GP")
|
||||||
|
.limitingFactor("Volume")
|
||||||
|
.build()
|
||||||
|
))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test Case 9: EU Supplier, user - Container - Über Hop
|
||||||
|
* Container transport with very low quantity
|
||||||
|
*/
|
||||||
|
public static final TestCase CASE_9 = new TestCase(
|
||||||
|
"9",
|
||||||
|
"EU Supplier, user - Container - Über Hop",
|
||||||
|
TestCaseInput.builder()
|
||||||
|
.partNumber("8263500575")
|
||||||
|
.supplierName("Ireland supplier")
|
||||||
|
.loadFromPrevious(false)
|
||||||
|
.hsCode(84312002)
|
||||||
|
.tariffRate(0.0)
|
||||||
|
.price(8.0)
|
||||||
|
.overseaShare(0.0)
|
||||||
|
.fcaFee(false)
|
||||||
|
.length(114)
|
||||||
|
.width(114)
|
||||||
|
.height(95)
|
||||||
|
.dimensionUnit("cm")
|
||||||
|
.weight(850)
|
||||||
|
.weightUnit("kg")
|
||||||
|
.piecesPerUnit(20)
|
||||||
|
.stacked(false)
|
||||||
|
.mixed(false)
|
||||||
|
.destinations(List.of(
|
||||||
|
DestinationInput.builder()
|
||||||
|
.name("Hamburg (KION plant)")
|
||||||
|
.quantity(5)
|
||||||
|
.d2d(false)
|
||||||
|
.route("IE SUP,HH")
|
                    .customHandling(false)
                    .build()
            ))
            .build(),
        TestCaseExpected.builder()
            .mekA(8.0)
            .logisticCost(1505.46)
            .mekB(1513.46)
            .fcaFee(0.0)
            .transportation(1475.98)
            .d2d(0.0)
            .airFreight(0.0)
            .custom(0.0)
            .repackaging(0.0)
            .handling(4.39)
            .disposal(0.0)
            .space(24.95)
            .capital(0.13)
            .safetyStock(10)
            .destinations(List.of(
                DestinationExpected.builder()
                    .transitTime(3)
                    .stackedLayers(null)
                    .containerUnitCount(null)
                    .containerType(null)
                    .limitingFactor(null)
                    .build()
            ))
            .build()
    );

    /**
     * Test Case 10: EU-supplier, standard - Container - Über Hop
     * Container transport with custom handling costs
     */
    public static final TestCase CASE_10 = new TestCase(
        "10",
        "EU-supplier, standard - Container - Über Hop",
        TestCaseInput.builder()
            .partNumber("8263500575")
            .supplierName("Ireland supplier")
            .loadFromPrevious(false)
            .hsCode(84312002)
            .tariffRate(0.0)
            .price(8.0)
            .overseaShare(0.0)
            .fcaFee(true)
            .length(114)
            .width(114)
            .height(95)
            .dimensionUnit("cm")
            .weight(850)
            .weightUnit("kg")
            .piecesPerUnit(20)
            .stacked(false)
            .mixed(false)
            .destinations(List.of(
                DestinationInput.builder()
                    .name("Hamburg (KION plant)")
                    .quantity(40)
                    .d2d(false)
                    .route("IE SUP,HH")
                    .handlingCost(6.0)
                    .repackingCost(6.0)
                    .disposalCost(6.0)
                    .customHandling(true)
                    .build()
            ))
            .build(),
        TestCaseExpected.builder()
            .mekA(8.0)
            .logisticCost(188.82)
            .mekB(196.82)
            .fcaFee(0.02)
            .transportation(184.50)
            .d2d(0.0)
            .airFreight(0.0)
            .custom(0.0)
            .repackaging(0.3)
            .handling(0.45)
            .disposal(0.3)
            .space(3.12)
            .capital(0.14)
            .safetyStock(10)
            .destinations(List.of(
                DestinationExpected.builder()
                    .transitTime(3)
                    .stackedLayers(null)
                    .containerUnitCount(null)
                    .containerType(null)
                    .limitingFactor(null)
                    .build()
            ))
            .build()
    );

    /**
     * Test Case 11: Non-EU supplier, user - Container - Über Hop
     * Container transport from China with air freight
     */
    public static final TestCase CASE_11 = new TestCase(
        "11",
        "Non-EU supplier, user - Container - Über Hop",
        TestCaseInput.builder()
            .partNumber("8263500575")
            .supplierName("Linde (China) Forklift Truck (Supplier) (KION plant)")
            .loadFromPrevious(false)
            .hsCode(84312002)
            .tariffRate(1.7)
            .price(8.0)
            .overseaShare(75.0)
            .fcaFee(true)
            .length(114)
            .width(114)
            .height(95)
            .dimensionUnit("cm")
            .weight(850)
            .weightUnit("kg")
            .piecesPerUnit(20)
            .stacked(false)
            .mixed(false)
            .destinations(List.of(
                DestinationInput.builder()
                    .name("Hamburg (KION plant)")
                    .quantity(900)
                    .d2d(false)
                    .route("LX,CNXMN,DEHAM,WH HH,HH")
                    .customHandling(false)
                    .build()
            ))
            .build(),
        TestCaseExpected.builder()
            .mekA(8.0)
            .logisticCost(9.50)
            .mekB(17.50)
            .fcaFee(0.02)
            .transportation(4.87)
            .d2d(0.0)
            .airFreight(0.0)
            .custom(0.32)
            .repackaging(0.39)
            .handling(0.38)
            .disposal(0.30)
            .space(2.77)
            .capital(0.46)
            .safetyStock(10)
            .destinations(List.of(
                DestinationExpected.builder()
                    .transitTime(47)
                    .stackedLayers(2)
                    .containerUnitCount(400)
                    .containerType("20 ft. GP")
                    .limitingFactor("Volume")
                    .build()
            ))
            .build()
    );

    /**
     * All test cases as a list for parametrized tests.
     */
    public static final List<TestCase> ALL = List.of(
        CASE_1,
        CASE_2,
        CASE_3,
        CASE_3B,
        CASE_4,
        CASE_5,
        CASE_6,
        CASE_7,
        CASE_8,
        CASE_9,
        CASE_10,
        CASE_11
    );
}
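For orientation, the `TestCase` value that these constants instantiate is consumed elsewhere in this change through `id()`, `name()`, `input()` and `expected()` accessors. A minimal sketch of what such a container could look like follows; the accessor names are inferred from usage, and the builder-based `TestCaseInput`/`TestCaseExpected` types are assumed to be defined in the other testdata files of this PR (for example as Lombok `@Builder` classes), so treat this as an illustration rather than the actual definition.

```java
package de.avatic.lcc.e2e.testdata;

// Sketch only: the real record may carry additional members or validation.
public record TestCase(
        String id,                 // e.g. "10"
        String name,               // e.g. "EU-supplier, standard - Container - Über Hop"
        TestCaseInput input,       // everything typed into the calculation form
        TestCaseExpected expected  // values read back from the report page
) {
}
```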
443
src/test/java/de/avatic/lcc/e2e/tests/AbstractE2ETest.java
Normal file
@@ -0,0 +1,443 @@
package de.avatic.lcc.e2e.tests;

import com.microsoft.playwright.Browser;
import com.microsoft.playwright.BrowserContext;
import com.microsoft.playwright.BrowserType;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.Playwright;
import de.avatic.lcc.LccApplication;
import de.avatic.lcc.config.DatabaseTestConfiguration;
import de.avatic.lcc.e2e.config.TestFrontendConfig;
import de.avatic.lcc.e2e.pages.DevLoginPage;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.TestInstance;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.server.LocalServerPort;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ActiveProfiles;
import org.testcontainers.junit.jupiter.Testcontainers;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;

/**
 * Abstract base class for E2E tests.
 * Starts Spring Boot backend with integrated frontend and provides Playwright setup.
 *
 * <p>Prerequisites:
 * <ul>
 *   <li>Frontend must be built to src/main/resources/static before running tests</li>
 *   <li>Run: {@code cd src/frontend && BUILD_FOR_SPRING=true npm run build}</li>
 * </ul>
 *
 * <p>Or use Maven profile (if configured):
 * {@code mvn test -Dtest="*E2ETest" -Pe2e}
 */
@SpringBootTest(
        classes = LccApplication.class,
        webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT
)
@Import({DatabaseTestConfiguration.class, TestFrontendConfig.class})
@Testcontainers
@ActiveProfiles({"test", "dev", "mysql", "e2e"})
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@Tag("e2e")
public abstract class AbstractE2ETest {

    @Autowired
    protected JdbcTemplate jdbcTemplate;

    private static final Logger logger = Logger.getLogger(AbstractE2ETest.class.getName());

    protected static final boolean HEADLESS = Boolean.parseBoolean(
            System.getProperty("playwright.headless", "true")
    );
    protected static final double TOLERANCE = 0.03; // 3% tolerance for numeric comparisons

    @LocalServerPort
    protected int port;

    protected Playwright playwright;
    protected Browser browser;
    protected BrowserContext context;
    protected Page page;

    protected String getBaseUrl() {
        return "http://localhost:" + port;
    }

    @BeforeAll
    void setupPlaywright() {
        // Load E2E test data
        loadTestData();

        checkFrontendBuilt();

        logger.info("Setting up Playwright");
        playwright = Playwright.create();
        browser = playwright.chromium().launch(
                new BrowserType.LaunchOptions()
                        .setHeadless(HEADLESS)
                        .setSlowMo(HEADLESS ? 0 : 100)
        );

        // Ensure screenshot directory exists
        try {
            Files.createDirectories(Paths.get("target/screenshots"));
        } catch (Exception e) {
            logger.warning("Could not create screenshots directory");
        }

        logger.info(() -> String.format(
                "Playwright setup complete. Headless: %s, Base URL: %s",
                HEADLESS, getBaseUrl()
        ));
    }

    @BeforeEach
    void setupPage() {
        context = browser.newContext(new Browser.NewContextOptions()
                .setViewportSize(1920, 1080)
        );
        page = context.newPage();

        // Login via DevLoginPage
        DevLoginPage loginPage = new DevLoginPage(page);
        loginPage.login(getBaseUrl(), "John");

        // Navigate to home page after login
        page.navigate(getBaseUrl());
        page.waitForLoadState();

        // Take screenshot after login
        takeScreenshot("after_login");

        logger.info(() -> "Page setup complete, logged in as John. Current URL: " + page.url());
    }

    @AfterEach
    void teardownPage() {
        if (context != null) {
            context.close();
        }
    }

    @AfterAll
    void teardownPlaywright() {
        if (browser != null) {
            browser.close();
        }
        if (playwright != null) {
            playwright.close();
        }
        logger.info("Playwright teardown complete");
    }

    /**
     * Takes a screenshot for debugging purposes.
     */
    protected void takeScreenshot(String name) {
        Path screenshotPath = Paths.get("target/screenshots/" + name + ".png");
        page.screenshot(new Page.ScreenshotOptions().setPath(screenshotPath));
        logger.info(() -> "Screenshot saved to: " + screenshotPath);
    }

    /**
     * Checks if the frontend has been built to static resources.
     * Throws an exception with instructions if not.
     */
    private void checkFrontendBuilt() {
        Path staticIndex = Paths.get("src/main/resources/static/index.html");
        if (!Files.exists(staticIndex)) {
            // Try to build frontend automatically
            if (tryBuildFrontend()) {
                logger.info("Frontend built successfully");
            } else {
                throw new IllegalStateException(
                        "Frontend not built. Please run:\n" +
                        " cd src/frontend && BUILD_FOR_SPRING=true npm run build\n" +
                        "Or set -Dskip.frontend.check=true to skip this check."
                );
            }
        } else {
            logger.info("Frontend already built at: " + staticIndex);
        }
    }

    /**
     * Attempts to build the frontend automatically.
     * Returns true if successful, false otherwise.
     */
    private boolean tryBuildFrontend() {
        if (Boolean.getBoolean("skip.frontend.build")) {
            return false;
        }

        logger.info("Attempting to build frontend...");

        try {
            File frontendDir = new File("src/frontend");
            if (!frontendDir.exists()) {
                logger.warning("Frontend directory not found");
                return false;
            }

            // Check if node_modules exists
            File nodeModules = new File(frontendDir, "node_modules");
            if (!nodeModules.exists()) {
                logger.info("Installing npm dependencies...");
                ProcessBuilder npmInstall = new ProcessBuilder("npm", "install")
                        .directory(frontendDir)
                        .inheritIO();
                Process installProcess = npmInstall.start();
                if (!installProcess.waitFor(5, TimeUnit.MINUTES)) {
                    installProcess.destroyForcibly();
                    return false;
                }
            }

            // Build frontend (to dist/)
            ProcessBuilder npmBuild = new ProcessBuilder("npm", "run", "build")
                    .directory(frontendDir)
                    .inheritIO();

            Process buildProcess = npmBuild.start();
            boolean completed = buildProcess.waitFor(3, TimeUnit.MINUTES);

            if (!completed) {
                buildProcess.destroyForcibly();
                return false;
            }

            if (buildProcess.exitValue() != 0) {
                return false;
            }

            // Copy dist/ to src/main/resources/static/
            return copyFrontendToStatic(frontendDir);

        } catch (IOException | InterruptedException e) {
            logger.warning("Failed to build frontend: " + e.getMessage());
            return false;
        }
    }

    /**
     * Loads E2E test data into the database.
     * This is called once before all tests run.
     */
    private void loadTestData() {
        logger.info("Loading E2E test data...");

        // Check if test users already exist
        Integer existingUsers = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM sys_user WHERE email = 'john.doe@test.com'",
                Integer.class
        );

        if (existingUsers != null && existingUsers > 0) {
            logger.info("Test users already exist, checking nodes...");
            addMissingNodes();
            return;
        }

        // Create test users
        jdbcTemplate.update(
                "INSERT INTO sys_user (workday_id, email, firstname, lastname, is_active) VALUES (?, ?, ?, ?, ?)",
                "WD001TEST", "john.doe@test.com", "John", "Doe", true
        );
        jdbcTemplate.update(
                "INSERT INTO sys_user (workday_id, email, firstname, lastname, is_active) VALUES (?, ?, ?, ?, ?)",
                "WD002TEST", "jane.smith@test.com", "Jane", "Smith", true
        );
        jdbcTemplate.update(
                "INSERT INTO sys_user (workday_id, email, firstname, lastname, is_active) VALUES (?, ?, ?, ?, ?)",
                "WD003TEST", "admin.test@test.com", "Admin", "User", true
        );

        // Assign groups to users
        // John gets 'super' role for full E2E testing capabilities
        jdbcTemplate.update(
                "INSERT INTO sys_user_group_mapping (user_id, group_id) " +
                "SELECT u.id, g.id FROM sys_user u, sys_group g " +
                "WHERE u.email = 'john.doe@test.com' AND g.group_name = 'super'"
        );
        jdbcTemplate.update(
                "INSERT INTO sys_user_group_mapping (user_id, group_id) " +
                "SELECT u.id, g.id FROM sys_user u, sys_group g " +
                "WHERE u.email = 'jane.smith@test.com' AND g.group_name = 'super'"
        );
        jdbcTemplate.update(
                "INSERT INTO sys_user_group_mapping (user_id, group_id) " +
                "SELECT u.id, g.id FROM sys_user u, sys_group g " +
                "WHERE u.email = 'admin.test@test.com' AND g.group_name = 'super'"
        );

        // Add missing nodes for E2E tests
        addMissingNodes();

        logger.info("E2E test data loaded successfully");
    }

    /**
     * Adds missing nodes needed for E2E tests.
     */
    private void addMissingNodes() {
        logger.info("Adding missing nodes for E2E tests...");

        // Add Ireland supplier to node table (if not exists)
        Integer irelandCount = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM node WHERE name = 'Ireland supplier'", Integer.class);
        if (irelandCount == null || irelandCount == 0) {
            Integer ieCountryId = jdbcTemplate.queryForObject(
                    "SELECT id FROM country WHERE iso_code = 'IE'", Integer.class);
            jdbcTemplate.update(
                    "INSERT INTO node (country_id, name, address, external_mapping_id, predecessor_required, " +
                    "is_destination, is_source, is_intermediate, geo_lat, geo_lng, is_deprecated) " +
                    "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                    ieCountryId, "Ireland supplier", "Dublin Ireland", "IE_SUP", false,
                    false, true, false, 53.3494, -6.2606, false
            );
            logger.info("Added Ireland supplier to node table");
        }

        // Get test user ID for sys_user_node entries
        Integer testUserId = jdbcTemplate.queryForObject(
                "SELECT id FROM sys_user WHERE email = 'john.doe@test.com'", Integer.class);

        // Add Turkey supplier to sys_user_node (if not exists)
        Integer turkeyCount = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM sys_user_node WHERE name = 'Turkey supplier'", Integer.class);
        if (turkeyCount == null || turkeyCount == 0) {
            Integer trCountryId = jdbcTemplate.queryForObject(
                    "SELECT id FROM country WHERE iso_code = 'TR'", Integer.class);
            jdbcTemplate.update(
                    "INSERT INTO sys_user_node (user_id, country_id, name, address, geo_lat, geo_lng, is_deprecated) " +
                    "VALUES (?, ?, ?, ?, ?, ?, ?)",
                    testUserId, trCountryId, "Turkey supplier", "Antalya Türkiye",
                    36.8864, 30.7105, false
            );
            logger.info("Added Turkey supplier to sys_user_node table");
        }

        // Add Yantian supplier to sys_user_node (if not exists)
        Integer yantianCount = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM sys_user_node WHERE name = 'Yantian supplier'", Integer.class);
        if (yantianCount == null || yantianCount == 0) {
            Integer cnCountryId = jdbcTemplate.queryForObject(
                    "SELECT id FROM country WHERE iso_code = 'CN'", Integer.class);
            jdbcTemplate.update(
                    "INSERT INTO sys_user_node (user_id, country_id, name, address, geo_lat, geo_lng, is_deprecated) " +
                    "VALUES (?, ?, ?, ?, ?, ?, ?)",
                    testUserId, cnCountryId, "Yantian supplier", "Yantian, China",
                    22.5925, 114.2460, false
            );
            logger.info("Added Yantian supplier to sys_user_node table");
        }

        logger.info("Missing nodes added");

        // Add test materials
        addTestMaterials();
    }

    /**
     * Adds test materials needed for E2E tests.
     */
    private void addTestMaterials() {
        logger.info("Adding test materials...");

        String[] materials = {
                "3064540201", "003064540201", "84312000", "wheel hub",
                "4222640104", "004222640104", "84139100", "gearbox housing blank",
                "4222640803", "004222640803", "84139100", "planet gear carrier blank stage 1",
                "4222640805", "004222640805", "84139100", "planet gear carrier blank stage 2",
                "5512640106", "005512640106", "84312000", "transmission housing blank",
                "8212640113", "008212640113", "84312000", "transmission housing blank GR2E-04",
                "8212640827", "008212640827", "84312000", "planet gear carrier blank Stufe 1",
                "8222640822", "008222640822", "84839089", "planet gear carrier blank stage 1",
                "8263500575", "008263500575", "85015220", "traction motor assy"
        };

        for (int i = 0; i < materials.length; i += 4) {
            String partNumber = materials[i];
            String normalizedPartNumber = materials[i + 1];
            String hsCode = materials[i + 2];
            String name = materials[i + 3];

            // Check by normalized_part_number since that has the UNIQUE constraint
            Integer count = jdbcTemplate.queryForObject(
                    "SELECT COUNT(*) FROM material WHERE normalized_part_number = ?",
                    Integer.class, normalizedPartNumber);
            if (count == null || count == 0) {
                try {
                    jdbcTemplate.update(
                            "INSERT INTO material (part_number, normalized_part_number, hs_code, name, is_deprecated) " +
                            "VALUES (?, ?, ?, ?, ?)",
                            partNumber, normalizedPartNumber, hsCode, name, false
                    );
                    logger.info(() -> "Added material: " + partNumber + " (normalized: " + normalizedPartNumber + ")");
                } catch (Exception e) {
                    logger.warning(() -> "Failed to insert material " + partNumber + ": " + e.getMessage());
                }
            } else {
                logger.info(() -> "Material already exists: " + normalizedPartNumber);
            }
        }

        logger.info("Test materials added");
    }

    /**
     * Copies the built frontend from dist/ to src/main/resources/static/.
     */
    private boolean copyFrontendToStatic(File frontendDir) {
        Path source = frontendDir.toPath().resolve("dist");
        Path target = Paths.get("src/main/resources/static");

        if (!Files.exists(source)) {
            logger.warning("Frontend dist directory not found: " + source);
            return false;
        }

        try {
            // Create target directory if needed
            Files.createDirectories(target);

            // Copy all files recursively
            try (var walk = Files.walk(source)) {
                walk.forEach(sourcePath -> {
                    try {
                        Path targetPath = target.resolve(source.relativize(sourcePath));
                        if (Files.isDirectory(sourcePath)) {
                            Files.createDirectories(targetPath);
                        } else {
                            Files.copy(sourcePath, targetPath,
                                    java.nio.file.StandardCopyOption.REPLACE_EXISTING);
                        }
                    } catch (IOException e) {
                        throw new RuntimeException("Failed to copy: " + sourcePath, e);
                    }
                });
            }

            logger.info("Frontend copied to: " + target);
            return true;

        } catch (IOException | RuntimeException e) {
            logger.warning("Failed to copy frontend: " + e.getMessage());
            return false;
        }
    }
}
@@ -0,0 +1,193 @@
package de.avatic.lcc.e2e.tests;

import com.microsoft.playwright.Locator;
import com.microsoft.playwright.options.WaitForSelectorState;
import de.avatic.lcc.e2e.pages.AssistantPage;
import de.avatic.lcc.e2e.pages.CalculationEditPage;
import de.avatic.lcc.e2e.pages.ResultsPage;
import de.avatic.lcc.e2e.testdata.DestinationInput;
import de.avatic.lcc.e2e.testdata.TestCase;
import de.avatic.lcc.e2e.testdata.TestCases;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.logging.Logger;
import java.util.stream.Stream;

/**
 * End-to-end tests for the calculation workflow.
 * Tests all scenarios from Testfälle.xlsx using Playwright.
 *
 * <p>The backend with integrated frontend is started automatically via @SpringBootTest.
 *
 * <p>Run with: {@code mvn test -Dtest=CalculationWorkflowE2ETest -Dgroups=e2e -Dspring.profiles.active=test,dev,mysql}
 */
@DisplayName("Calculation Workflow E2E Tests")
class CalculationWorkflowE2ETest extends AbstractE2ETest {

    private static final Logger logger = Logger.getLogger(CalculationWorkflowE2ETest.class.getName());

    // Maximum time to wait for calculation to complete (in milliseconds)
    private static final int CALCULATION_TIMEOUT_MS = 120000; // 2 minutes
    private static final int POLL_INTERVAL_MS = 2000; // Check every 2 seconds

    @ParameterizedTest(name = "Testfall {0}: {1}")
    @MethodSource("provideTestCases")
    @DisplayName("Calculation workflow")
    void testCalculationWorkflow(String id, String name, TestCase testCase) {
        logger.info(() -> "Starting test case: " + id + " - " + name);

        try {
            // 1. Navigate to assistant and search part numbers
            AssistantPage assistant = new AssistantPage(page);
            assistant.navigate(getBaseUrl());
            assistant.searchPartNumbers(testCase.input().partNumber());

            // 2. Select supplier
            assistant.deletePreselectedSuppliers();
            assistant.selectSupplier(testCase.input().supplierName());

            // 3. Create calculation
            assistant.createCalculation(testCase.input().loadFromPrevious());

            // 4. Fill the calculation form
            CalculationEditPage calcPage = new CalculationEditPage(page);
            calcPage.fillForm(testCase.input());

            // 5. Add and fill destinations
            for (DestinationInput dest : testCase.input().destinations()) {
                calcPage.addDestination(dest);
                calcPage.fillDestination(dest);
            }

            // 6. Take screenshot before clicking Calculate & close
            takeScreenshot("before_calculate_" + id);

            // 7. Click "Calculate & close" button
            Locator calcButton = page.locator("xpath=//button[contains(., 'Calculate & close')]");
            calcButton.waitFor();

            if (!calcButton.isEnabled()) {
                throw new AssertionError("Calculate & close button is not enabled");
            }

            logger.info(() -> "Clicking Calculate & close for test case " + id);
            calcButton.click();

            // 8. Wait for navigation to calculations list
            page.waitForURL("**/calculations**", new com.microsoft.playwright.Page.WaitForURLOptions().setTimeout(10000));
            logger.info(() -> "Navigated to calculations page");

            // 9. Wait for calculation to complete
            boolean completed = waitForCalculationComplete(testCase.input().partNumber());
            if (!completed) {
                takeScreenshot("calculation_timeout_" + id);
                throw new AssertionError("Calculation did not complete within timeout");
            }

            takeScreenshot("calculation_completed_" + id);
            logger.info(() -> "Test case " + id + " - calculation completed!");

            // 10. Navigate to Reports and verify results
            ResultsPage resultsPage = new ResultsPage(page);
            resultsPage.navigateToReports(getBaseUrl(), testCase.input().partNumber(), testCase.input().supplierName());

            takeScreenshot("report_" + id);

            // 11. Verify results match expected values
            resultsPage.verifyResults(testCase.expected(), TOLERANCE);

            logger.info(() -> "Test case " + id + " - all results verified successfully!");

        } catch (Exception e) {
            // Take screenshot on failure
            takeScreenshot("failure_" + id);
            logger.severe(() -> "Test case " + id + " failed: " + e.getMessage());
            throw e;
        }
    }

    /**
     * Waits for a calculation to complete by polling the calculations list.
     * Looks for a COMPLETED badge for the given part number.
     *
     * @param partNumber the part number to look for
     * @return true if calculation completed, false if timeout
     */
    private boolean waitForCalculationComplete(String partNumber) {
        logger.info("Waiting for calculation to complete for: " + partNumber);

        long startTime = System.currentTimeMillis();
        int attempts = 0;

        while (System.currentTimeMillis() - startTime < CALCULATION_TIMEOUT_MS) {
            attempts++;
            final int attemptNum = attempts;

            // Wait a bit for dashboard to update (it pulls every few seconds)
            page.waitForTimeout(POLL_INTERVAL_MS);

            // Check the "Running" counter in the dashboard
            // Structure: .dashboard-box contains .dashboard-box-number (the count) and .dashboard-box-number-text (the label)
            Locator runningBox = page.locator(".dashboard-box:has(.dashboard-box-number-text:text-is('Running'))");

            if (runningBox.count() > 0) {
                Locator runningCount = runningBox.locator(".dashboard-box-number");
                if (runningCount.count() > 0) {
                    String runningText = runningCount.textContent().trim();
                    logger.info("Attempt " + attemptNum + ": Running calculations = " + runningText);

                    try {
                        int running = Integer.parseInt(runningText);
                        if (running == 0) {
                            // No more running calculations - check if ours completed or failed
                            logger.info("No running calculations. Checking final status...");

                            // Check the Failed counter
                            Locator failedBox = page.locator(".dashboard-box:has(.dashboard-box-number-text:text-is('Failed'))");
                            if (failedBox.count() > 0) {
                                Locator failedCount = failedBox.locator(".dashboard-box-number");
                                String failedText = failedCount.textContent().trim();
                                int failed = Integer.parseInt(failedText);
                                if (failed > 0) {
                                    logger.severe("Calculation failed! Failed count: " + failed);
                                    takeScreenshot("calculation_failed");
                                    return false;
                                }
                            }

                            // Check the Completed counter increased
                            Locator completedBox = page.locator(".dashboard-box:has(.dashboard-box-number-text:text-is('Completed'))");
                            if (completedBox.count() > 0) {
                                Locator completedCount = completedBox.locator(".dashboard-box-number");
                                String completedText = completedCount.textContent().trim();
                                logger.info("Completed calculations: " + completedText);
                            }

                            logger.info("Calculation completed after " + attemptNum + " attempts");
                            return true;
                        }
                    } catch (NumberFormatException e) {
                        logger.warning("Could not parse running count: " + runningText);
                    }
                }
            } else {
                // Dashboard not found, try refreshing
                logger.info("Attempt " + attemptNum + ": Dashboard not found, refreshing...");
                page.reload();
                page.waitForLoadState();
            }
        }

        logger.warning("Calculation did not complete within " + CALCULATION_TIMEOUT_MS + "ms");
        takeScreenshot("calculation_timeout");
        return false;
    }

    static Stream<Arguments> provideTestCases() {
        return TestCases.ALL.stream()
                .map(tc -> Arguments.of(tc.id(), tc.name(), tc));
    }
}
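The polling loop above drives the wait by hand. As a hedged alternative, and only assuming the dashboard keeps the same `.dashboard-box` markup, refreshes itself, and the Playwright Java assertions API (available since 1.18) is on the test classpath, the same wait could be expressed with a web-first assertion; this is a sketch, not what the PR does:

```java
import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.assertions.LocatorAssertions;
import static com.microsoft.playwright.assertions.PlaywrightAssertions.assertThat;

class CalculationWaitSketch {
    // Waits up to timeoutMs for the dashboard's 'Running' counter to reach 0,
    // relying on the assertion's built-in retry instead of a manual loop.
    // Throws on timeout rather than returning false, unlike waitForCalculationComplete.
    static void waitUntilNoRunningCalculations(Page page, double timeoutMs) {
        Locator runningCount = page.locator(
                ".dashboard-box:has(.dashboard-box-number-text:text-is('Running')) .dashboard-box-number");
        assertThat(runningCount).hasText("0",
                new LocatorAssertions.HasTextOptions().setTimeout(timeoutMs));
    }
}
```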
@@ -0,0 +1,183 @@
package de.avatic.lcc.e2e.tests;

import com.microsoft.playwright.Locator;
import de.avatic.lcc.e2e.pages.AssistantPage;
import de.avatic.lcc.e2e.pages.CalculationEditPage;
import de.avatic.lcc.e2e.pages.ResultsPage;
import de.avatic.lcc.e2e.testdata.DestinationInput;
import de.avatic.lcc.e2e.testdata.TestCase;
import de.avatic.lcc.e2e.testdata.TestCases;
import de.avatic.lcc.e2e.util.DeviationReport;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.logging.Logger;

/**
 * Runs all test cases and generates a deviation report comparing expected vs actual values.
 * This test does not fail on deviations - it collects them all and prints a summary.
 */
@DisplayName("Deviation Analysis E2E Test")
@Tag("analysis")
class DeviationAnalysisE2ETest extends AbstractE2ETest {

    private static final Logger logger = Logger.getLogger(DeviationAnalysisE2ETest.class.getName());
    private static final int CALCULATION_TIMEOUT_MS = 120000;
    private static final int POLL_INTERVAL_MS = 2000;

    @Test
    @DisplayName("Analyze deviations across all test cases")
    void analyzeDeviations() {
        DeviationReport report = new DeviationReport();

        for (TestCase testCase : TestCases.ALL) {
            String id = testCase.id();
            String name = testCase.name();

            logger.info(() -> "\n========================================");
            logger.info(() -> "Processing test case: " + id + " - " + name);
            logger.info(() -> "========================================\n");

            try {
                // Run the calculation workflow
                Map<String, Object> actualResults = runCalculationAndGetResults(testCase);

                // Add to deviation report
                report.addTestCase(id, name, testCase.expected(), actualResults);

                logger.info(() -> "Test case " + id + " completed successfully");

            } catch (Exception e) {
                logger.severe(() -> "Test case " + id + " failed with error: " + e.getMessage());
                report.addError(id, name, e.getMessage());
                takeScreenshot("error_" + id);
            }
        }

        // Print the deviation report
        String reportContent = report.generateMarkdownTable();
        System.out.println(reportContent);
        logger.info(reportContent);

        // Write report to file
        try {
            Path reportPath = Path.of("target/deviation-report.md");
            Files.writeString(reportPath, reportContent);
            logger.info("Deviation report written to: " + reportPath.toAbsolutePath());
        } catch (IOException e) {
            logger.warning("Could not write deviation report file: " + e.getMessage());
        }
    }

    private Map<String, Object> runCalculationAndGetResults(TestCase testCase) {
        // 1. Navigate to assistant and search part numbers
        AssistantPage assistant = new AssistantPage(page);
        assistant.navigate(getBaseUrl());
        assistant.searchPartNumbers(testCase.input().partNumber());

        // 2. Select supplier
        assistant.deletePreselectedSuppliers();
        assistant.selectSupplier(testCase.input().supplierName());

        // 3. Create calculation
        assistant.createCalculation(testCase.input().loadFromPrevious());

        // 4. Fill the calculation form
        CalculationEditPage calcPage = new CalculationEditPage(page);

        // Enable screenshots for debugging
        calcPage.enableScreenshots("case_" + testCase.id());

        calcPage.fillForm(testCase.input());

        // 5. Add and fill destinations (screenshots taken automatically for each)
        for (DestinationInput dest : testCase.input().destinations()) {
            calcPage.addDestination(dest);
            calcPage.fillDestination(dest);
        }

        // 6. Take screenshot before clicking Calculate
        calcPage.screenshotBeforeCalculate();

        // 7. Click "Calculate & close" button
        Locator calcButton = page.locator("xpath=//button[contains(., 'Calculate & close')]");
        calcButton.waitFor();

        if (!calcButton.isEnabled()) {
            throw new AssertionError("Calculate & close button is not enabled for test case " + testCase.id());
        }

        calcButton.click();

        // 8. Wait for navigation to calculations list
        page.waitForURL("**/calculations**", new com.microsoft.playwright.Page.WaitForURLOptions().setTimeout(10000));

        // 9. Wait for calculation to complete
        boolean completed = waitForCalculationComplete(testCase.input().partNumber());
        if (!completed) {
            throw new AssertionError("Calculation did not complete within timeout for test case " + testCase.id());
        }

        // 10. Navigate to Reports and read results
        ResultsPage resultsPage = new ResultsPage(page);
        resultsPage.navigateToReports(getBaseUrl(), testCase.input().partNumber(), testCase.input().supplierName());

        // 11. Take full page screenshot with all collapsible boxes expanded
        resultsPage.takeFullPageScreenshot("report_" + testCase.id());

        // 12. Read and return results (without verification)
        return resultsPage.readResults();
    }

    private boolean waitForCalculationComplete(String partNumber) {
        logger.info("Waiting for calculation to complete for: " + partNumber);

        long startTime = System.currentTimeMillis();
        int attempts = 0;

        while (System.currentTimeMillis() - startTime < CALCULATION_TIMEOUT_MS) {
            attempts++;
            final int attemptNum = attempts;

            page.waitForTimeout(POLL_INTERVAL_MS);

            Locator runningBox = page.locator(".dashboard-box:has(.dashboard-box-number-text:text-is('Running'))");

            if (runningBox.count() > 0) {
                Locator runningCount = runningBox.locator(".dashboard-box-number");
                if (runningCount.count() > 0) {
                    String runningText = runningCount.textContent().trim();

                    try {
                        int running = Integer.parseInt(runningText);
                        if (running == 0) {
                            Locator failedBox = page.locator(".dashboard-box:has(.dashboard-box-number-text:text-is('Failed'))");
                            if (failedBox.count() > 0) {
                                Locator failedCount = failedBox.locator(".dashboard-box-number");
                                String failedText = failedCount.textContent().trim();
                                int failed = Integer.parseInt(failedText);
                                if (failed > 0) {
                                    logger.severe("Calculation failed! Failed count: " + failed);
                                    return false;
                                }
                            }
                            return true;
                        }
                    } catch (NumberFormatException e) {
                        logger.warning("Could not parse running count: " + runningText);
                    }
                }
            } else {
                page.reload();
                page.waitForLoadState();
            }
        }

        return false;
    }
}
110
src/test/java/de/avatic/lcc/e2e/tests/SmokeE2ETest.java
Normal file
@@ -0,0 +1,110 @@
package de.avatic.lcc.e2e.tests;

import de.avatic.lcc.e2e.pages.AssistantPage;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

import java.util.logging.Logger;

import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Smoke tests to verify basic application functionality.
 * These tests run quickly and verify that the application is accessible.
 *
 * <p>The backend with integrated frontend is started automatically via @SpringBootTest.
 *
 * <p>Run with: {@code mvn test -Dtest=SmokeE2ETest -Dspring.profiles.active=test,mysql}
 */
@Tag("smoke")
@DisplayName("Smoke E2E Tests")
class SmokeE2ETest extends AbstractE2ETest {

    private static final Logger logger = Logger.getLogger(SmokeE2ETest.class.getName());

    @Test
    @DisplayName("Application is accessible")
    void testApplicationIsAccessible() {
        page.navigate(getBaseUrl());
        String title = page.title();

        logger.info(() -> "Page title: " + title);
        assertTrue(title != null && !title.isEmpty(), "Page should have a title");
    }

    @Test
    @DisplayName("Login was successful")
    void testLoginWasSuccessful() {
        // Login happens in @BeforeEach via AbstractE2ETest
        // After login, we navigate away from dev page (done in AbstractE2ETest)
        String currentUrl = page.url();
        assertTrue(!currentUrl.contains("/dev"), "Should not be on dev page after login");
    }

    @Test
    @DisplayName("Navigate to assistant page")
    void testNavigateToAssistant() {
        // Navigate to assistant
        AssistantPage assistant = new AssistantPage(page);
        assistant.navigate(getBaseUrl());

        String currentUrl = page.url();
        assertTrue(currentUrl.contains("/assistant"), "Should be on assistant page");
    }

    @Test
    @DisplayName("Part number search is functional")
    void testPartNumberSearchFunctional() {
        // Navigate to assistant
        AssistantPage assistant = new AssistantPage(page);
        assistant.navigate(getBaseUrl());

        // Take screenshot to debug
        takeScreenshot("assistant_page");

        // Verify the part number modal is shown with textarea
        boolean textAreaVisible = page.locator("textarea").isVisible();
        logger.info(() -> "Text area visible: " + textAreaVisible);

        // Verify analyze button is present (text: "Analyze input")
        boolean analyzeButtonVisible = page.getByText("Analyze input").isVisible();
        logger.info(() -> "Analyze button visible: " + analyzeButtonVisible);

        assertTrue(textAreaVisible, "Text area for part numbers should be visible");
        assertTrue(analyzeButtonVisible, "Analyze input button should be visible");
    }

    @Test
    @DisplayName("Test materials exist in database")
    void testMaterialsExistInDatabase() {
        // Check if our test materials are in the database
        Integer count = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM material WHERE normalized_part_number IN ('004222640104', '003064540201')",
                Integer.class
        );
        logger.info(() -> "Found " + count + " test materials in database");

        // List all materials for debugging
        var materials = jdbcTemplate.queryForList(
                "SELECT part_number, normalized_part_number, name FROM material LIMIT 20"
        );
        logger.info(() -> "Materials in DB: " + materials);

        // Test the exact SQL that the API uses
        var searchResult = jdbcTemplate.queryForList(
                "SELECT * FROM material WHERE part_number IN (?) OR normalized_part_number IN (?)",
                "003064540201", "003064540201"
        );
        logger.info(() -> "Search result for '003064540201': " + searchResult);

        // Also test with the original part number
        var searchResult2 = jdbcTemplate.queryForList(
                "SELECT * FROM material WHERE part_number IN (?) OR normalized_part_number IN (?)",
                "3064540201", "3064540201"
        );
        logger.info(() -> "Search result for '3064540201': " + searchResult2);

        assertTrue(count != null && count >= 2, "At least 2 test materials should exist. Found: " + count);
    }
}
182
src/test/java/de/avatic/lcc/e2e/util/DeviationReport.java
Normal file
@@ -0,0 +1,182 @@
package de.avatic.lcc.e2e.util;

import de.avatic.lcc.e2e.testdata.TestCaseExpected;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Collects and reports deviations between expected and actual values.
 */
public class DeviationReport {

    private final List<TestCaseDeviation> deviations = new ArrayList<>();

    public void addTestCase(String testCaseId, String testCaseName, TestCaseExpected expected, Map<String, Object> actual) {
        TestCaseDeviation deviation = new TestCaseDeviation(testCaseId, testCaseName);

        deviation.addField("MEK_A", expected.mekA(), (Double) actual.get("mekA"));
        deviation.addField("LOGISTIC_COST", expected.logisticCost(), (Double) actual.get("logisticCost"));
        deviation.addField("MEK_B", expected.mekB(), (Double) actual.get("mekB"));
        deviation.addField("FCA_FEE", expected.fcaFee(), (Double) actual.get("fcaFee"));
        deviation.addField("TRANSPORTATION", expected.transportation(), (Double) actual.get("transportation"));
        deviation.addField("D2D", expected.d2d(), (Double) actual.get("d2d"));
        deviation.addField("AIR_FREIGHT", expected.airFreight(), (Double) actual.get("airFreight"));
        deviation.addField("CUSTOM", expected.custom(), (Double) actual.get("custom"));
        deviation.addField("REPACKAGING", expected.repackaging(), (Double) actual.get("repackaging"));
        deviation.addField("HANDLING", expected.handling(), (Double) actual.get("handling"));
        deviation.addField("DISPOSAL", expected.disposal(), (Double) actual.get("disposal"));
        deviation.addField("SPACE", expected.space(), (Double) actual.get("space"));
        deviation.addField("CAPITAL", expected.capital(), (Double) actual.get("capital"));

        deviations.add(deviation);
    }

    public void addError(String testCaseId, String testCaseName, String errorMessage) {
        TestCaseDeviation deviation = new TestCaseDeviation(testCaseId, testCaseName);
        deviation.setError(errorMessage);
        deviations.add(deviation);
    }

    public String generateMarkdownTable() {
        StringBuilder sb = new StringBuilder();

        sb.append("\n\n");
        sb.append("# DEVIATION REPORT\n");
        sb.append("================================================================================\n\n");

        // Summary table per test case
        sb.append("## Summary by Test Case\n\n");
        sb.append("| Test | Name | Status | Max Deviation |\n");
        sb.append("|------|------|--------|---------------|\n");

        for (TestCaseDeviation dev : deviations) {
            if (dev.hasError()) {
                sb.append(String.format("| %s | %s | ERROR | %s |\n",
                        dev.testCaseId, truncate(dev.testCaseName, 30), dev.errorMessage));
            } else {
                double maxDev = dev.getMaxDeviation();
                String status = maxDev > 5.0 ? "⚠️ HIGH" : (maxDev > 1.0 ? "⚡ MEDIUM" : "✓ OK");
                sb.append(String.format("| %s | %s | %s | %.2f%% |\n",
                        dev.testCaseId, truncate(dev.testCaseName, 30), status, maxDev));
            }
        }

        // Detailed deviations per field
        sb.append("\n\n## Detailed Field Deviations\n\n");
        sb.append("| Test | Field | Expected | Actual | Deviation |\n");
        sb.append("|------|-------|----------|--------|----------|\n");

        for (TestCaseDeviation dev : deviations) {
            if (dev.hasError()) {
                sb.append(String.format("| %s | ERROR | - | - | %s |\n", dev.testCaseId, dev.errorMessage));
            } else {
                for (FieldDeviation field : dev.fields) {
                    if (field.deviationPercent > 1.0 || field.actual == null) {
                        sb.append(String.format("| %s | %s | %.4f | %s | %.2f%% |\n",
                                dev.testCaseId,
                                field.fieldName,
                                field.expected,
                                field.actual != null ? String.format("%.4f", field.actual) : "null",
                                field.deviationPercent));
                    }
                }
            }
        }

        // Field summary - which fields have issues across all tests
        sb.append("\n\n## Field Summary (Average Deviation Across All Tests)\n\n");
        sb.append("| Field | Avg Deviation | Max Deviation | Tests with >1% |\n");
        sb.append("|-------|---------------|---------------|----------------|\n");

        String[] fieldNames = {"MEK_A", "LOGISTIC_COST", "MEK_B", "FCA_FEE", "TRANSPORTATION",
                "D2D", "AIR_FREIGHT", "CUSTOM", "REPACKAGING", "HANDLING", "DISPOSAL", "SPACE", "CAPITAL"};

        for (String fieldName : fieldNames) {
            double sumDev = 0;
            double maxDev = 0;
            int countHigh = 0;
            int count = 0;

            for (TestCaseDeviation dev : deviations) {
                if (!dev.hasError()) {
                    for (FieldDeviation field : dev.fields) {
                        if (field.fieldName.equals(fieldName)) {
                            sumDev += field.deviationPercent;
                            maxDev = Math.max(maxDev, field.deviationPercent);
                            if (field.deviationPercent > 1.0) countHigh++;
                            count++;
                        }
                    }
                }
            }

            if (count > 0) {
                double avgDev = sumDev / count;
                sb.append(String.format("| %s | %.2f%% | %.2f%% | %d/%d |\n",
                        fieldName, avgDev, maxDev, countHigh, count));
            }
        }

        sb.append("\n================================================================================\n");

        return sb.toString();
    }

    private String truncate(String s, int maxLen) {
        return s.length() > maxLen ? s.substring(0, maxLen - 3) + "..." : s;
    }

    public static class TestCaseDeviation {
        String testCaseId;
        String testCaseName;
        List<FieldDeviation> fields = new ArrayList<>();
        String errorMessage;

        public TestCaseDeviation(String testCaseId, String testCaseName) {
            this.testCaseId = testCaseId;
            this.testCaseName = testCaseName;
        }

        public void addField(String fieldName, double expected, Double actual) {
            fields.add(new FieldDeviation(fieldName, expected, actual));
        }

        public void setError(String errorMessage) {
            this.errorMessage = errorMessage;
        }

        public boolean hasError() {
            return errorMessage != null;
        }

        public double getMaxDeviation() {
            return fields.stream()
                    .mapToDouble(f -> f.deviationPercent)
                    .max()
                    .orElse(0.0);
        }
    }

    public static class FieldDeviation {
        String fieldName;
        double expected;
        Double actual;
        double deviationPercent;

        public FieldDeviation(String fieldName, double expected, Double actual) {
            this.fieldName = fieldName;
            this.expected = expected;
            this.actual = actual;

            if (actual == null) {
                // Null actual - if expected is ~0, no deviation; otherwise 100%
                this.deviationPercent = Math.abs(expected) < 0.001 ? 0.0 : 100.0;
            } else {
                double diff = Math.abs(expected - actual);
                this.deviationPercent = expected != 0 ? (diff / Math.abs(expected)) * 100 : diff * 100;
            }
        }
    }
}
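A short usage sketch of the reporter defined above. In the real flow `DeviationAnalysisE2ETest` supplies the actual values read from the results page; the numbers and the missing fields below are invented purely for illustration.

```java
import de.avatic.lcc.e2e.testdata.TestCases;
import de.avatic.lcc.e2e.util.DeviationReport;
import java.util.Map;

class DeviationReportUsageSketch {
    public static void main(String[] args) {
        DeviationReport report = new DeviationReport();

        // Fields missing from the actual map are reported as 100% deviation
        // (or 0% if the expected value is ~0), as handled by FieldDeviation.
        Map<String, Object> actual = Map.of("mekA", 8.0, "logisticCost", 9.62, "mekB", 17.62);
        report.addTestCase("11", "Non-EU supplier, user - Container - Über Hop",
                TestCases.CASE_11.expected(), actual);

        // A test case that crashed before producing results shows up as an ERROR row.
        report.addError("10", "EU-supplier, standard - Container - Über Hop",
                "Calculate & close button not enabled");

        System.out.println(report.generateMarkdownTable());
    }
}
```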
180
src/test/java/de/avatic/lcc/e2e/util/ResultComparator.java
Normal file
@@ -0,0 +1,180 @@
package de.avatic.lcc.e2e.util;
|
||||||
|
|
||||||
|
import de.avatic.lcc.e2e.testdata.DestinationExpected;
|
||||||
|
import de.avatic.lcc.e2e.testdata.TestCaseExpected;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.logging.Logger;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Utility class for comparing actual results with expected values.
|
||||||
|
* Supports tolerance-based comparison for numeric values.
|
||||||
|
*/
|
||||||
|
public final class ResultComparator {
|
||||||
|
|
||||||
|
private static final Logger logger = Logger.getLogger(ResultComparator.class.getName());
|
||||||
|
|
||||||
|
private ResultComparator() {
|
||||||
|
// Utility class
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Asserts that actual results match expected values within the given tolerance.
|
||||||
|
*
|
||||||
|
* @param actualResults Map of actual result values from the UI
|
||||||
|
* @param expected Expected values from test case definition
|
||||||
|
* @param tolerance Relative tolerance for numeric comparisons (0.01 = 1%)
|
||||||
|
* @throws AssertionError if any values don't match within tolerance
|
||||||
|
*/
|
||||||
|
public static void assertResultsMatch(Map<String, Object> actualResults,
|
||||||
|
TestCaseExpected expected,
|
||||||
|
double tolerance) {
|
||||||
|
List<String> failures = new ArrayList<>();
|
||||||
|
|
||||||
|
// Compare main result fields
|
||||||
|
compareNumeric(failures, "MEK_A", expected.mekA(), getDouble(actualResults, "mekA"), tolerance);
|
||||||
|
compareNumeric(failures, "LOGISTIC_COST", expected.logisticCost(), getDouble(actualResults, "logisticCost"), tolerance);
|
||||||
|
compareNumeric(failures, "MEK_B", expected.mekB(), getDouble(actualResults, "mekB"), tolerance);
|
||||||
|
compareNumeric(failures, "FCA_FEE", expected.fcaFee(), getDouble(actualResults, "fcaFee"), tolerance);
|
||||||
|
compareNumeric(failures, "TRANSPORTATION", expected.transportation(), getDouble(actualResults, "transportation"), tolerance);
|
||||||
|
compareNumeric(failures, "D2D", expected.d2d(), getDouble(actualResults, "d2d"), tolerance);
|
||||||
|
compareNumeric(failures, "AIR_FREIGHT", expected.airFreight(), getDouble(actualResults, "airFreight"), tolerance);
|
||||||
|
compareNumeric(failures, "CUSTOM", expected.custom(), getDouble(actualResults, "custom"), tolerance);
|
||||||
|
compareNumeric(failures, "REPACKAGING", expected.repackaging(), getDouble(actualResults, "repackaging"), tolerance);
|
||||||
|
compareNumeric(failures, "HANDLING", expected.handling(), getDouble(actualResults, "handling"), tolerance);
|
||||||
|
compareNumeric(failures, "DISPOSAL", expected.disposal(), getDouble(actualResults, "disposal"), tolerance);
|
||||||
|
compareNumeric(failures, "SPACE", expected.space(), getDouble(actualResults, "space"), tolerance);
|
||||||
|
        compareNumeric(failures, "CAPITAL", expected.capital(), getDouble(actualResults, "capital"), tolerance);
        compareNumeric(failures, "SAFETY_STOCK", (double) expected.safetyStock(), getDouble(actualResults, "safetyStock"), tolerance);

        // Compare destination results
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> actualDestinations = (List<Map<String, Object>>) actualResults.get("destinations");
        List<DestinationExpected> expectedDestinations = expected.destinations();

        if (actualDestinations == null) {
            actualDestinations = List.of();
        }

        if (expectedDestinations.size() != actualDestinations.size()) {
            failures.add(String.format(
                    "DESTINATION_COUNT: expected %d, got %d",
                    expectedDestinations.size(), actualDestinations.size()
            ));
        } else {
            for (int i = 0; i < expectedDestinations.size(); i++) {
                DestinationExpected expDest = expectedDestinations.get(i);
                Map<String, Object> actDest = actualDestinations.get(i);
                String prefix = "DESTINATION_" + (i + 1) + "_";

                compareNumeric(failures, prefix + "TRANSIT_TIME",
                        (double) expDest.transitTime(),
                        getDouble(actDest, "transitTime"), tolerance);
                compareNumeric(failures, prefix + "STACKED_LAYERS",
                        (double) expDest.stackedLayers(),
                        getDouble(actDest, "stackedLayers"), tolerance);
                compareNumeric(failures, prefix + "CONTAINER_UNIT_COUNT",
                        (double) expDest.containerUnitCount(),
                        getDouble(actDest, "containerUnitCount"), tolerance);
                compareString(failures, prefix + "CONTAINER_TYPE",
                        expDest.containerType(),
                        getString(actDest, "containerType"));
                compareString(failures, prefix + "LIMITING_FACTOR",
                        expDest.limitingFactor(),
                        getString(actDest, "limitingFactor"));
            }
        }

        if (!failures.isEmpty()) {
            StringBuilder message = new StringBuilder("Result comparison failed:\n");
            for (String failure : failures) {
                message.append(" - ").append(failure).append("\n");
            }
            throw new AssertionError(message.toString());
        }

        logger.info("All results match within tolerance");
    }

    /**
     * Compares two numeric values with tolerance and adds a failure message if they don't match.
     */
    private static void compareNumeric(List<String> failures, String fieldName,
                                       double expected, Double actual, double tolerance) {
        // Handle zero expected values - if expected is ~0 and actual is null, treat as pass
        // (some fields are not displayed in the UI when their value is 0)
        if (Math.abs(expected) < 1e-10) {
            if (actual == null) {
                // Expected ~0 and actual is null (field not shown) - this is acceptable
                return;
            }
            if (Math.abs(actual) > tolerance) {
                failures.add(String.format("%s: expected ~0, got %.6f", fieldName, actual));
            }
            return;
        }

        if (actual == null) {
            failures.add(String.format("%s: actual value is null, expected %.6f", fieldName, expected));
            return;
        }

        double relativeDiff = Math.abs(expected - actual) / Math.abs(expected);
        if (relativeDiff > tolerance) {
            failures.add(String.format(
                    "%s: expected %.6f, got %.6f (diff: %.2f%%)",
                    fieldName, expected, actual, relativeDiff * 100
            ));
        }
    }

    /**
     * Compares two string values and adds a failure message if they don't match.
     */
    private static void compareString(List<String> failures, String fieldName,
                                      String expected, String actual) {
        if (expected == null && actual == null) {
            return;
        }
        if (expected == null || actual == null || !expected.equals(actual)) {
            failures.add(String.format("%s: expected '%s', got '%s'", fieldName, expected, actual));
        }
    }

    /**
     * Safely gets a Double value from a map.
     */
    private static Double getDouble(Map<String, Object> map, String key) {
        if (map == null) {
            return null;
        }
        Object value = map.get(key);
        if (value == null) {
            return null;
        }
        if (value instanceof Double) {
            return (Double) value;
        }
        if (value instanceof Number) {
            return ((Number) value).doubleValue();
        }
        try {
            // Strip currency symbols, separators and whitespace before parsing.
            // (The former trailing replace(",", ".") was dead code: commas were already removed by the regex.)
            return Double.parseDouble(value.toString().replaceAll("[€$,\\s]", ""));
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Safely gets a String value from a map.
     */
    private static String getString(Map<String, Object> map, String key) {
        if (map == null) {
            return null;
        }
        Object value = map.get(key);
        return value != null ? value.toString() : null;
    }
}
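
For reference, the relative-tolerance rule above means a tolerance of 0.01 accepts up to 1 % deviation from the expected value, while an expected value of ~0 is checked absolutely. A minimal, self-contained sketch of that rule (illustrative only, not part of this change set):

// Illustrative sketch of the comparison rule used by compareNumeric above.
public class RelativeToleranceDemo {
    static boolean withinTolerance(double expected, double actual, double tolerance) {
        if (Math.abs(expected) < 1e-10) {
            return Math.abs(actual) <= tolerance;                                 // expected ~0: absolute check
        }
        return Math.abs(expected - actual) / Math.abs(expected) <= tolerance;     // otherwise: relative check
    }

    public static void main(String[] args) {
        System.out.println(withinTolerance(100.0, 100.9, 0.01)); // true  (0.9 % deviation)
        System.out.println(withinTolerance(100.0, 102.0, 0.01)); // false (2 % deviation)
    }
}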
@@ -0,0 +1,99 @@
package de.avatic.lcc.repositories;

import de.avatic.lcc.config.DatabaseTestConfiguration;
import de.avatic.lcc.config.RepositoryTestConfig;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import org.junit.jupiter.api.BeforeEach;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.annotation.Transactional;
import org.testcontainers.junit.jupiter.Testcontainers;

/**
 * Abstract base class for repository integration tests.
 * <p>
 * Provides TestContainers-based database setup for both MySQL and MSSQL.
 * Tests extending this class will run against the database specified by the active profile.
 * Flyway migrations from db/migration/{mysql|mssql}/ will be applied automatically.
 * <p>
 * Only loads repository and JDBC beans, not the full application context (no controllers, no API services).
 * <p>
 * Usage:
 * <pre>
 * // Run against MySQL
 * mvn test -Dspring.profiles.active=test,mysql -Dtest=NodeRepositoryIntegrationTest
 *
 * // Run against MSSQL
 * mvn test -Dspring.profiles.active=test,mssql -Dtest=NodeRepositoryIntegrationTest
 * </pre>
 */
@SpringBootTest(
        classes = {RepositoryTestConfig.class},
        properties = {
                "spring.main.web-application-type=none",
                "spring.autoconfigure.exclude=" +
                        "org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration," +
                        "org.springframework.boot.autoconfigure.security.oauth2.client.servlet.OAuth2ClientAutoConfiguration," +
                        "org.springframework.boot.autoconfigure.security.oauth2.resource.servlet.OAuth2ResourceServerAutoConfiguration," +
                        "org.springframework.boot.autoconfigure.webservices.WebServicesAutoConfiguration," +
                        "org.springframework.boot.autoconfigure.batch.BatchAutoConfiguration"
        }
)
@Testcontainers
@Import(DatabaseTestConfiguration.class)
// NOTE: No @ActiveProfiles - profiles come from the command line: -Dspring.profiles.active=test,mysql
@Transactional // Rollback after each test for isolation
public abstract class AbstractRepositoryIntegrationTest {

    @Autowired
    protected JdbcTemplate jdbcTemplate;

    @Autowired
    protected SqlDialectProvider dialectProvider;

    /**
     * Gets the active database profile (mysql or mssql).
     * Useful for profile-specific test assertions.
     */
    protected String getDatabaseProfile() {
        return System.getProperty("spring.profiles.active", "mysql");
    }

    /**
     * Checks if tests are running against MSSQL.
     */
    protected boolean isMssql() {
        return getDatabaseProfile().contains("mssql");
    }

    /**
     * Checks if tests are running against MySQL.
     */
    protected boolean isMysql() {
        return getDatabaseProfile().contains("mysql");
    }

    @BeforeEach
    void baseSetup() {
        // Common setup logic if needed
        // Flyway migrations are applied automatically by Spring Boot
    }

    /**
     * Executes a raw SQL statement for test data setup.
     * Use with caution - prefer using repositories where possible.
     */
    protected void executeRawSql(String sql, Object... params) {
        jdbcTemplate.update(sql, params);
    }

    /**
     * Counts rows in a table.
     */
    protected int countRows(String tableName) {
        return jdbcTemplate.queryForObject("SELECT COUNT(*) FROM " + tableName, Integer.class);
    }
}
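
A subclass only needs to extend the base class; everything else is inherited. A minimal sketch (illustrative only, this test is not part of the change set):

// Illustrative subclass; relies only on members inherited from AbstractRepositoryIntegrationTest.
class ExampleSmokeIntegrationTest extends AbstractRepositoryIntegrationTest {

    @org.junit.jupiter.api.Test
    void countryTableIsReachable() {
        // jdbcTemplate, dialectProvider and countRows() come from the base class
        org.junit.jupiter.api.Assertions.assertTrue(countRows("country") >= 0);
    }
}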
@@ -0,0 +1,222 @@
package de.avatic.lcc.repositories;

import de.avatic.lcc.model.db.country.Country;
import de.avatic.lcc.model.db.country.IsoCode;
import de.avatic.lcc.repositories.country.CountryRepository;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;
import java.util.Optional;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Integration tests for CountryRepository.
 * <p>
 * Tests critical functionality across both MySQL and MSSQL:
 * - Basic retrieval operations (getById, getByIsoCode)
 * - Pagination with ORDER BY (MSSQL requirement)
 * - Search with filters
 * - Boolean literal compatibility (deprecated filtering)
 * <p>
 * Countries are populated via Flyway migrations, so no insert tests are needed.
 * <p>
 * Run with:
 * <pre>
 * mvn test -Dspring.profiles.active=test,mysql -Dtest=CountryRepositoryIntegrationTest
 * mvn test -Dspring.profiles.active=test,mssql -Dtest=CountryRepositoryIntegrationTest
 * </pre>
 */
class CountryRepositoryIntegrationTest extends AbstractRepositoryIntegrationTest {

    @Autowired
    private CountryRepository countryRepository;

    @Test
    void testGetById() {
        // Given: Country with id=1 should exist (from Flyway migrations)
        Integer countryId = 1;

        // When: Retrieve by ID
        Optional<Country> result = countryRepository.getById(countryId);

        // Then: Should find the country
        assertTrue(result.isPresent(), "Country with id=1 should exist");
        assertEquals(countryId, result.get().getId());
        assertNotNull(result.get().getIsoCode());
        assertNotNull(result.get().getName());
    }

    @Test
    void testGetByIdNotFound() {
        // Given: Non-existent country ID
        Integer nonExistentId = 99999;

        // When: Retrieve by ID
        Optional<Country> result = countryRepository.getById(nonExistentId);

        // Then: Should return empty
        assertFalse(result.isPresent(), "Should not find country with non-existent ID");
    }

    @Test
    void testGetByIsoCode() {
        // Given: Germany should exist (from Flyway migrations)
        IsoCode isoCode = IsoCode.DE;

        // When: Retrieve by ISO code
        Optional<Country> result = countryRepository.getByIsoCode(isoCode);

        // Then: Should find Germany
        assertTrue(result.isPresent(), "Should find country with ISO code DE");
        assertEquals(IsoCode.DE, result.get().getIsoCode());
        assertTrue(result.get().getName().contains("German") || result.get().getName().contains("Deutschland"));
    }

    @Test
    void testGetByIsoCodeNotFound() {
        // Given: Invalid ISO code that shouldn't exist
        // Note: This will throw IllegalArgumentException if the enum doesn't exist
        // So we test with a valid enum that might not be in the database

        // When/Then: Just verify the method works with any valid IsoCode
        Optional<Country> result = countryRepository.getByIsoCode(IsoCode.US);

        // We don't assert empty here because US might exist in migrations
        // Just verify it doesn't throw an exception
        assertNotNull(result);
    }

    @Test
    void testListAllCountries() {
        // When: List all countries
        List<Country> countries = countryRepository.listAllCountries();

        // Then: Should have countries from Flyway migrations
        assertNotNull(countries);
        assertFalse(countries.isEmpty(), "Should have countries from migrations");

        // Verify ordering by ISO code
        for (int i = 1; i < countries.size(); i++) {
            String prevIso = countries.get(i - 1).getIsoCode().name();
            String currentIso = countries.get(i).getIsoCode().name();
            assertTrue(prevIso.compareTo(currentIso) <= 0,
                    "Countries should be ordered by ISO code");
        }
    }

    @Test
    void testListCountriesWithPagination() {
        // Given: Pagination settings (page 1, size 5)
        SearchQueryPagination pagination = new SearchQueryPagination(1, 5);

        // When: List countries with pagination
        SearchQueryResult<Country> result = countryRepository.listCountries(
                Optional.empty(), false, pagination
        );

        // Then: Verify pagination works
        assertNotNull(result);
        assertNotNull(result.toList());
        assertTrue(result.toList().size() <= 5, "Should return at most 5 countries per page");
        assertTrue(result.getTotalElements() > 0, "Total elements should be positive");
    }

    @Test
    void testListCountriesWithFilter() {
        // Given: Filter for "German" or "Deutschland"
        String filter = "German";

        // When: List countries with filter
        SearchQueryResult<Country> result = countryRepository.listCountries(
                Optional.of(filter), false
        );

        // Then: Should find matching countries
        assertNotNull(result);
        assertFalse(result.toList().isEmpty(), "Should find countries matching 'German'");

        // Verify all results match the filter (name, iso_code, or region_code)
        for (Country country : result.toList()) {
            boolean matches = country.getName().toLowerCase().contains(filter.toLowerCase()) ||
                    country.getIsoCode().name().toLowerCase().contains(filter.toLowerCase()) ||
                    country.getRegionCode().name().toLowerCase().contains(filter.toLowerCase());
            assertTrue(matches, "Country should match filter: " + country.getName());
        }
    }

    @Test
    void testListCountriesWithFilterAndPagination() {
        // Given: Filter + Pagination
        String filter = "a"; // Should match many countries
        SearchQueryPagination pagination = new SearchQueryPagination(1, 3);

        // When: List countries with filter and pagination
        SearchQueryResult<Country> result = countryRepository.listCountries(
                Optional.of(filter), false, pagination
        );

        // Then: Should apply both filter and pagination
        assertNotNull(result);
        assertTrue(result.toList().size() <= 3, "Should respect pagination limit");

        for (Country country : result.toList()) {
            boolean matches = country.getName().toLowerCase().contains(filter.toLowerCase()) ||
                    country.getIsoCode().name().toLowerCase().contains(filter.toLowerCase()) ||
                    country.getRegionCode().name().toLowerCase().contains(filter.toLowerCase());
            assertTrue(matches, "Country should match filter");
        }
    }

    @Test
    void testBooleanLiteralCompatibility() {
        // This test verifies that boolean literals work across MySQL (TRUE/FALSE) and MSSQL (1/0)

        // When: List countries excluding deprecated
        SearchQueryResult<Country> result = countryRepository.listCountries(
                Optional.empty(), true // excludeDeprecated = true
        );

        // Then: Should only return non-deprecated countries
        assertNotNull(result);
        for (Country country : result.toList()) {
            assertFalse(country.getDeprecated(),
                    "Should not include deprecated countries when excludeDeprecated=true");
        }
    }

    @Test
    void testGetByIsoCodes() {
        // Given: List of ISO codes
        List<IsoCode> isoCodes = List.of(IsoCode.DE, IsoCode.FR, IsoCode.US);

        // When: Get countries by ISO codes
        List<Country> countries = countryRepository.getByIsoCodes(isoCodes);

        // Then: Should return matching countries
        assertNotNull(countries);
        assertFalse(countries.isEmpty(), "Should find countries");

        // Verify all returned countries are in the requested list
        for (Country country : countries) {
            assertTrue(isoCodes.contains(country.getIsoCode()),
                    "Returned country should be in requested ISO codes");
        }
    }

    @Test
    void testGetByIsoCodesEmptyList() {
        // Given: Empty list
        List<IsoCode> emptyList = List.of();

        // When: Get countries by empty ISO codes
        List<Country> countries = countryRepository.getByIsoCodes(emptyList);

        // Then: Should return empty list
        assertNotNull(countries);
        assertTrue(countries.isEmpty(), "Should return empty list for empty input");
    }
}
@@ -0,0 +1,128 @@
package de.avatic.lcc.repositories;

import de.avatic.lcc.database.dialect.SqlDialectProvider;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Smoke test to verify TestContainers and Flyway setup.
 * <p>
 * Validates:
 * - TestContainers starts correctly
 * - Flyway migrations run successfully
 * - Database contains expected test data
 * - Correct SqlDialectProvider is loaded
 * <p>
 * Run with:
 * <pre>
 * mvn test -Dspring.profiles.active=test,mysql -Dtest=DatabaseConfigurationSmokeTest
 * mvn test -Dspring.profiles.active=test,mssql -Dtest=DatabaseConfigurationSmokeTest
 * </pre>
 */
class DatabaseConfigurationSmokeTest extends AbstractRepositoryIntegrationTest {

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Autowired
    private SqlDialectProvider dialectProvider;

    @Test
    void testDatabaseConnectionIsEstablished() {
        // When: Query database
        Integer result = jdbcTemplate.queryForObject("SELECT 1", Integer.class);

        // Then: Connection works
        assertNotNull(result);
        assertEquals(1, result);
    }

    @Test
    void testFlywayMigrationsRanSuccessfully() {
        // When: Check if core tables exist
        Integer propertySetCount = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM property_set", Integer.class);

        // Then: Table exists (migrations ran)
        assertNotNull(propertySetCount);
    }

    @Test
    void testCountriesWereLoadedFromMigrations() {
        // When: Count countries
        Integer countryCount = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM country", Integer.class);

        // Then: Countries exist (V4__Country.sql ran)
        assertNotNull(countryCount);
        assertTrue(countryCount > 0, "Countries should be loaded from V4__Country.sql migration");
        System.out.println("Found " + countryCount + " countries in database");
    }

    @Test
    void testNodesWereLoadedFromMigrations() {
        // When: Count nodes
        Integer nodeCount = jdbcTemplate.queryForObject(
                "SELECT COUNT(*) FROM node", Integer.class);

        // Then: Nodes exist (V5__Nodes.sql ran)
        assertNotNull(nodeCount);
        assertTrue(nodeCount > 0, "Nodes should be loaded from V5__Nodes.sql migration");
        System.out.println("Found " + nodeCount + " nodes in database");
    }

    @Test
    void testCorrectSqlDialectProviderIsLoaded() {
        // Debug: Print active profiles (the previous local array here was unused and has been removed)
        System.out.println("Active Spring profiles from getDatabaseProfile(): " + getDatabaseProfile());
        System.out.println("System property spring.profiles.active: " + System.getProperty("spring.profiles.active"));

        // When: Check which dialect provider is active
        String booleanTrue = dialectProvider.getBooleanTrue();

        // Then: Correct provider based on profile
        if (isMysql()) {
            assertEquals("TRUE", booleanTrue, "MySQL should use TRUE literal");
        } else if (isMssql()) {
            assertEquals("1", booleanTrue, "MSSQL should use 1 literal");
        }

        System.out.println("Active database profile: " + getDatabaseProfile());
        System.out.println("Dialect provider class: " + dialectProvider.getClass().getSimpleName());
    }

    @Test
    void testBooleanLiteralInQuery() {
        // When: Query with boolean literal from dialect provider
        String query = "SELECT COUNT(*) FROM node WHERE is_deprecated = " +
                dialectProvider.getBooleanFalse();
        Integer activeNodeCount = jdbcTemplate.queryForObject(query, Integer.class);

        // Then: Query executes without syntax error
        assertNotNull(activeNodeCount);
        System.out.println("Active (non-deprecated) nodes: " + activeNodeCount);
    }

    @Test
    void testPaginationQuery() {
        // When: Execute query with pagination (requires ORDER BY in MSSQL)
        String paginationClause = dialectProvider.buildPaginationClause(5, 0);
        Object[] paginationParams = dialectProvider.getPaginationParameters(5, 0);

        String query = "SELECT id FROM node ORDER BY id " + paginationClause;
        var nodeIds = jdbcTemplate.query(query,
                (rs, rowNum) -> rs.getInt("id"),
                paginationParams[0], paginationParams[1]);

        // Then: Query executes successfully and returns up to 5 results
        assertNotNull(nodeIds);
        assertFalse(nodeIds.isEmpty(), "Should return at least one node");
        assertTrue(nodeIds.size() <= 5, "Should return at most 5 nodes");
        System.out.println("Returned " + nodeIds.size() + " nodes with pagination: " + nodeIds);
    }
}
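
The smoke test above only relies on four methods of SqlDialectProvider: getBooleanTrue(), getBooleanFalse(), buildPaginationClause(limit, offset) and getPaginationParameters(limit, offset). A sketch of what the MySQL side of such a provider could return; the real implementation in de.avatic.lcc.database.dialect may differ and is not shown in this diff:

// Sketch only - illustrates the contract exercised by the smoke test, not the actual provider.
class MySqlDialectSketch {
    String getBooleanTrue()  { return "TRUE"; }
    String getBooleanFalse() { return "FALSE"; }

    // MySQL pagination: "LIMIT ? OFFSET ?" bound with (limit, offset).
    // An MSSQL provider might instead return "OFFSET ? ROWS FETCH NEXT ? ROWS ONLY" with (offset, limit),
    // which is why the query above always carries an ORDER BY clause.
    String buildPaginationClause(int limit, int offset) {
        return "LIMIT ? OFFSET ?";
    }

    Object[] getPaginationParameters(int limit, int offset) {
        return new Object[]{limit, offset};
    }
}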
@@ -0,0 +1,300 @@
package de.avatic.lcc.repositories;

import de.avatic.lcc.model.db.nodes.Distance;
import de.avatic.lcc.model.db.nodes.DistanceMatrixState;
import de.avatic.lcc.model.db.nodes.Node;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.math.BigDecimal;
import java.time.LocalDateTime;
import java.util.Optional;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Integration tests for DistanceMatrixRepository.
 * <p>
 * Tests critical functionality across both MySQL and MSSQL:
 * - Distance lookup operations
 * - Save/update logic (INSERT or UPDATE based on existence)
 * - Retry counter updates
 * - Enum handling (DistanceMatrixState)
 * - Timestamp handling
 * <p>
 * Run with:
 * <pre>
 * mvn test -Dspring.profiles.active=test,mysql -Dtest=DistanceMatrixRepositoryIntegrationTest
 * mvn test -Dspring.profiles.active=test,mssql -Dtest=DistanceMatrixRepositoryIntegrationTest
 * </pre>
 */
class DistanceMatrixRepositoryIntegrationTest extends AbstractRepositoryIntegrationTest {

    @Autowired
    private DistanceMatrixRepository distanceMatrixRepository;

    private Integer testNodeId1;
    private Integer testNodeId2;
    private Integer testUserNodeId1;
    private Integer testUserNodeId2;

    @BeforeEach
    void setupTestData() {
        // Create test nodes
        testNodeId1 = createTestNode("Node 1", "Berlin", 52.5200, 13.4050);
        testNodeId2 = createTestNode("Node 2", "Munich", 48.1351, 11.5820);

        // Create test user nodes
        Integer userId = createTestUser("distancetest@test.com", "DISTWORK001");
        testUserNodeId1 = createTestUserNode(userId, "User Node 1", "Hamburg", 53.5511, 9.9937);
        testUserNodeId2 = createTestUserNode(userId, "User Node 2", "Frankfurt", 50.1109, 8.6821);
    }

    @Test
    void testGetDistanceNodeToNode() {
        // Given: Create distance entry
        Distance distance = createTestDistance(testNodeId1, testNodeId2, null, null,
                52.5200, 13.4050, 48.1351, 11.5820, 504.2);
        distanceMatrixRepository.saveDistance(distance);

        // When: Get distance
        Node from = createNodeObject(testNodeId1);
        Node to = createNodeObject(testNodeId2);
        Optional<Distance> result = distanceMatrixRepository.getDistance(from, false, to, false);

        // Then: Should find distance
        assertTrue(result.isPresent(), "Should find distance between nodes");
        assertEquals(0, new BigDecimal("504.2").compareTo(result.get().getDistance()),
                "Distance should be 504.2");
        assertEquals(DistanceMatrixState.VALID, result.get().getState());
        assertEquals(testNodeId1, result.get().getFromNodeId());
        assertEquals(testNodeId2, result.get().getToNodeId());
    }

    @Test
    void testGetDistanceUserNodeToUserNode() {
        // Given: Create user node distance entry
        Distance distance = createTestDistance(null, null, testUserNodeId1, testUserNodeId2,
                53.5511, 9.9937, 50.1109, 8.6821, 393.5);
        distanceMatrixRepository.saveDistance(distance);

        // When: Get distance
        Node from = createNodeObject(testUserNodeId1);
        Node to = createNodeObject(testUserNodeId2);
        Optional<Distance> result = distanceMatrixRepository.getDistance(from, true, to, true);

        // Then: Should find distance
        assertTrue(result.isPresent(), "Should find distance between user nodes");
        assertEquals(0, new BigDecimal("393.5").compareTo(result.get().getDistance()),
                "Distance should be 393.5");
        assertEquals(testUserNodeId1, result.get().getFromUserNodeId());
        assertEquals(testUserNodeId2, result.get().getToUserNodeId());
    }

    @Test
    void testGetDistanceNotFound() {
        // When: Get non-existent distance
        Node from = createNodeObject(testNodeId1);
        Node to = createNodeObject(testNodeId2);
        Optional<Distance> result = distanceMatrixRepository.getDistance(from, false, to, false);

        // Then: Should return empty
        assertFalse(result.isPresent(), "Should not find non-existent distance");
    }

    @Test
    void testSaveDistanceInsert() {
        // Given: New distance
        Distance distance = createTestDistance(testNodeId1, testNodeId2, null, null,
                52.5200, 13.4050, 48.1351, 11.5820, 504.2);

        // When: Save
        distanceMatrixRepository.saveDistance(distance);

        // Then: Should be inserted
        Node from = createNodeObject(testNodeId1);
        Node to = createNodeObject(testNodeId2);
        Optional<Distance> saved = distanceMatrixRepository.getDistance(from, false, to, false);

        assertTrue(saved.isPresent(), "Distance should be saved");
        assertEquals(0, new BigDecimal("504.2").compareTo(saved.get().getDistance()),
                "Distance should be 504.2");
        assertEquals(DistanceMatrixState.VALID, saved.get().getState());
    }

    @Test
    void testSaveDistanceUpdate() {
        // Given: Existing distance
        Distance distance = createTestDistance(testNodeId1, testNodeId2, null, null,
                52.5200, 13.4050, 48.1351, 11.5820, 504.2);
        distanceMatrixRepository.saveDistance(distance);

        // When: Update with new distance
        Distance updated = createTestDistance(testNodeId1, testNodeId2, null, null,
                52.5200, 13.4050, 48.1351, 11.5820, 510.0);
        updated.setState(DistanceMatrixState.STALE);
        distanceMatrixRepository.saveDistance(updated);

        // Then: Should be updated
        Node from = createNodeObject(testNodeId1);
        Node to = createNodeObject(testNodeId2);
        Optional<Distance> result = distanceMatrixRepository.getDistance(from, false, to, false);

        assertTrue(result.isPresent());
        assertEquals(0, new BigDecimal("510.0").compareTo(result.get().getDistance()),
                "Distance should be 510.0");
        assertEquals(DistanceMatrixState.STALE, result.get().getState());
    }

    @Test
    void testUpdateRetries() {
        // Given: Insert distance
        Distance distance = createTestDistance(testNodeId1, testNodeId2, null, null,
                52.5200, 13.4050, 48.1351, 11.5820, 504.2);
        distanceMatrixRepository.saveDistance(distance);

        // Get the ID
        Node from = createNodeObject(testNodeId1);
        Node to = createNodeObject(testNodeId2);
        Distance saved = distanceMatrixRepository.getDistance(from, false, to, false).orElseThrow();
        Integer distanceId = saved.getId();
        int initialRetries = saved.getRetries();

        // When: Update retries
        distanceMatrixRepository.updateRetries(distanceId);

        // Then: Retries should be incremented
        Distance afterUpdate = distanceMatrixRepository.getDistance(from, false, to, false).orElseThrow();
        assertEquals(initialRetries + 1, afterUpdate.getRetries(),
                "Retries should be incremented by 1");
    }

    @Test
    void testDistanceStates() {
        // Test different states
        for (DistanceMatrixState state : new DistanceMatrixState[]{
                DistanceMatrixState.VALID,
                DistanceMatrixState.STALE,
                DistanceMatrixState.EXCEPTION
        }) {
            // Given: Create distance with specific state
            Integer fromId = createTestNode("From " + state, "Address", 50.0, 10.0);
            Integer toId = createTestNode("To " + state, "Address", 51.0, 11.0);

            Distance distance = createTestDistance(fromId, toId, null, null,
                    50.0, 10.0, 51.0, 11.0, 100.0);
            distance.setState(state);
            distanceMatrixRepository.saveDistance(distance);

            // When: Retrieve
            Node from = createNodeObject(fromId);
            Node to = createNodeObject(toId);
            Optional<Distance> result = distanceMatrixRepository.getDistance(from, false, to, false);

            // Then: Should have correct state
            assertTrue(result.isPresent(), "Should find distance with state " + state);
            assertEquals(state, result.get().getState(), "State should be " + state);
        }
    }

    @Test
    void testMixedNodeTypes() {
        // Given: Distance from regular node to user node
        Distance distance = createTestDistance(testNodeId1, null, null, testUserNodeId1,
                52.5200, 13.4050, 53.5511, 9.9937, 289.3);
        distanceMatrixRepository.saveDistance(distance);

        // When: Get distance
        Node from = createNodeObject(testNodeId1);
        Node to = createNodeObject(testUserNodeId1);
        Optional<Distance> result = distanceMatrixRepository.getDistance(from, false, to, true);

        // Then: Should find distance
        assertTrue(result.isPresent(), "Should find distance between mixed node types");
        assertEquals(0, new BigDecimal("289.3").compareTo(result.get().getDistance()),
                "Distance should be 289.3");
        assertEquals(testNodeId1, result.get().getFromNodeId());
        assertEquals(testUserNodeId1, result.get().getToUserNodeId());
        assertNull(result.get().getToNodeId());
        assertNull(result.get().getFromUserNodeId());
    }

    @Test
    void testTimestampHandling() {
        // Given: Create distance with timestamp
        Distance distance = createTestDistance(testNodeId1, testNodeId2, null, null,
                52.5200, 13.4050, 48.1351, 11.5820, 504.2);
        LocalDateTime beforeSave = LocalDateTime.now().minusSeconds(1);
        distanceMatrixRepository.saveDistance(distance);

        // When: Retrieve
        Node from = createNodeObject(testNodeId1);
        Node to = createNodeObject(testNodeId2);
        Optional<Distance> result = distanceMatrixRepository.getDistance(from, false, to, false);

        // Then: Should have valid timestamp
        assertTrue(result.isPresent());
        assertNotNull(result.get().getUpdatedAt(), "Updated timestamp should be set");
        assertTrue(result.get().getUpdatedAt().isAfter(beforeSave),
                "Updated timestamp should be recent");
    }

    // ========== Helper Methods ==========

    private Integer createTestNode(String name, String address, double geoLat, double geoLng) {
        String sql = "INSERT INTO node (name, address, geo_lat, geo_lng, is_deprecated, is_destination, is_source, is_intermediate, country_id, predecessor_required) " +
                "VALUES (?, ?, ?, ?, " + dialectProvider.getBooleanFalse() + ", " +
                dialectProvider.getBooleanTrue() + ", " + dialectProvider.getBooleanTrue() + ", " +
                dialectProvider.getBooleanFalse() + ", ?, " + dialectProvider.getBooleanFalse() + ")";
        // BigDecimal.valueOf keeps the literal value instead of the raw double expansion
        executeRawSql(sql, name, address, BigDecimal.valueOf(geoLat), BigDecimal.valueOf(geoLng), 1);

        String selectSql = isMysql() ? "SELECT LAST_INSERT_ID()" : "SELECT CAST(@@IDENTITY AS INT)";
        return jdbcTemplate.queryForObject(selectSql, Integer.class);
    }

    private Integer createTestUser(String email, String workdayId) {
        String sql = "INSERT INTO sys_user (email, workday_id, firstname, lastname, is_active) VALUES (?, ?, ?, ?, " +
                dialectProvider.getBooleanTrue() + ")";
        executeRawSql(sql, email, workdayId, "Test", "User");

        String selectSql = isMysql() ? "SELECT LAST_INSERT_ID()" : "SELECT CAST(@@IDENTITY AS INT)";
        return jdbcTemplate.queryForObject(selectSql, Integer.class);
    }

    private Integer createTestUserNode(Integer userId, String name, String address, double geoLat, double geoLng) {
        String sql = "INSERT INTO sys_user_node (name, address, geo_lat, geo_lng, is_deprecated, country_id, user_id) " +
                "VALUES (?, ?, ?, ?, " + dialectProvider.getBooleanFalse() + ", ?, ?)";
        executeRawSql(sql, name, address, BigDecimal.valueOf(geoLat), BigDecimal.valueOf(geoLng), 1, userId);

        String selectSql = isMysql() ? "SELECT LAST_INSERT_ID()" : "SELECT CAST(@@IDENTITY AS INT)";
        return jdbcTemplate.queryForObject(selectSql, Integer.class);
    }

    private Distance createTestDistance(Integer fromNodeId, Integer toNodeId,
                                        Integer fromUserNodeId, Integer toUserNodeId,
                                        double fromLat, double fromLng,
                                        double toLat, double toLng,
                                        double distance) {
        Distance d = new Distance();
        d.setFromNodeId(fromNodeId);
        d.setToNodeId(toNodeId);
        d.setFromUserNodeId(fromUserNodeId);
        d.setToUserNodeId(toUserNodeId);
        // BigDecimal.valueOf(504.2) yields exactly 504.2 (not 504.2000...028 as new BigDecimal(double) would),
        // so the compareTo assertions above stay exact.
        d.setFromGeoLat(BigDecimal.valueOf(fromLat));
        d.setFromGeoLng(BigDecimal.valueOf(fromLng));
        d.setToGeoLat(BigDecimal.valueOf(toLat));
        d.setToGeoLng(BigDecimal.valueOf(toLng));
        d.setDistance(BigDecimal.valueOf(distance));
        d.setState(DistanceMatrixState.VALID);
        d.setUpdatedAt(LocalDateTime.now());
        d.setRetries(0);
        return d;
    }

    private Node createNodeObject(Integer id) {
        Node node = new Node();
        node.setId(id);
        return node;
    }
}
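
A side note on the assertions in these tests: BigDecimal.equals() also compares scale, so 504.2 and 504.20 are not equal(), while compareTo() treats them as the same number; that is why the tests assert assertEquals(0, expected.compareTo(actual)). A short illustration (not part of this change set):

import java.math.BigDecimal;

// Demonstrates why the tests compare BigDecimal values with compareTo() instead of equals().
class BigDecimalComparisonDemo {
    public static void main(String[] args) {
        System.out.println(new BigDecimal("504.2").equals(new BigDecimal("504.20")));    // false - scale differs
        System.out.println(new BigDecimal("504.2").compareTo(new BigDecimal("504.20"))); // 0 - numerically equal
    }
}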
@@ -0,0 +1,351 @@
package de.avatic.lcc.repositories;

import de.avatic.lcc.model.db.materials.Material;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;
import java.util.Optional;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Integration tests for MaterialRepository.
 * <p>
 * Tests critical functionality across both MySQL and MSSQL:
 * - CRUD operations (Create, Read, Update, Delete)
 * - Pagination with ORDER BY (MSSQL requirement)
 * - Search with filters (name and part_number)
 * - Boolean literal compatibility (deprecated filtering)
 * - Bulk operations (getByPartNumbers, deleteByIds, findMissingIds)
 * <p>
 * Run with:
 * <pre>
 * mvn test -Dspring.profiles.active=test,mysql -Dtest=MaterialRepositoryIntegrationTest
 * mvn test -Dspring.profiles.active=test,mssql -Dtest=MaterialRepositoryIntegrationTest
 * </pre>
 */
class MaterialRepositoryIntegrationTest extends AbstractRepositoryIntegrationTest {

    @Autowired
    private MaterialRepository materialRepository;

    @Test
    void testInsertAndRetrieve() {
        // Given: Create material
        Material material = createTestMaterial("TEST-001", "Test Material 1");

        // When: Insert
        materialRepository.insert(material);

        // When: Retrieve by part number
        Optional<Material> retrieved = materialRepository.getByPartNumber("TEST-001");

        // Then: Should retrieve successfully
        assertTrue(retrieved.isPresent(), "Material should be retrievable after insert");
        assertEquals("TEST-001", retrieved.get().getPartNumber());
        assertEquals("Test Material 1", retrieved.get().getName());
        assertFalse(retrieved.get().getDeprecated());
    }

    @Test
    void testUpdate() {
        // Given: Insert material
        Material material = createTestMaterial("TEST-002", "Original Name");
        materialRepository.insert(material);

        // When: Update material
        Material toUpdate = materialRepository.getByPartNumber("TEST-002").orElseThrow();
        toUpdate.setName("Updated Name");
        toUpdate.setHsCode("12345678901");
        materialRepository.update(toUpdate);

        // Then: Verify update
        Material updated = materialRepository.getById(toUpdate.getId()).orElseThrow();
        assertEquals("Updated Name", updated.getName());
        assertEquals("12345678901", updated.getHsCode());
    }

    @Test
    void testUpdateByPartNumber() {
        // Given: Insert material
        Material material = createTestMaterial("TEST-003", "Original Name");
        materialRepository.insert(material);

        // When: Update by part number
        Material toUpdate = materialRepository.getByPartNumber("TEST-003").orElseThrow();
        toUpdate.setName("Updated via PartNumber");
        materialRepository.updateByPartNumber(toUpdate);

        // Then: Verify update
        Material updated = materialRepository.getByPartNumber("TEST-003").orElseThrow();
        assertEquals("Updated via PartNumber", updated.getName());
    }

    @Test
    void testSetDeprecatedById() {
        // Given: Insert material
        Material material = createTestMaterial("TEST-004", "Material to Deprecate");
        materialRepository.insert(material);
        Integer materialId = materialRepository.getByPartNumber("TEST-004").orElseThrow().getId();

        // When: Deprecate
        Optional<Integer> result = materialRepository.setDeprecatedById(materialId);

        // Then: Should be deprecated
        assertTrue(result.isPresent());

        // getById() excludes deprecated
        Optional<Material> deprecated = materialRepository.getById(materialId);
        assertFalse(deprecated.isPresent(), "getById() should exclude deprecated materials");

        // But getByIdIncludeDeprecated() should find it
        Optional<Material> includingDeprecated = materialRepository.getByIdIncludeDeprecated(materialId);
        assertTrue(includingDeprecated.isPresent(), "getByIdIncludeDeprecated() should find deprecated materials");
        assertTrue(includingDeprecated.get().getDeprecated());
    }

    @Test
    void testDeleteById() {
        // Given: Insert material
        Material material = createTestMaterial("TEST-005", "Material to Delete");
        materialRepository.insert(material);
        Integer materialId = materialRepository.getByPartNumber("TEST-005").orElseThrow().getId();

        // When: Delete (soft delete - sets deprecated)
        materialRepository.deleteById(materialId);

        // Then: Should be deprecated
        Optional<Material> deleted = materialRepository.getById(materialId);
        assertFalse(deleted.isPresent(), "Deleted material should not be retrievable via getById()");

        Optional<Material> includingDeleted = materialRepository.getByIdIncludeDeprecated(materialId);
        assertTrue(includingDeleted.isPresent());
        assertTrue(includingDeleted.get().getDeprecated());
    }

    @Test
    void testListMaterialsWithPagination() {
        // Given: Insert multiple materials
        for (int i = 1; i <= 5; i++) {
            Material material = createTestMaterial("PAGE-" + String.format("%03d", i), "Pagination Material " + i);
            materialRepository.insert(material);
        }

        // When: List with pagination (page 1, size 3)
        SearchQueryPagination pagination = new SearchQueryPagination(1, 3);
        SearchQueryResult<Material> result = materialRepository.listMaterials(
                Optional.empty(), false, pagination
        );

        // Then: Verify pagination works
        assertNotNull(result);
        assertNotNull(result.toList());
        assertTrue(result.toList().size() <= 3, "Should return at most 3 materials per page");
        assertTrue(result.getTotalElements() >= 5, "Should have at least 5 materials total");
    }

    @Test
    void testListMaterialsWithFilter() {
        // Given: Insert materials with different names
        Material material1 = createTestMaterial("FILTER-001", "Special Widget");
        materialRepository.insert(material1);

        Material material2 = createTestMaterial("FILTER-002", "Normal Component");
        materialRepository.insert(material2);

        Material material3 = createTestMaterial("FILTER-003", "Special Gadget");
        materialRepository.insert(material3);

        // When: Search for "Special"
        SearchQueryPagination pagination = new SearchQueryPagination(1, 10);
        SearchQueryResult<Material> result = materialRepository.listMaterials(
                Optional.of("SPECIAL"), false, pagination
        );

        // Then: Should find materials with "Special" in name
        assertNotNull(result);
        assertTrue(result.toList().size() >= 2, "Should find at least 2 materials with 'Special'");

        for (Material m : result.toList()) {
            boolean matches = m.getName().toUpperCase().contains("SPECIAL") ||
                    m.getPartNumber().toUpperCase().contains("SPECIAL");
            assertTrue(matches, "Material should match filter");
        }
    }

    @Test
    void testListMaterialsExcludeDeprecated() {
        // Given: Insert deprecated and active materials
        Material deprecated = createTestMaterial("DEPR-001", "Deprecated Material");
        deprecated.setDeprecated(true);
        materialRepository.insert(deprecated);

        Material active = createTestMaterial("ACTIVE-001", "Active Material");
        materialRepository.insert(active);

        // When: List excluding deprecated
        SearchQueryPagination pagination = new SearchQueryPagination(1, 10);
        SearchQueryResult<Material> result = materialRepository.listMaterials(
                Optional.empty(), true, pagination
        );

        // Then: Should not include deprecated materials
        assertNotNull(result);
        for (Material m : result.toList()) {
            assertFalse(m.getDeprecated(), "Should not include deprecated materials");
        }
    }

    @Test
    void testListAllMaterials() {
        // Given: Insert materials
        Material material1 = createTestMaterial("ALL-001", "Material 1");
        materialRepository.insert(material1);

        Material material2 = createTestMaterial("ALL-002", "Material 2");
        materialRepository.insert(material2);

        // When: List all
        List<Material> materials = materialRepository.listAllMaterials();

        // Then: Should return all materials ordered by normalized_part_number
        assertNotNull(materials);
        assertFalse(materials.isEmpty());

        // Verify ordering
        for (int i = 1; i < materials.size(); i++) {
            String prev = materials.get(i - 1).getNormalizedPartNumber();
            String current = materials.get(i).getNormalizedPartNumber();
            assertTrue(prev.compareTo(current) <= 0,
                    "Materials should be ordered by normalized_part_number");
        }
    }

    @Test
    void testGetByPartNumber() {
        // Given: Insert material
        Material material = createTestMaterial("BYPART-001", "Get By Part");
        materialRepository.insert(material);

        // When: Get by part number
        Optional<Material> result = materialRepository.getByPartNumber("BYPART-001");

        // Then: Should find material
        assertTrue(result.isPresent());
        assertEquals("BYPART-001", result.get().getPartNumber());
        assertEquals("Get By Part", result.get().getName());
    }

    @Test
    void testGetByPartNumberNotFound() {
        // When: Get by non-existent part number
        Optional<Material> result = materialRepository.getByPartNumber("NONEXISTENT-999");

        // Then: Should return empty
        assertFalse(result.isPresent(), "Should not find material with non-existent part number");
    }

    @Test
    void testGetByPartNumbers() {
        // Given: Insert multiple materials
        Material material1 = createTestMaterial("BULK-001", "Bulk Material 1");
        materialRepository.insert(material1);

        Material material2 = createTestMaterial("BULK-002", "Bulk Material 2");
        materialRepository.insert(material2);

        Material material3 = createTestMaterial("BULK-003", "Bulk Material 3");
        materialRepository.insert(material3);

        // When: Get by part numbers
        List<String> partNumbers = List.of("BULK-001", "BULK-002", "NONEXISTENT");
        List<Material> materials = materialRepository.getByPartNumbers(partNumbers);

        // Then: Should find existing materials (2 out of 3 part numbers)
        assertNotNull(materials);
        assertTrue(materials.size() >= 2, "Should find at least 2 materials");

        List<String> foundPartNumbers = materials.stream()
                .map(Material::getPartNumber)
                .toList();
        assertTrue(foundPartNumbers.contains("BULK-001"));
        assertTrue(foundPartNumbers.contains("BULK-002"));
    }

    @Test
    void testGetByPartNumbersEmptyList() {
        // When: Get by empty list
        List<Material> materials = materialRepository.getByPartNumbers(List.of());

        // Then: Should return empty list
        assertNotNull(materials);
        assertTrue(materials.isEmpty());
    }

    @Test
    void testDeleteByIds() {
        // Given: Insert multiple materials
        Material material1 = createTestMaterial("DELETE-001", "To Delete 1");
        materialRepository.insert(material1);
        Integer id1 = materialRepository.getByPartNumber("DELETE-001").orElseThrow().getId();

        Material material2 = createTestMaterial("DELETE-002", "To Delete 2");
        materialRepository.insert(material2);
        Integer id2 = materialRepository.getByPartNumber("DELETE-002").orElseThrow().getId();

        // When: Delete by IDs
        materialRepository.deleteByIds(List.of(id1, id2));

        // Then: Should be deprecated
        assertFalse(materialRepository.getById(id1).isPresent());
        assertFalse(materialRepository.getById(id2).isPresent());

        // But should exist with deprecated flag
        assertTrue(materialRepository.getByIdIncludeDeprecated(id1).orElseThrow().getDeprecated());
        assertTrue(materialRepository.getByIdIncludeDeprecated(id2).orElseThrow().getDeprecated());
    }

    @Test
    void testFindMissingIds() {
        // Given: Insert some materials
        Material material1 = createTestMaterial("MISSING-001", "Material 1");
        materialRepository.insert(material1);
        Integer existingId = materialRepository.getByPartNumber("MISSING-001").orElseThrow().getId();

        // When: Check for missing IDs
        List<Integer> idsToCheck = List.of(existingId, 99999, 99998);
        List<Integer> missingIds = materialRepository.findMissingIds(idsToCheck);

        // Then: Should return only non-existent IDs
        assertNotNull(missingIds);
        assertEquals(2, missingIds.size(), "Should find 2 missing IDs");
        assertTrue(missingIds.contains(99999));
        assertTrue(missingIds.contains(99998));
        assertFalse(missingIds.contains(existingId), "Existing ID should not be in missing list");
    }

    @Test
    void testFindMissingIdsEmptyList() {
        // When: Check empty list
        List<Integer> missingIds = materialRepository.findMissingIds(List.of());

        // Then: Should return empty list
        assertNotNull(missingIds);
        assertTrue(missingIds.isEmpty());
    }

    // ========== Helper Methods ==========

    private Material createTestMaterial(String partNumber, String name) {
        Material material = new Material();
        material.setPartNumber(partNumber);
        material.setNormalizedPartNumber(partNumber.toUpperCase());
        material.setName(name);
        material.setHsCode(null);
        material.setDeprecated(false);
        return material;
    }
}
@@ -0,0 +1,208 @@
package de.avatic.lcc.repositories;

import de.avatic.lcc.dto.generic.NodeType;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult;
import de.avatic.lcc.model.db.nodes.Node;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.math.BigDecimal;
import java.util.List;
import java.util.Optional;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Integration tests for NodeRepository.
 * <p>
 * Tests critical functionality across both MySQL and MSSQL:
 * - Basic CRUD operations
 * - Pagination with ORDER BY (MSSQL requirement)
 * - Haversine distance calculations
 * - Complex search queries
 * <p>
 * Run with:
 * <pre>
 * mvn test -Dspring.profiles.active=test,mysql -Dtest=NodeRepositoryIntegrationTest
 * mvn test -Dspring.profiles.active=test,mssql -Dtest=NodeRepositoryIntegrationTest
 * </pre>
 */
class NodeRepositoryIntegrationTest extends AbstractRepositoryIntegrationTest {

    @Autowired
    private NodeRepository nodeRepository;

    @Test
    void testInsertAndRetrieveNode() {
        // Given
        Node node = new Node();
        node.setName("Test Node");
        node.setAddress("Test Address 123");
        node.setGeoLat(new BigDecimal("52.5200"));
        node.setGeoLng(new BigDecimal("13.4050"));
        node.setDeprecated(false);
        node.setCountryId(1); // Assuming country with id=1 exists in Flyway migrations

        // When
        Integer nodeId = nodeRepository.insert(node);

        // Then
        assertNotNull(nodeId, "Node ID should not be null");
        assertTrue(nodeId > 0, "Node ID should be positive");

        Optional<Node> retrieved = nodeRepository.getById(nodeId);
        assertTrue(retrieved.isPresent(), "Node should be retrievable after creation");
        assertEquals("Test Node", retrieved.get().getName());
        assertEquals("Test Address 123", retrieved.get().getAddress());
    }

    @Test
    void testUpdateNode() {
        // Given: Create a node first
        Node node = createTestNode("Original Name", "Original Address", "50.0", "10.0");
        Integer nodeId = nodeRepository.insert(node);

        // When: Update the node
        Node updatedNode = nodeRepository.getById(nodeId).orElseThrow();
        updatedNode.setName("Updated Name");
        updatedNode.setAddress("Updated Address");
        nodeRepository.update(updatedNode);

        // Then: Verify update
        Node result = nodeRepository.getById(nodeId).orElseThrow();
        assertEquals("Updated Name", result.getName());
        assertEquals("Updated Address", result.getAddress());
    }

    @Test
    void testDeprecateNode() {
        // Given: Create a node
        Node node = createTestNode("Node to Deprecate", "Address", "50.0", "10.0");
        Integer nodeId = nodeRepository.insert(node);

        // When: Deprecate the node
        nodeRepository.setDeprecatedById(nodeId);

        // Then: Verify node is deprecated
        Node deprecated = nodeRepository.getById(nodeId).orElseThrow();
        assertTrue(deprecated.getDeprecated(), "Node should be marked as deprecated");
    }

    @Test
    void testListNodesWithPagination() {
        // Given: Create multiple nodes
        for (int i = 1; i <= 5; i++) {
            Node node = createTestNode("Pagination Node " + i, "Address " + i, "50." + i, "10." + i);
            nodeRepository.insert(node);
        }

        // When: List nodes with pagination (page 1, size 3)
        SearchQueryPagination pagination = new SearchQueryPagination(1, 3);
        SearchQueryResult<Node> result = nodeRepository.listNodes(null, false, pagination);

        // Then: Verify pagination works (ORDER BY is required for MSSQL)
        assertNotNull(result);
        assertNotNull(result.toList());
        assertTrue(result.toList().size() <= 3, "Should return at most 3 nodes per page");
    }

    @Test
    void testSearchNodeWithFilter() {
        // Given: Create nodes with different names
        Node node1 = createTestNode("Berlin Node Test", "Berlin Street 1", "52.5200", "13.4050");
        Node node2 = createTestNode("Munich Node Test", "Munich Street 1", "48.1351", "11.5820");
        Node node3 = createTestNode("Hamburg Node Test", "Hamburg Street 1", "53.5511", "9.9937");
        nodeRepository.insert(node1);
        nodeRepository.insert(node2);
        nodeRepository.insert(node3);

        // When: Search for nodes containing "Berlin"
        List<Node> results = nodeRepository.searchNode("Berlin", 10, null, false);

        // Then: Should find Berlin node
        assertFalse(results.isEmpty(), "Should find at least one node");
        assertTrue(results.stream().anyMatch(n -> n.getName().contains("Berlin")),
                "Should contain Berlin node");
    }

    @Test
    void testGetByDistanceWithHaversineFormula() {
        // Given: Create a reference node (Berlin)
        Node referenceNode = createTestNode("Berlin Distance Test", "Berlin Center", "52.5200", "13.4050");
        referenceNode.setUserNode(false);
        Integer refId = nodeRepository.insert(referenceNode);
        referenceNode.setId(refId);

        // Create a nearby node (Potsdam, ~30km from Berlin)
        Node nearbyNode = createTestNode("Potsdam Distance Test", "Potsdam Center", "52.3906", "13.0645");
        nodeRepository.insert(nearbyNode);

        // Create a far node (Munich, ~500km from Berlin)
        Node farNode = createTestNode("Munich Distance Test", "Munich Center", "48.1351", "11.5820");
        nodeRepository.insert(farNode);

        // When: Get nodes within 100km radius
        // The Haversine formula returns distance in kilometers for both MySQL and MSSQL
        List<Node> nodesWithin100km = nodeRepository.getByDistance(referenceNode, 100);

        // Then: Should find nearby node but not far node
        assertNotNull(nodesWithin100km);
        assertTrue(nodesWithin100km.stream().anyMatch(n -> n.getName().contains("Potsdam")),
                "Should find Potsdam (30km away)");
        assertFalse(nodesWithin100km.stream().anyMatch(n -> n.getName().contains("Munich")),
                "Should not find Munich (500km away)");
    }
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void testGetByDistanceExcludingReferenceNode() {
|
||||||
|
// Given: Create reference node
|
||||||
|
Node referenceNode = createTestNode("Reference Node Distance", "Ref Address", "50.0", "10.0");
|
||||||
|
referenceNode.setUserNode(false);
|
||||||
|
Integer refId = nodeRepository.insert(referenceNode);
|
||||||
|
referenceNode.setId(refId);
|
||||||
|
|
||||||
|
// Create nearby node
|
||||||
|
Node nearbyNode = createTestNode("Nearby Node Distance", "Nearby Address", "50.1", "10.1");
|
||||||
|
nodeRepository.insert(nearbyNode);
|
||||||
|
|
||||||
|
// When: Get nodes within large radius
|
||||||
|
List<Node> results = nodeRepository.getByDistance(referenceNode, 1000);
|
||||||
|
|
||||||
|
// Then: Reference node itself should be excluded (via id != ?)
|
||||||
|
assertFalse(results.stream().anyMatch(n -> n.getId().equals(refId)),
|
||||||
|
"Reference node should be excluded from results");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void testBooleanLiteralCompatibility() {
|
||||||
|
// Given: Create deprecated and non-deprecated nodes
|
||||||
|
Node deprecatedNode = createTestNode("Deprecated Boolean Test", "Addr1", "50.0", "10.0");
|
||||||
|
Integer depId = nodeRepository.insert(deprecatedNode);
|
||||||
|
nodeRepository.setDeprecatedById(depId);
|
||||||
|
|
||||||
|
Node activeNode = createTestNode("Active Boolean Test", "Addr2", "50.1", "10.1");
|
||||||
|
nodeRepository.insert(activeNode);
|
||||||
|
|
||||||
|
// When: Search excluding deprecated nodes
|
||||||
|
List<Node> activeNodes = nodeRepository.searchNode("Boolean Test", 100, null, true);
|
||||||
|
|
||||||
|
// Then: Should not include deprecated node
|
||||||
|
assertFalse(activeNodes.stream().anyMatch(n -> n.getId().equals(depId)),
|
||||||
|
"Should exclude deprecated nodes when excludeDeprecated=true");
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========== Helper Methods ==========
|
||||||
|
|
||||||
|
private Node createTestNode(String name, String address, String lat, String lng) {
|
||||||
|
Node node = new Node();
|
||||||
|
node.setName(name);
|
||||||
|
node.setAddress(address);
|
||||||
|
node.setGeoLat(new BigDecimal(lat));
|
||||||
|
node.setGeoLng(new BigDecimal(lng));
|
||||||
|
node.setDeprecated(false);
|
||||||
|
node.setCountryId(1); // Assuming country id=1 exists
|
||||||
|
node.setUserNode(false);
|
||||||
|
return node;
|
||||||
|
}
|
||||||
|
}
|
||||||
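The test class above extends AbstractRepositoryIntegrationTest, which is not part of this diff excerpt; per the Javadoc, subclasses run against MySQL or MSSQL via the test,mysql and test,mssql Spring profiles. The following is only a minimal sketch of what such a base class might look like, assuming Testcontainers with Spring's @DynamicPropertySource. The class name suffix "Sketch", the mysql:8.0 image tag, and the spring.datasource.* property keys are assumptions, not the project's actual setup, and only the MySQL path is shown.

import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.MySQLContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;

// Hypothetical sketch -- the real AbstractRepositoryIntegrationTest is not shown in this diff.
@Testcontainers
@SpringBootTest
abstract class AbstractRepositoryIntegrationTestSketch {

    // One shared container for all subclasses; static so it starts once per test class hierarchy.
    @Container
    static final MySQLContainer<?> MYSQL = new MySQLContainer<>("mysql:8.0");

    // Point Spring's datasource at the container started by Testcontainers.
    @DynamicPropertySource
    static void datasourceProperties(DynamicPropertyRegistry registry) {
        registry.add("spring.datasource.url", MYSQL::getJdbcUrl);
        registry.add("spring.datasource.username", MYSQL::getUsername);
        registry.add("spring.datasource.password", MYSQL::getPassword);
    }
}

In the real project the MSSQL profile would presumably swap in an MSSQLServerContainer the same way, with Flyway migrations seeding the country row the tests rely on.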
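testGetByDistanceWithHaversineFormula assumes the repository's SQL computes great-circle distance in kilometers. The plain-Java sketch below is an illustrative cross-check of that Haversine calculation, not part of the repository API; the class and method names (HaversineSketch, haversineKm) and the Earth-radius constant are my own.

// Illustrative cross-check only -- the repository computes this in SQL.
final class HaversineSketch {

    /** Great-circle distance in kilometers between two points given as lat/lng in degrees. */
    static double haversineKm(double lat1, double lng1, double lat2, double lng2) {
        final double earthRadiusKm = 6371.0;
        double dLat = Math.toRadians(lat2 - lat1);
        double dLng = Math.toRadians(lng2 - lng1);
        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                * Math.sin(dLng / 2) * Math.sin(dLng / 2);
        return 2 * earthRadiusKm * Math.asin(Math.sqrt(a));
    }
}

For the coordinates used in the test, haversineKm(52.5200, 13.4050, 52.3906, 13.0645) comes out around 27 km (Berlin to Potsdam), comfortably inside the asserted 100 km radius, while Berlin to Munich is roughly 500 km and correctly falls outside it.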