Compare commits

...

63 commits

Author SHA1 Message Date
Renovate Bot
dfbc60b91a Update dependency org.mockito:mockito-core to v5.21.0 2026-02-10 18:35:35 +00:00
Jan
0f401ff77b set PLAYWRIGHT_DOWNLOAD_CONNECTION_TIMEOUT in test.yml to ensure stable browser installation 2026-02-10 09:58:59 +01:00
Jan
7f6b6378f2 updated Maven command in test.yml to use test-compile for Playwright browser installation 2026-02-10 09:45:39 +01:00
Jan
081d69577e added Maven compile step before Playwright browser installation in test.yml 2026-02-10 09:38:05 +01:00
Jan
2d8cbae7a5 added Allure annotations (Epic, Feature, Story) to integration tests 2026-02-10 09:17:10 +01:00
Jan
3af4b675eb added playwright browsers to test.yml 2026-02-08 22:11:07 +01:00
Jan
96d877d2ef added npm build to test.yml, reduced verbosity of jdbc in tests 2026-02-08 21:28:55 +01:00
Jan
2314892be4 use branch as project name 2026-02-08 20:44:48 +01:00
Jan
34df33bdea fixed allure again. 2026-02-08 20:42:08 +01:00
Jan
9b958696b4 batch upload allure 2026-02-08 20:20:22 +01:00
Jan
8f7b132dd4 added debug log to test execution 2026-02-08 20:08:08 +01:00
Jan
d2960bc892 fixed csrf token for allure in test.yml 2026-02-08 19:51:15 +01:00
Jan
448943dfe2 fixed indentation of test.yml 2026-02-08 19:37:59 +01:00
Jan
1563be7260 fixed test.yml and pom.xml (excludes controller tests) 2026-02-08 19:36:53 +01:00
Jan
65094b6cff fixed aspect j version in argline 2026-02-08 19:20:53 +01:00
Jan
2832e28790 fixed test.yml 2026-02-08 18:54:10 +01:00
Jan
b2af7a2718 fixed test.yml 2026-02-08 18:50:48 +01:00
Jan
be231d1c9d fixed test.yml 2026-02-08 18:44:38 +01:00
Jan
daa6bc46fe changed test container to a maven container 2026-02-08 18:38:09 +01:00
75060714b5 Merge pull request 'feature/systemtests' (#109) from feature/systemtests into dev
Reviewed-on: #109
2026-02-08 15:24:47 +00:00
Jan
b674b8f477 All tests running. fixed cases with container calculations containing "-". 2026-02-08 11:51:13 +01:00
Jan
c727bbccc2 wip: enhancing 2026-02-06 19:13:15 +01:00
Jan
adf3666430 wip: input data fixed 2026-02-06 14:37:54 +01:00
Jan
b389480cc8 wip: input data fixed 2026-02-05 19:30:42 +01:00
Jan
00dc7e9843 wip: intermediate commit. 2026-02-05 13:56:52 +01:00
Jan
26986b1131 wip: intermediate commit. testscripts running, but with high deviations. 2026-02-05 13:04:17 +01:00
Jan
b708af177f Merge remote-tracking branch 'origin/feature/frontend-optimization' into dev 2026-02-01 11:09:39 +01:00
Jan
f86e2fb1d8 using "!mssql" instead of "mysql" for MySQLDialectProvider so that if no database profile is set, the MySQLDialectProvider is used. 2026-01-31 18:04:11 +01:00
Jan
ae83b0845c using mysql as default config if no profile is active. 2026-01-31 17:54:11 +01:00
Jan
9dca1e8abb Added --down option to db.sh to stop database containers without deleting volumes. 2026-01-30 14:20:35 +01:00
Jan
8794a8a193 created db.sh script to start mysql or mssql in container. --clean deletes previous volumes and --users creates dummy users for debugging 2026-01-30 13:45:39 +01:00
Jan
15854e1076 Add part number chips feature to CalculationMassEdit modal
- Display selected part numbers as chips with a count summary.
- Add logic to extract and display unique part numbers from edit IDs.
- Update modal and style components to support the new feature.
2026-01-28 22:07:16 +01:00
Jan
8e428af4d2 Updated CLAUDE.md to include multi database support. 2026-01-28 21:08:25 +01:00
Jan
21d00b8756 Refactored PackagingPropertiesRepositoryIntegrationTest 2026-01-28 20:56:50 +01:00
Jan
96715562e6 Added integration tests for CalculationJobDestinationRepository and CalculationJobRouteSectionRepository for MySQL and MSSQL; 2026-01-28 17:53:42 +01:00
Jan
8d08fedbc4 Added integration tests for RouteRepository and RouteNodeRepository for MySQL and MSSQL; Marked DestinationRepository as @Repository. 2026-01-28 12:18:52 +01:00
Jan
a381ca7ef8 Improved removeOld method in BulkOperationRepository to fix subquery limitations in MySQL and optimize deletion logic. 2026-01-28 11:21:00 +01:00
Jan
ffc08ebff6 Added integration tests for BulkOperationRepository and CalculationJobRepository for MySQL and MSSQL. 2026-01-28 11:20:39 +01:00
Jan
52116be1c3 Added integration tests for ContainerRateRepository, GroupRepository, and UserRepository for MySQL and MSSQL. Improved test coverage for pagination, filtering, UPSERT operations, and data cleanup methods. 2026-01-28 10:36:24 +01:00
Jan
5c8165c60e Added MatrixRateRepositoryIntegrationTest for MySQL and MSSQL; ensured data cleanup, improved test coverage for rate operations, and adjusted SQL logic for validation and copying between periods. 2026-01-28 09:12:27 +01:00
Jan
a5fd03cc68 Refactored integration tests for NomenclatureRepository to ensure test data cleanup and alignment with updated table structure. Removed unused beans and dependencies in RepositoryTestConfig. 2026-01-28 00:15:02 +01:00
Jan
3f8453f93b Refactored and extended integration tests for repositories; added data cleanup methods, improved test coverage for property date handling, and adjusted SQL queries for validity and property set logic. Removed unused dependencies in NomenclatureRepository. 2026-01-27 22:58:39 +01:00
Jan
1a5a00e111 Removed MSSQL-specific test skips and fixed buildInsertIgnoreStatement by replacing IF NOT EXISTS logic with MERGE syntax. Adjusted SQL query to include missing GROUP BY fields in CountryPropertyRepository. 2026-01-27 22:10:17 +01:00
Jan
861c5e7bbc Added integration tests for CountryPropertyRepository, SysErrorRepository, and PropertyRepository for MySQL and MSSQL. 2026-01-27 21:21:44 +01:00
Jan
6fc0839320 Added integration tests for UserNodeRepository, MaterialRepository, NomenclatureRepository, and PackagingDimensionRepository for MySQL and MSSQL. 2026-01-27 20:05:11 +01:00
Jan
919c9d0499 Added PackagingRepositoryIntegrationTest for MySQL and MSSQL; extended PackagingRepository with additional fields in SELECT query and improved SQL pagination logic. 2026-01-27 19:27:13 +01:00
Jan
c25f00bb01 Added CountryRepositoryIntegrationTest for MySQL and MSSQL, updated CountryRepository to support dialect-specific boolean literals. 2026-01-27 18:20:54 +01:00
Jan
8e6cc8cf07 Added TestContainers-based testing configuration for MySQL and MSSQL integration tests. Added module test for DialectProviders, Smoketests for TestContainers-based integration tests. NodeRepositoryIntegrationTest. Other Repository integration tests still missing. 2026-01-27 18:04:08 +01:00
Jan
5fb025e4b3 further sql fixes. frontend fix height of help system 2026-01-27 13:16:07 +01:00
Jan
e53f865210 further sql fixes 2026-01-27 12:40:11 +01:00
Jan
b1d46c1057 Updated migration scripts to ensure Unicode compatibility by using N-prefixed strings in MSSQL SQL INSERT statements. 2026-01-27 12:10:06 +01:00
Jan
1baf3111aa Updated migration scripts to ensure Unicode compatibility by using N-prefixed strings in MSSQL SQL INSERT statements. 2026-01-27 11:06:47 +01:00
Jan
cd411d8b01 Updated MSSQL schema migration script to use NVARCHAR for Unicode support. 2026-01-27 10:57:01 +01:00
Jan
48ce77dad3 Added missing boolean values to dialectProvider 2026-01-27 10:43:40 +01:00
Jan
5b2018c9e0 Fixed "TRUE/FALSE" => "0/1" mssql 2026-01-27 10:00:48 +01:00
Jan
28ee19d654 Ported flyway migration steps to mssql. Add initial MSSQL support with Docker scripts and SQL initialization. 2026-01-27 09:15:40 +01:00
Jan
0d4fb1f04f Step 2.3 - Add MSSQLDialectProvider Implementation 2026-01-26 21:08:04 +01:00
Jan
5866f8edc8 Fixed build errors 2026-01-25 19:41:52 +01:00
Jan
eff5d26ea3 Step 2.3 - Finalize Repositories with SqlDialectProvider Integration 2026-01-25 19:30:45 +01:00
Jan
1084c5b1cd Step 2.2 - Mid-Priority Repositories 2026-01-25 19:16:22 +01:00
Jan
29675f9ff4 Step 2.1: Critical repositories (3 tasks) 2026-01-25 18:42:31 +01:00
Jan
10a8cfa72b Step 1 - Foundation & infrastructure (SqlDialectProvider interface, Maven dependencies, configuration) 2026-01-25 18:30:51 +01:00
Jan
417221eca8 Fix: More stable bulk geocoding. Added @Transactional to outer bulk service call, to revert all changes to database if anything fails 2026-01-23 16:57:06 +01:00
152 changed files with 64911 additions and 1199 deletions


@ -0,0 +1,21 @@
{
"permissions": {
"allow": [
"Bash(tree:*)",
"Bash(xargs:*)",
"Bash(mvn compile:*)",
"Bash(mvn test-compile:*)",
"Bash(find:*)",
"Bash(mvn test:*)",
"Bash(tee:*)",
"Bash(export TESTCONTAINERS_RYUK_DISABLED=true)",
"Bash(echo:*)",
"Bash(pgrep:*)",
"Bash(pkill:*)",
"Bash(ls:*)",
"Bash(sleep 120 echo \"=== Screenshots generated so far ===\" ls -la target/screenshots/case_*.png)",
"Bash(wc:*)",
"Bash(export DOCKER_HOST=unix:///run/user/1000/podman/podman.sock)"
]
}
}

118
.gitea/workflows/test.yml Normal file

@ -0,0 +1,118 @@
name: Tests
on:
push:
branches: [main, dev]
pull_request:
branches: [main]
env:
ALLURE_SERVER: "http://10.80.0.6:5050"
ALLURE_PROJECT: "lcc-${{ gitea.ref_name }}"
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Java 23
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: '23'
cache: 'maven'
- name: Install Maven
run: |
apt-get update && apt-get install -y maven
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Build Frontend
run: cd src/frontend && npm ci && BUILD_FOR_SPRING=true npm run build
- name: Install Playwright Browsers
run: |
mvn test-compile -B --no-transfer-progress
mvn exec:java -e -D exec.mainClass=com.microsoft.playwright.CLI -D exec.classpathScope=test -D exec.args="install --with-deps chromium"
env:
PLAYWRIGHT_DOWNLOAD_CONNECTION_TIMEOUT: "300000"
- name: Run Tests
run: mvn verify -B --no-transfer-progress
env:
TESTCONTAINERS_RYUK_DISABLED: "true"
- name: Prepare Allure Results
if: always()
run: |
mkdir -p target/allure-results
cat > target/allure-results/executor.json << EOF
{
"name": "Gitea Actions",
"type": "gitea",
"buildName": "#${{ gitea.run_number }}",
"buildOrder": ${{ gitea.run_number }},
"buildUrl": "${{ gitea.server_url }}/${{ gitea.repository }}/actions/runs/${{ gitea.run_id }}"
}
EOF
- name: Upload to Allure
if: always()
run: |
# Login
curl -s -c cookies.txt \
-X POST "${ALLURE_SERVER}/allure-docker-service/login" \
-H 'Content-Type: application/json' \
-d '{"username":"admin","password":"${{ secrets.ALLURE_PASSWORD }}"}'
CSRF_TOKEN=$(grep csrf_access_token cookies.txt | awk '{print $7}')
# Create project
curl -s -o /dev/null -b cookies.txt \
-H "X-CSRF-TOKEN: ${CSRF_TOKEN}" \
-X POST "${ALLURE_SERVER}/allure-docker-service/projects" \
-H "Content-Type: application/json" \
-d '{"id":"'${ALLURE_PROJECT}'"}' || true
# Clean old results
curl -s -o /dev/null -b cookies.txt \
-H "X-CSRF-TOKEN: ${CSRF_TOKEN}" \
"${ALLURE_SERVER}/allure-docker-service/clean-results?project_id=${ALLURE_PROJECT}"
# Build JSON payload with base64
echo '{"results":[' > payload.json
FIRST=true
for f in target/allure-results/*; do
if [ -f "$f" ]; then
FILENAME=$(basename "$f")
CONTENT=$(base64 -w 0 "$f")
if [ "$FIRST" = true ]; then
FIRST=false
else
echo ',' >> payload.json
fi
echo '{"file_name":"'"$FILENAME"'","content_base64":"'"$CONTENT"'"}' >> payload.json
fi
done
echo ']}' >> payload.json
# Upload via JSON
curl -s -o /dev/null -b cookies.txt \
-H "X-CSRF-TOKEN: ${CSRF_TOKEN}" \
-H "Content-Type: application/json" \
-X POST "${ALLURE_SERVER}/allure-docker-service/send-results?project_id=${ALLURE_PROJECT}" \
-d @payload.json
# Generate report
curl -s -b cookies.txt \
-H "X-CSRF-TOKEN: ${CSRF_TOKEN}" \
"${ALLURE_SERVER}/allure-docker-service/generate-report?project_id=${ALLURE_PROJECT}"
echo "✅ Allure upload complete"

1
.gitignore vendored

@ -14,6 +14,7 @@ target/
.sts4-cache
.env.example
/.env
/.env.*
### IntelliJ IDEA ###
.idea

636
CLAUDE.md Normal file

@ -0,0 +1,636 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
LCC (Logistic Cost Calculator) is a Spring Boot 3.5.9 backend API for calculating complex logistics costs across supply chain networks. It handles materials, packaging, transportation rates, route planning, and multi-component cost calculations including customs duties, handling, inventory, and risk assessment.
**Database Support:** The application supports both **MySQL 8.0** and **MSSQL Server 2022** through a database abstraction layer (`SqlDialectProvider`), allowing deployment flexibility across different database platforms.
## Build & Run Commands
```bash
# Build the project
mvn clean install
# Run the application (default: MySQL)
mvn spring-boot:run
# Run with MSSQL
mvn spring-boot:run -Dspring.profiles.active=mssql
# Run all tests on MySQL
mvn test -Dspring.profiles.active=test,mysql
# Run all tests on MSSQL
mvn test -Dspring.profiles.active=test,mssql
# Run repository integration tests on both databases
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mysql
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mssql
# Run a specific test class
mvn test -Dtest=NodeControllerIntegrationTest
# Run a specific test method
mvn test -Dtest=NodeControllerIntegrationTest#shouldReturnListOfNodesWithDefaultPagination
# Skip tests during build
mvn clean install -DskipTests
# Generate JAXB classes from WSDL (EU taxation service)
mvn jaxb:generate
# Generate Allure test report (requires allure-commandline)
mvn clean test
allure serve target/allure-results
```
## Development Environment (Distrobox)
**IMPORTANT:** This project runs inside a **Distrobox** container. This affects how TestContainers and Podman work.
### TestContainers with Distrobox + Podman
TestContainers needs access to the **host's Podman socket**, not the one inside the Distrobox. The configuration is handled via `~/.testcontainers.properties`:
```properties
docker.host=unix:///run/host/run/user/1000/podman/podman.sock
ryuk.disabled=true
```
### Troubleshooting TestContainers / Podman Issues
If tests fail with "Could not find a valid Docker environment":
1. **Check if Podman works on the host:**
```bash
distrobox-host-exec podman info
```
2. **If you see cgroup or UID/GID errors, run migration on the host:**
```bash
distrobox-host-exec podman system migrate
```
3. **Restart podman socket on host if needed:**
```bash
distrobox-host-exec systemctl --user restart podman.socket
```
4. **Verify the host socket is accessible from Distrobox:**
```bash
ls -la /run/host/run/user/1000/podman/podman.sock
```
5. **Test container execution via host:**
```bash
distrobox-host-exec podman run --rm hello-world
```
### Key Paths
| Path | Description |
|------|-------------|
| `/run/host/run/user/1000/podman/podman.sock` | Host's Podman socket (accessible from Distrobox) |
| `~/.testcontainers.properties` | TestContainers configuration file |
## Architecture
### Layered Architecture
```
Controllers → DTOs → Services → Transformers → Repositories → SqlDialectProvider → Database (MySQL/MSSQL)
```
### Package Structure (`de.avatic.lcc`)
- **controller/** - REST endpoints organized by domain (calculation, configuration, bulk, users, report)
- **service/access/** - Business logic for domain entities (PremisesService, MaterialService, NodeService, etc.)
- **service/calculation/** - Logistics cost calculation orchestration and step services
- **service/calculation/execution/steps/** - Individual calculation components (airfreight, handling, inventory, customs, etc.)
- **service/bulk/** - Excel-based bulk import/export operations
- **service/api/** - External API integrations (Azure Maps geocoding, EU taxation)
- **service/transformer/** - Entity-to-DTO mapping
- **repositories/** - JDBC-based data access (not JPA) with custom RowMappers
- **database/dialect/** - Database abstraction layer (SqlDialectProvider, MySQLDialectProvider, MSSQLDialectProvider)
- **model/db/** - Database entity classes
- **dto/** - Data transfer objects for API contracts
### Key Design Decisions
- **JDBC over JPA**: Uses `JdbcTemplate` and `NamedParameterJdbcTemplate` for complex queries
- **SqlDialectProvider abstraction**: Database-agnostic SQL through dialect-specific implementations (MySQL/MSSQL)
- **Transformer layer**: Explicit DTO mapping keeps entities separate from API contracts
- **Calculation chain**: Cost calculations broken into fine-grained services in `execution/steps/`
- **Profile-based configuration**: Spring profiles for environment-specific database selection
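For the "JDBC over JPA" point above, here is a minimal sketch of a `NamedParameterJdbcTemplate` query, assuming a hypothetical node lookup; the table and column names are illustrative and not taken from the actual repositories:
```java
import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

public class NamedQuerySketch {

    private final NamedParameterJdbcTemplate jdbcTemplate;

    public NamedQuerySketch(NamedParameterJdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Named parameters keep complex queries readable compared to positional '?' placeholders.
    public List<String> findActiveNodeNames(String country) {
        String sql = "SELECT name FROM node WHERE country = :country AND is_active = :active";
        return jdbcTemplate.queryForList(sql,
                Map.of("country", country, "active", true),
                String.class);
    }
}
```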
### Core Calculation Flow
```
CalculationExecutionService.launchJobCalculation()
→ ContainerCalculationService (container type selection: FEU/TEU/HC/TRUCK)
→ RouteSectionCostCalculationService (per-section costs)
→ AirfreightCalculationService
→ HandlingCostCalculationService
→ InventoryCostCalculationService
→ CustomCostCalculationService (tariff/duties)
```
### Authorization Model
Role-based access control via `@PreAuthorize` annotations:
- SUPER, CALCULATION, MATERIAL, FREIGHT, PACKAGING, BASIC
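A minimal sketch of how these roles are applied, following the same `@PreAuthorize`/`hasAnyRole` pattern used by the existing controllers (the endpoints and method names here are hypothetical):
```java
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/api/example")
public class ExampleRoleController {

    // Write access restricted to calculation-related roles (hypothetical endpoint).
    @PostMapping("/calculate")
    @PreAuthorize("hasAnyRole('SUPER', 'CALCULATION')")
    public ResponseEntity<Void> startCalculation() {
        return ResponseEntity.accepted().build();
    }

    // Read-only access also allowed for BASIC (hypothetical endpoint).
    @GetMapping("/status")
    @PreAuthorize("hasAnyRole('SUPER', 'BASIC')")
    public ResponseEntity<String> status() {
        return ResponseEntity.ok("ok");
    }
}
```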
## Testing
### Test Architecture
**Integration Test Base Class:**
All repository integration tests extend `AbstractRepositoryIntegrationTest`, which provides:
- `JdbcTemplate` for test data setup
- `SqlDialectProvider` for database-agnostic SQL
- Helper methods: `isMysql()`, `isMssql()`, `executeRawSql()`
- Automatic TestContainers setup via `@Testcontainers`
- Transaction isolation via `@Transactional`
**TestContainers Setup:**
```java
@SpringBootTest(classes = {RepositoryTestConfig.class})
@Testcontainers
@Import(DatabaseTestConfiguration.class)
@Transactional
public abstract class AbstractRepositoryIntegrationTest {
@Autowired
protected JdbcTemplate jdbcTemplate;
@Autowired
protected SqlDialectProvider dialectProvider;
protected boolean isMysql() {
return getDatabaseProfile().contains("mysql");
}
protected void executeRawSql(String sql, Object... params) {
jdbcTemplate.update(sql, params);
}
}
```
**DatabaseTestConfiguration:**
- MySQL: `MySQLContainer` with `mysql:8.0` image
- MSSQL: `MSSQLServerContainer` with `mcr.microsoft.com/mssql/server:2022-latest` image
- Profile-based activation via `@Profile("mysql")` and `@Profile("mssql")`
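A minimal sketch of what this profile-based TestContainers setup can look like; only the images and the `@Profile` activation are taken from the list above, while the class name and the use of `@ServiceConnection` are assumptions for illustration:
```java
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Profile;
import org.testcontainers.containers.MSSQLServerContainer;
import org.testcontainers.containers.MySQLContainer;
import org.testcontainers.utility.DockerImageName;

@TestConfiguration
public class DatabaseTestConfigurationSketch {

    // Started only when the "mysql" profile is active.
    @Bean
    @Profile("mysql")
    @ServiceConnection
    MySQLContainer<?> mysqlContainer() {
        return new MySQLContainer<>(DockerImageName.parse("mysql:8.0"));
    }

    // Started only when the "mssql" profile is active; the EULA must be accepted explicitly.
    @Bean
    @Profile("mssql")
    @ServiceConnection
    MSSQLServerContainer<?> mssqlContainer() {
        return new MSSQLServerContainer<>(
                DockerImageName.parse("mcr.microsoft.com/mssql/server:2022-latest"))
                .acceptLicense();
    }
}
```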
### Database-Agnostic Test Patterns
**Pattern 1: Boolean literals in test data**
```java
String sql = String.format(
"INSERT INTO node (name, is_active) VALUES (?, %s)",
dialectProvider.getBooleanTrue());
```
**Pattern 2: Auto-increment ID retrieval**
```java
executeRawSql("INSERT INTO table (name) VALUES (?)", name);
String selectSql = isMysql() ? "SELECT LAST_INSERT_ID()" : "SELECT CAST(@@IDENTITY AS INT)";
return jdbcTemplate.queryForObject(selectSql, Integer.class);
```
**Pattern 3: Date functions**
```java
String dateFunc = isMysql() ? "NOW()" : "GETDATE()";
String sql = String.format("INSERT INTO table (created_at) VALUES (%s)", dateFunc);
```
### Running Tests
**Run all tests on MySQL:**
```bash
mvn test -Dspring.profiles.active=test,mysql
```
**Run all tests on MSSQL:**
```bash
mvn test -Dspring.profiles.active=test,mssql
```
**Run specific repository tests:**
```bash
mvn test -Dtest=CalculationJobRepositoryIntegrationTest -Dspring.profiles.active=test,mysql
mvn test -Dtest=CalculationJobRepositoryIntegrationTest -Dspring.profiles.active=test,mssql
```
**Run all repository integration tests on both databases:**
```bash
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mysql
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mssql
```
### Test Coverage
**Current Status (as of Phase 6 completion):**
- **365 tests** passing on both MySQL and MSSQL (100% success rate)
- **28 repository integration test classes** covering:
- Calculation repositories (CalculationJobRepository, CalculationJobDestinationRepository, CalculationJobRouteSectionRepository)
- Configuration repositories (NodeRepository, MaterialRepository, PackagingRepository, CountryRepository)
- Rate repositories (ContainerRateRepository, MatrixRateRepository)
- Property repositories (PropertyRepository, CountryPropertyRepository, PackagingPropertiesRepository)
- User repositories (UserRepository, GroupRepository)
- Bulk operation repositories (BulkOperationRepository)
- And 14 additional repositories
**Test Data:**
- `@Sql` annotations for controller integration tests from `src/test/resources/master_data/`
- Repository tests use inline SQL with `executeRawSql()` for database-agnostic test data setup
- Test data cleanup in `@BeforeEach` respects foreign key constraints
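A minimal sketch combining the conventions above; the table names are illustrative, and only `@Sql`, `executeRawSql()`, and the `@BeforeEach` cleanup order reflect the documented approach:
```java
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.test.context.jdbc.Sql;

// Controller tests load master data from src/test/resources/master_data/ via @Sql.
@Sql("/master_data/users.sql")
class SomeControllerIntegrationTest {
    // ... controller test methods ...
}

// Repository tests set up data inline and clean up before each test.
class SomeRepositoryIntegrationTest extends AbstractRepositoryIntegrationTest {

    @BeforeEach
    void cleanUp() {
        // Delete child rows before parent rows so foreign key constraints are respected.
        executeRawSql("DELETE FROM child_table");
        executeRawSql("DELETE FROM parent_table");
    }

    @Test
    void insertsRow() {
        executeRawSql(
                "INSERT INTO parent_table (name, is_active) VALUES (?, "
                        + dialectProvider.getBooleanTrue() + ")",
                "example");
    }
}
```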
### Allure Test Reporting
**Overview:**
All tests (46 test classes, ~624 test methods) are annotated with Allure reporting framework annotations for comprehensive test documentation and reporting.
**Annotation Hierarchy:**
```java
@Epic("Controller") // Test layer: Controller, Repository, Database Layer, End-to-End
@Feature("Calculation") // Domain/subpackage: Calculation, Configuration, Master Data, etc.
@DisplayName("Test Suite Name") // Human-readable test suite name
class ExampleTest {
@Test
@Story("Create new calculation") // Test scenario description
@DisplayName("Should create calculation with valid data")
void testCreateCalculation() { ... }
}
```
**Annotation Coverage by Layer:**
| Layer | Epic | Features | Test Classes | Test Methods |
|-------|------|----------|--------------|--------------|
| **Controller** | `@Epic("Controller")` | Configuration, Calculation, Report | 11 | ~100 |
| **Repository** | `@Epic("Repository")` | Calculation, Master Data, Premise, Rates, Properties, Country, Packaging, Users, Bulk, Error, Infrastructure | 28 | ~400 |
| **Database Layer** | `@Epic("Database Layer")` | MySQL Dialect, MSSQL Dialect | 2 | ~42 |
| **End-to-End** | `@Epic("End-to-End")` | Smoke Tests, Calculation Workflow, Deviation Analysis | 3 | ~7 |
**Local Report Generation:**
```bash
# Run tests and generate Allure results
mvn clean test -Dspring.profiles.active=test,mysql
# Generate and view Allure report (requires allure-commandline)
allure serve target/allure-results
```
**CI/CD Integration:**
- Gitea Actions workflow (`.gitea/workflows/test.yml`) automatically uploads Allure results to Allure server
- Reports available at: `http://10.80.0.6:5050` (project: `lcc-{branch}`)
- Each CI run generates a new report with execution metadata
**Allure Configuration:**
- Dependency: `io.qameta.allure:allure-junit5` (version 2.29.0)
- Results directory: `target/allure-results`
- Report includes: test duration, stack traces, categorization by Epic/Feature/Story
## Database
### Multi-Database Support
The application supports both **MySQL 8.0** and **MSSQL Server 2022** through the `SqlDialectProvider` abstraction layer.
**Database selection via Spring profiles:**
- `mysql` - MySQL 8.0 (default)
- `mssql` - Microsoft SQL Server 2022
**Environment variables:**
```bash
export SPRING_PROFILES_ACTIVE=mysql # or mssql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=your_user
export DB_PASSWORD=your_password
```
### SqlDialectProvider Pattern
Database-specific SQL syntax is abstracted through `de.avatic.lcc.database.dialect.SqlDialectProvider`:
- **MySQLDialectProvider** - MySQL-specific SQL (LIMIT/OFFSET, NOW(), ON DUPLICATE KEY UPDATE, FOR UPDATE SKIP LOCKED)
- **MSSQLDialectProvider** - MSSQL-specific SQL (OFFSET/FETCH, GETDATE(), MERGE, WITH (UPDLOCK, READPAST))
**Key dialect differences:**
| Feature | MySQL | MSSQL |
|---------|-------|-------|
| Pagination | `LIMIT ? OFFSET ?` | `OFFSET ? ROWS FETCH NEXT ? ROWS ONLY` |
| Current timestamp | `NOW()` | `GETDATE()` |
| Date subtraction | `DATE_SUB(NOW(), INTERVAL 3 DAY)` | `DATEADD(DAY, -3, GETDATE())` |
| Boolean literals | `TRUE`, `FALSE` | `1`, `0` |
| Auto-increment | `AUTO_INCREMENT` | `IDENTITY(1,1)` |
| Upsert | `ON DUPLICATE KEY UPDATE` | `MERGE` statement |
| Insert ignore | `INSERT IGNORE` | `IF NOT EXISTS ... INSERT` |
| Skip locked rows | `FOR UPDATE SKIP LOCKED` | `WITH (UPDLOCK, READPAST)` |
| Last insert ID | `LAST_INSERT_ID()` | `CAST(@@IDENTITY AS INT)` |
**Repository usage example:**
```java
@Repository
public class ExampleRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public ExampleRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
public List<Entity> list(int limit, int offset) {
String sql = "SELECT * FROM table ORDER BY id " +
dialectProvider.buildPaginationClause(limit, offset);
Object[] params = dialectProvider.getPaginationParameters(limit, offset);
return jdbcTemplate.query(sql, params, rowMapper);
}
}
```
### Flyway Migrations
Database-specific migrations are organized by database type:
```
src/main/resources/db/migration/
├── mysql/
│ ├── V1__Create_schema.sql
│ ├── V2__Property_Set_Period.sql
│ └── V3-V12 (additional migrations)
└── mssql/
├── V1__Create_schema.sql
├── V2__Property_Set_Period.sql
└── V3-V12 (MSSQL-specific conversions)
```
**Migration naming:** `V{N}__{Description}.sql`
**Key schema differences:**
- MySQL uses `AUTO_INCREMENT`, MSSQL uses `IDENTITY(1,1)`
- MySQL supports `TIMESTAMP ... ON UPDATE CURRENT_TIMESTAMP`, MSSQL requires triggers
- MySQL `BOOLEAN` maps to MSSQL `BIT`
- Check constraints syntax differs (BETWEEN vs >= AND <=)
### Key Tables
Core entities:
- **premiss**, **premiss_sink**, **premiss_route** - Supply chain scenarios and routing
- **calculation_job**, **calculation_job_destination**, **calculation_job_route_section** - Calculation workflow
- **node** - Suppliers, destinations, intermediate locations
- **material**, **packaging** - Product and packaging master data
- **container_rate**, **country_matrix_rate** - Transportation rates
- **property_set**, **property** - Versioned configuration properties
## Important Database Considerations
### Concurrency Control
**Calculation Job Locking:**
The `CalculationJobRepository.fetchAndLockNextJob()` method uses database-specific row-level locking to prevent concurrent job processing:
- **MySQL**: `FOR UPDATE SKIP LOCKED` - Skips locked rows and returns next available job
- **MSSQL**: `WITH (UPDLOCK, READPAST)` - Similar semantics but different syntax
Both implementations ensure that multiple job processors can run concurrently without conflicts.
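A minimal sketch of how such a dialect-aware locking query can be assembled; the status column and value are illustrative, while `buildSelectForUpdateSkipLocked()` is the dialect hook shown in the MSSQLDialectProvider further down in this diff:
```java
import java.util.List;
import java.util.Optional;
import org.springframework.jdbc.core.JdbcTemplate;

public class JobLockingSketch {

    private final JdbcTemplate jdbcTemplate;
    private final SqlDialectProvider dialectProvider;

    public JobLockingSketch(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
        this.jdbcTemplate = jdbcTemplate;
        this.dialectProvider = dialectProvider;
    }

    // Must run inside a transaction so the row lock is held until the job is updated.
    public Optional<Integer> fetchAndLockNextJob() {
        String baseSelect =
                "SELECT id FROM calculation_job WHERE status = 'QUEUED' ORDER BY id";
        // The MySQL dialect appends FOR UPDATE SKIP LOCKED; the MSSQL dialect rewrites
        // the FROM clause to use WITH (UPDLOCK, READPAST).
        String sql = dialectProvider.buildSelectForUpdateSkipLocked(baseSelect);
        List<Integer> ids = jdbcTemplate.queryForList(sql, Integer.class);
        return ids.stream().findFirst();
    }
}
```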
### Transaction Isolation
- Default isolation level: READ_COMMITTED
- Repository tests use `@Transactional` for automatic rollback
- Critical operations (job locking, rate updates) use pessimistic locking
### Schema Conversion Gotchas
When adding new Flyway migrations, be aware of these differences:
**Auto-increment columns:**
```sql
-- MySQL
id INT AUTO_INCREMENT PRIMARY KEY
-- MSSQL
id INT IDENTITY(1,1) PRIMARY KEY
```
**Timestamp with auto-update:**
```sql
-- MySQL
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
-- MSSQL (requires trigger)
updated_at DATETIME2 DEFAULT GETDATE()
-- Plus CREATE TRIGGER for ON UPDATE behavior
```
**Boolean values:**
```sql
-- MySQL
is_active BOOLEAN DEFAULT TRUE
-- MSSQL
is_active BIT DEFAULT 1
```
**Check constraints:**
```sql
-- MySQL
CHECK (latitude BETWEEN -90 AND 90)
-- MSSQL
CHECK (latitude >= -90 AND latitude <= 90)
```
### Performance Considerations
- Both databases use similar execution plans for most queries
- Indexes are defined identically in both migration sets
- MSSQL may benefit from additional statistics maintenance for complex joins
- Performance regression < 5% observed in comparative testing
## External Integrations
- **Azure AD**: OAuth2/OIDC authentication
- **Azure Maps**: Geocoding and route distance calculations (GeoApiService, DistanceApiService)
- **EU Taxation API**: TARIC nomenclature lookup for customs duties (EUTaxationApiService)
## Configuration
### Profile-Based Database Configuration
The application uses Spring profiles for database selection:
**application-mysql.properties:**
```properties
spring.profiles.active=mysql
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.url=jdbc:mysql://${DB_HOST:localhost}:3306/${DB_DATABASE}
spring.datasource.username=${DB_USER}
spring.datasource.password=${DB_PASSWORD}
spring.flyway.enabled=true
spring.flyway.locations=classpath:db/migration/mysql
spring.flyway.baseline-on-migrate=true
```
**application-mssql.properties:**
```properties
spring.profiles.active=mssql
spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver
spring.datasource.url=jdbc:sqlserver://${DB_HOST:localhost}:1433;databaseName=${DB_DATABASE};encrypt=true;trustServerCertificate=true
spring.datasource.username=${DB_USER}
spring.datasource.password=${DB_PASSWORD}
spring.flyway.enabled=true
spring.flyway.locations=classpath:db/migration/mssql
spring.flyway.baseline-on-migrate=true
```
**Environment Variables:**
```bash
# MySQL setup
export SPRING_PROFILES_ACTIVE=mysql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=root
export DB_PASSWORD=your_password
# MSSQL setup
export SPRING_PROFILES_ACTIVE=mssql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=sa
export DB_PASSWORD=YourStrong!Passw0rd
```
### Application Properties
Key properties in `application.properties`:
- `lcc.auth.identify.by` - User identification method (workday)
- `calculation.job.processor.*` - Async calculation job settings
- Flyway enabled by default; migrations run on startup
**Database-specific bean activation:**
- `@Profile("mysql")` - Activates MySQLDialectProvider
- `@Profile("mssql")` - Activates MSSQLDialectProvider
## Quick Reference
### Switching Databases
**Switch from MySQL to MSSQL:**
```bash
# Update environment
export SPRING_PROFILES_ACTIVE=mssql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=sa
export DB_PASSWORD=YourStrong!Passw0rd
# Run application
mvn spring-boot:run
```
**Switch back to MySQL:**
```bash
export SPRING_PROFILES_ACTIVE=mysql
export DB_HOST=localhost
export DB_DATABASE=lcc
export DB_USER=root
export DB_PASSWORD=your_password
mvn spring-boot:run
```
### Running Migrations
Migrations run automatically on application startup when Flyway is enabled.
**Manual migration with Flyway CLI:**
```bash
# MySQL
flyway -url=jdbc:mysql://localhost:3306/lcc -user=root -password=pass -locations=filesystem:src/main/resources/db/migration/mysql migrate
# MSSQL
flyway -url=jdbc:sqlserver://localhost:1433;databaseName=lcc -user=sa -password=pass -locations=filesystem:src/main/resources/db/migration/mssql migrate
```
### Testing Checklist
When modifying repositories or adding new database-dependent code:
1. **Run unit tests** (if applicable)
```bash
mvn test -Dtest=MySQLDialectProviderTest
mvn test -Dtest=MSSQLDialectProviderTest
```
2. **Run repository integration tests on MySQL**
```bash
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mysql
```
3. **Run repository integration tests on MSSQL**
```bash
mvn test -Dtest="*RepositoryIntegrationTest" -Dspring.profiles.active=test,mssql
```
4. **Run full test suite on both databases**
```bash
mvn test -Dspring.profiles.active=test,mysql
mvn test -Dspring.profiles.active=test,mssql
```
### Common Repository Patterns
**Pattern 1: Constructor injection with SqlDialectProvider**
```java
@Repository
public class ExampleRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public ExampleRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
}
```
**Pattern 2: Pagination queries**
```java
public List<Entity> list(int limit, int offset) {
String sql = "SELECT * FROM table WHERE condition ORDER BY id " +
dialectProvider.buildPaginationClause(limit, offset);
Object[] params = ArrayUtils.addAll(
new Object[]{conditionValue},
dialectProvider.getPaginationParameters(limit, offset)
);
return jdbcTemplate.query(sql, params, rowMapper);
}
```
**Pattern 3: Insert with ID retrieval**
```java
public Integer create(Entity entity) {
String sql = "INSERT INTO table (name, is_active) VALUES (?, ?)";
jdbcTemplate.update(sql, entity.getName(), entity.isActive());
String idSql = dialectProvider.getLastInsertIdQuery();
return jdbcTemplate.queryForObject(idSql, Integer.class);
}
```
**Pattern 4: Upsert operations**
```java
public void upsert(Entity entity) {
String sql = dialectProvider.buildUpsertStatement(
"table_name",
List.of("unique_col1", "unique_col2"), // unique columns
List.of("unique_col1", "unique_col2", "value"), // insert columns
List.of("value") // update columns
);
jdbcTemplate.update(sql, entity.getCol1(), entity.getCol2(), entity.getValue());
}
```

131
db.sh Executable file

@ -0,0 +1,131 @@
#!/bin/bash
# db.sh - Manage database containers
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
usage() {
echo "Usage: $0 <mysql|mssql> [--clean] [--users] [--down]"
echo ""
echo "Options:"
echo " mysql|mssql Which database to start"
echo " --clean Delete volumes and start fresh"
echo " --users Only import test users (database must be running)"
echo " --down Stop the database container"
exit 1
}
# Parse parameters
DB=""
CLEAN=false
USERS_ONLY=false
DOWN_ONLY=false
for arg in "$@"; do
case $arg in
mysql|mssql)
DB=$arg
;;
--clean)
CLEAN=true
;;
--users)
USERS_ONLY=true
;;
--down)
DOWN_ONLY=true
;;
*)
usage
;;
esac
done
[ -z "$DB" ] && usage
# Stop container only
if [ "$DOWN_ONLY" = true ]; then
if [ "$DB" = "mysql" ]; then
echo "==> Stopping MySQL..."
podman-compose down 2>/dev/null || true
elif [ "$DB" = "mssql" ]; then
echo "==> Stopping MSSQL..."
podman-compose --profile mssql down 2>/dev/null || true
fi
echo "==> Done!"
exit 0
fi
# Import users only
if [ "$USERS_ONLY" = true ]; then
if [ "$DB" = "mysql" ]; then
echo "==> Importing users into MySQL..."
DB_USER=$(grep SPRING_DATASOURCE_USERNAME .env | cut -d= -f2)
DB_PASS=$(grep SPRING_DATASOURCE_PASSWORD .env | cut -d= -f2)
podman exec -i lcc-mysql-local mysql -u"${DB_USER}" -p"${DB_PASS}" lcc \
< src/test/resources/master_data/users.sql
echo "==> Users imported!"
elif [ "$DB" = "mssql" ]; then
echo "==> Importing users into MSSQL..."
DB_PASS=$(grep DB_ROOT_PASSWORD .env.mssql | cut -d= -f2)
podman exec -e "SQLCMDPASSWORD=${DB_PASS}" lcc-mssql-local /opt/mssql-tools18/bin/sqlcmd \
-S localhost -U sa -d lcc -C \
-i /dev/stdin < src/test/resources/master_data/users_mssql.sql
echo "==> Users imported!"
fi
exit 0
fi
echo "==> Stopping all DB containers..."
podman-compose --profile mssql down 2>/dev/null || true
if [ "$CLEAN" = true ]; then
echo "==> Deleting volumes..."
podman volume rm lcc_tool_mysql-data-local 2>/dev/null || true
podman volume rm lcc_tool_mssql-data-local 2>/dev/null || true
fi
echo "==> Linking .env -> .env.$DB"
rm -f .env
ln -s .env.$DB .env
# Check if volume exists (for init decision)
VOLUME_EXISTS=false
if [ "$DB" = "mysql" ]; then
podman volume exists lcc_tool_mysql-data-local 2>/dev/null && VOLUME_EXISTS=true
elif [ "$DB" = "mssql" ]; then
podman volume exists lcc_tool_mssql-data-local 2>/dev/null && VOLUME_EXISTS=true
fi
echo "==> Starting $DB..."
if [ "$DB" = "mysql" ]; then
podman-compose up -d mysql
echo "==> Waiting for MySQL..."
until podman exec lcc-mysql-local mysqladmin ping -h localhost --silent 2>/dev/null; do
sleep 2
done
echo "==> MySQL is ready!"
elif [ "$DB" = "mssql" ]; then
podman-compose --profile mssql up -d mssql
echo "==> Waiting for MSSQL..."
until [ "$(podman inspect -f '{{.State.Health.Status}}' lcc-mssql-local 2>/dev/null)" = "healthy" ]; do
sleep 2
done
echo "==> MSSQL is ready!"
if [ "$VOLUME_EXISTS" = false ]; then
echo "==> New volume detected, creating database..."
DB_PASS=$(grep DB_ROOT_PASSWORD .env | cut -d= -f2)
podman exec lcc-mssql-local /opt/mssql-tools18/bin/sqlcmd \
-S localhost -U sa -P "${DB_PASS}" -C \
-Q "IF NOT EXISTS (SELECT * FROM sys.databases WHERE name = 'lcc') CREATE DATABASE lcc"
echo "==> Database 'lcc' created!"
fi
fi
echo "==> Done! .env points to .env.$DB"


@ -2,6 +2,8 @@ services:
mysql:
image: mysql:8.4
container_name: lcc-mysql-local
env_file:
- .env.mysql
environment:
MYSQL_ROOT_PASSWORD: ${DB_ROOT_PASSWORD}
MYSQL_DATABASE: lcc
@ -20,6 +22,30 @@ services:
retries: 5
restart: unless-stopped
# MSSQL database (optional - only needed for MSSQL tests)
mssql:
image: mcr.microsoft.com/mssql/server:2022-latest
container_name: lcc-mssql-local
environment:
ACCEPT_EULA: "Y"
MSSQL_SA_PASSWORD: ${DB_ROOT_PASSWORD}
MSSQL_PID: "Developer"
volumes:
- mssql-data-local:/var/opt/mssql
ports:
- "1433:1433"
networks:
- lcc-network-local
healthcheck:
test: /opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P "$${MSSQL_SA_PASSWORD}" -Q "SELECT 1" -C || exit 1
interval: 10s
timeout: 5s
retries: 10
start_period: 30s
restart: unless-stopped
profiles:
- mssql # Started only with: docker-compose --profile mssql up
lcc-app:
#image: git.avatic.de/avatic/lcc:latest
# Oder für lokales Bauen:
@ -29,7 +55,7 @@ services:
mysql:
condition: service_healthy
env_file:
- .env
- .env.mysql
environment:
# Override the datasource URL for the Docker network
SPRING_DATASOURCE_URL: jdbc:mysql://mysql:3306/lcc
@ -44,6 +70,7 @@ services:
volumes:
mysql-data-local:
mssql-data-local:
networks:
lcc-network-local:

0
mvnw vendored Normal file → Executable file

85
pom.xml

@ -29,10 +29,19 @@
<properties>
<java.version>23</java.version>
<spring-cloud-azure.version>5.24.1</spring-cloud-azure.version>
<mockito.version>5.20.0</mockito.version>
<mockito.version>5.21.0</mockito.version>
<flyway.version>11.18.0</flyway.version>
<surefire.excludedGroups>analysis</surefire.excludedGroups>
<aspectj.version>1.9.21</aspectj.version>
</properties>
<dependencies>
<!-- Allure -->
<dependency>
<groupId>io.qameta.allure</groupId>
<artifactId>allure-junit5</artifactId>
<version>2.29.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
@ -90,6 +99,12 @@
<artifactId>mysql-connector-j</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>12.6.1.jre11</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
@ -178,6 +193,10 @@
<groupId>org.flywaydb</groupId>
<artifactId>flyway-mysql</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-sqlserver</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jaxb</groupId>
@ -195,6 +214,52 @@
<version>3.2.3</version>
</dependency>
<!-- TestContainers for multi-database integration testing -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-testcontainers</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
<version>1.19.7</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>mysql</artifactId>
<version>1.19.7</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>mssqlserver</artifactId>
<version>1.19.7</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>junit-jupiter</artifactId>
<version>1.19.7</version>
<scope>test</scope>
</dependency>
<!-- Playwright for E2E testing -->
<dependency>
<groupId>com.microsoft.playwright</groupId>
<artifactId>playwright</artifactId>
<version>1.48.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>1.9.21</version>
<scope>test</scope>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
@ -210,6 +275,7 @@
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
@ -235,15 +301,30 @@
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.5.4</version>
<configuration>
<argLine>
-javaagent:${settings.localRepository}/org/mockito/mockito-core/${mockito.version}/mockito-core-${mockito.version}.jar
-javaagent:${settings.localRepository}/org/aspectj/aspectjweaver/${aspectj.version}/aspectjweaver-${aspectj.version}.jar
</argLine>
<systemPropertyVariables>
<allure.results.directory>${project.build.directory}/allure-results</allure.results.directory>
</systemPropertyVariables>
<!-- Exclude analysis tests by default -->
<excludedGroups>${surefire.excludedGroups}</excludedGroups>
<excludes>
<exclude>**/controller/**/*Test.java</exclude>
</excludes>
</configuration>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>1.9.21</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>


@ -10,7 +10,6 @@
"dependencies": {
"@phosphor-icons/vue": "^2.2.1",
"@vueuse/core": "^13.6.0",
"azure-maps-control": "^3.6.1",
"chart.js": "^4.5.0",
"leaflet": "^1.9.4",
"loglevel": "^1.9.2",
@ -43,27 +42,6 @@
"node": ">=6.0.0"
}
},
"node_modules/@azure/msal-browser": {
"version": "2.39.0",
"resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-2.39.0.tgz",
"integrity": "sha512-kks/n2AJzKUk+DBqZhiD+7zeQGBl+WpSOQYzWy6hff3bU0ZrYFqr4keFLlzB5VKuKZog0X59/FGHb1RPBDZLVg==",
"license": "MIT",
"dependencies": {
"@azure/msal-common": "13.3.3"
},
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/@azure/msal-common": {
"version": "13.3.3",
"resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-13.3.3.tgz",
"integrity": "sha512-n278DdCXKeiWhLwhEL7/u9HRMyzhUXLefeajiknf6AmEedoiOiv2r5aRJ7LXdT3NGPyubkdIbthaJlVtmuEqvA==",
"license": "MIT",
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/@babel/code-frame": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
@ -95,7 +73,6 @@
"integrity": "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.27.1",
@ -980,46 +957,6 @@
"integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==",
"license": "MIT"
},
"node_modules/@mapbox/jsonlint-lines-primitives": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@mapbox/jsonlint-lines-primitives/-/jsonlint-lines-primitives-2.0.2.tgz",
"integrity": "sha512-rY0o9A5ECsTQRVhv7tL/OyDpGAoUB4tTvLiW1DSzQGq4bvTPhNw1VpSNjDJc5GFZ2XuyOtSWSVN05qOtcD71qQ==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/@mapbox/mapbox-gl-supported": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@mapbox/mapbox-gl-supported/-/mapbox-gl-supported-2.0.1.tgz",
"integrity": "sha512-HP6XvfNIzfoMVfyGjBckjiAOQK9WfX0ywdLubuPMPv+Vqf5fj0uCbgBQYpiqcWZT6cbyyRnTSXDheT1ugvF6UQ==",
"license": "BSD-3-Clause"
},
"node_modules/@mapbox/unitbezier": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/@mapbox/unitbezier/-/unitbezier-0.0.1.tgz",
"integrity": "sha512-nMkuDXFv60aBr9soUG5q+GvZYL+2KZHVvsqFCzqnkGEf46U2fvmytHaEVc1/YZbiLn8X+eR3QzX1+dwDO1lxlw==",
"license": "BSD-2-Clause"
},
"node_modules/@maplibre/maplibre-gl-style-spec": {
"version": "20.4.0",
"resolved": "https://registry.npmjs.org/@maplibre/maplibre-gl-style-spec/-/maplibre-gl-style-spec-20.4.0.tgz",
"integrity": "sha512-AzBy3095fTFPjDjmWpR2w6HVRAZJ6hQZUCwk5Plz6EyfnfuQW1odeW5i2Ai47Y6TBA2hQnC+azscjBSALpaWgw==",
"license": "ISC",
"dependencies": {
"@mapbox/jsonlint-lines-primitives": "~2.0.2",
"@mapbox/unitbezier": "^0.0.1",
"json-stringify-pretty-compact": "^4.0.0",
"minimist": "^1.2.8",
"quickselect": "^2.0.0",
"rw": "^1.3.3",
"tinyqueue": "^3.0.0"
},
"bin": {
"gl-style-format": "dist/gl-style-format.mjs",
"gl-style-migrate": "dist/gl-style-migrate.mjs",
"gl-style-validate": "dist/gl-style-validate.mjs"
}
},
"node_modules/@phosphor-icons/vue": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/@phosphor-icons/vue/-/vue-2.2.1.tgz",
@ -1345,12 +1282,6 @@
"integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
"license": "MIT"
},
"node_modules/@types/geojson": {
"version": "7946.0.16",
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz",
"integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==",
"license": "MIT"
},
"node_modules/@types/web-bluetooth": {
"version": "0.0.21",
"resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.21.tgz",
@ -1696,18 +1627,6 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/azure-maps-control": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/azure-maps-control/-/azure-maps-control-3.6.1.tgz",
"integrity": "sha512-EqJ96GOjUcCG9XizUbyqDu92x3KKT9C9AwRL3hmPicQjn00ql7em6RbBqJYO4nvIoH53DG6MOITj9t/zv1mQYg==",
"license": "SEE LICENSE.TXT",
"dependencies": {
"@azure/msal-browser": "^2.32.1",
"@mapbox/mapbox-gl-supported": "^2.0.1",
"@maplibre/maplibre-gl-style-spec": "^20.0.0",
"@types/geojson": "^7946.0.14"
}
},
"node_modules/binary-extensions": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
@ -1761,7 +1680,6 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"caniuse-lite": "^1.0.30001737",
"electron-to-chromium": "^1.5.211",
@ -1817,7 +1735,6 @@
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.0.tgz",
"integrity": "sha512-aYeC/jDgSEx8SHWZvANYMioYMZ2KX02W6f6uVfyteuCGcadDLcYVHdfdygsTQkQ4TKn5lghoojAsPj5pu0SnvQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"@kurkle/color": "^0.3.0"
},
@ -2371,12 +2288,6 @@
"node": ">=6"
}
},
"node_modules/json-stringify-pretty-compact": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/json-stringify-pretty-compact/-/json-stringify-pretty-compact-4.0.0.tgz",
"integrity": "sha512-3CNZ2DnrpByG9Nqj6Xo8vqbjT4F6N+tb4Gb28ESAZjYZ5yqvmc56J+/kuIwkaAMOyblTQhUW7PxMkUb8Q36N3Q==",
"license": "MIT"
},
"node_modules/json5": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
@ -2447,15 +2358,6 @@
"@jridgewell/sourcemap-codec": "^1.5.5"
}
},
"node_modules/minimist": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/mitt": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz",
@ -2700,12 +2602,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/quickselect": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/quickselect/-/quickselect-2.0.0.tgz",
"integrity": "sha512-RKJ22hX8mHe3Y6wH/N3wCM6BWtjaxIyyUIkpHOvfFnxdI4yD4tBXEBKSbriGujF6jnSVkJrffuo6vxACiSSxIw==",
"license": "ISC"
},
"node_modules/readdirp": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
@ -2789,12 +2685,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/rw": {
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz",
"integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==",
"license": "BSD-3-Clause"
},
"node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
@ -2915,12 +2805,6 @@
"url": "https://github.com/sponsors/SuperchupuDev"
}
},
"node_modules/tinyqueue": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/tinyqueue/-/tinyqueue-3.0.0.tgz",
"integrity": "sha512-gRa9gwYU3ECmQYv3lslts5hxuIa90veaEcxDYuu3QGOIAEM2mOZkVHp48ANJuu1CURtRdHKUBY5Lm1tHV+sD4g==",
"license": "ISC"
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@ -3018,7 +2902,6 @@
"resolved": "https://registry.npmjs.org/vite/-/vite-7.1.4.tgz",
"integrity": "sha512-X5QFK4SGynAeeIt+A7ZWnApdUyHYm+pzv/8/A57LqSGcI88U6R6ipOs3uCesdc6yl7nl+zNO0t8LmqAdXcQihw==",
"license": "MIT",
"peer": true,
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.5.0",
@ -3250,7 +3133,6 @@
"resolved": "https://registry.npmjs.org/vue/-/vue-3.5.21.tgz",
"integrity": "sha512-xxf9rum9KtOdwdRkiApWL+9hZEMWE90FHh8yS1+KJAiWYh+iGWV1FquPjoO9VUHQ+VIhsCXNNyZ5Sf4++RVZBA==",
"license": "MIT",
"peer": true,
"dependencies": {
"@vue/compiler-dom": "3.5.21",
"@vue/compiler-sfc": "3.5.21",


@ -86,7 +86,7 @@ export default {
flex-direction: column;
gap: 1.6rem;
width: min(80vw, 180rem);
height: min(80vh, 120rem);
height: min(90vh, 120rem);
min-height: 0;
}


@ -107,6 +107,27 @@
<modal :z-index="2000" :state="modalShow">
<div class="modal-content-container">
<h3 class="sub-header">{{ modalTitle }}</h3>
<!-- Part Number Chips -->
<div v-if="shouldShowPartNumbers" class="parts-selection-container">
<div class="parts-chips">
<basic-badge
v-for="partNumber in selectedPartNumbers.slice(0, 5)"
:key="partNumber"
variant="primary"
size="compact"
class="part-chip"
>
{{ partNumber }}
</basic-badge>
<span v-if="selectedPartNumbers.length > 5" class="parts-ellipsis">...</span>
</div>
<div v-if="partNumberCountText" class="parts-count">
{{ partNumberCountText }}
</div>
</div>
<!-- END: Part Number Chips -->
<component
:is="modalComponentType"
ref="modalComponent"
@ -176,6 +197,7 @@ import Modal from "@/components/UI/Modal.vue";
import PriceEdit from "@/components/layout/edit/PriceEdit.vue";
import MaterialEdit from "@/components/layout/edit/MaterialEdit.vue";
import PackagingEdit from "@/components/layout/edit/PackagingEdit.vue";
import BasicBadge from "@/components/UI/BasicBadge.vue";
import {useNotificationStore} from "@/store/notification.js";
import {useDestinationEditStore} from "@/store/destinationEdit.js";
@ -211,7 +233,8 @@ export default {
CalculationListItem,
Checkbox,
BulkEditRow,
BasicButton
BasicButton,
BasicBadge
},
data() {
return {
@ -286,6 +309,55 @@ export default {
return "Please wait. Prepare calculation ..."
return this.processingMessage;
},
/**
* Extracts unique part numbers from the selected premises
* @returns {Array<string>} Sorted array of unique part numbers
*/
selectedPartNumbers() {
// Guard: no editIds or not relevant for this modal
if (!this.editIds || this.editIds.length === 0) {
return [];
}
// Only shown for material/price/packaging modals
const relevantTypes = ['material', 'price', 'packaging'];
if (!relevantTypes.includes(this.modalType)) {
return [];
}
try {
// Extract part numbers
const partNumbers = this.editIds
.map(id => {
const premise = this.premiseEditStore.getById(id);
return premise?.material?.part_number;
})
.filter(partNumber => partNumber != null && partNumber !== '');
// Remove duplicates and sort
return [...new Set(partNumbers)].sort();
} catch (error) {
logger.log('Error extracting part numbers:', error);
return [];
}
},
/**
* Determines whether part numbers should be displayed
*/
shouldShowPartNumbers() {
return this.selectedPartNumbers.length > 0;
},
/**
* Count text shown when there are many parts (> 5)
*/
partNumberCountText() {
const count = this.selectedPartNumbers.length;
return count > 5 ? `${count} part numbers` : null;
}
},
watch: {
@ -630,6 +702,38 @@ export default {
margin-bottom: 1.6rem;
}
/* Part Number Chips Styling */
.parts-selection-container {
display: flex;
flex-direction: column;
gap: 0.4rem;
margin-bottom: 1.6rem;
padding-bottom: 1.6rem;
border-bottom: 0.1rem solid rgba(107, 134, 156, 0.1);
}
.parts-chips {
display: flex;
flex-wrap: wrap;
gap: 0.6rem;
}
.part-chip {
flex-shrink: 0;
}
.parts-ellipsis {
font-size: 1.4rem;
color: #6B869C;
align-self: center;
padding: 0 0.4rem;
}
.parts-count {
font-size: 1.2rem;
color: #9CA3AF;
}
/* Global style for the copy-mode cursor */
.edit-calculation-container.has-selection :deep(.edit-calculation-list-header-cell--copyable:hover) {
cursor: url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz48c3ZnIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDEyOC41MSAxMzQuMDUiPjxkZWZzPjxzdHlsZT4uY3tmaWxsOm5vbmU7fS5jLC5ke3N0cm9rZTojMDEwMTAxO3N0cm9rZS1saW5lY2FwOnJvdW5kO3N0cm9rZS1saW5lam9pbjpyb3VuZDtzdHJva2Utd2lkdGg6NXB4O30uZHtmaWxsOiNmZmY7fTwvc3R5bGU+PC9kZWZzPjxnIGlkPSJhIj48cGF0aCBjbGFzcz0iYyIgZD0ibTU0Ljg5LDExMi41MWgtMi4yNGMtMS4yNCwwLTIuMjQtMS0yLjI0LTIuMjR2LTIuMjQiLz48bGluZSBjbGFzcz0iYyIgeDE9IjcwLjU3IiB5MT0iNzYuNjciIHgyPSI2My44NSIgeTI9Ijc2LjY3Ii8+PGxpbmUgY2xhc3M9ImMiIHgxPSI3MC41NyIgeTE9IjExMi41MSIgeDI9IjY2LjA5IiB5Mj0iMTEyLjUxIi8+PGxpbmUgY2xhc3M9ImMiIHgxPSI4Ni4yNSIgeTE9Ijk5LjA3IiB4Mj0iODYuMjUiIHkyPSI5Mi4zNSIvPjxsaW5lIGNsYXNzPSJjIiB4MT0iNTAuNDEiIHkxPSI5Ni44MyIgeDI9IjUwLjQxIiB5Mj0iOTIuMzUiLz48cGF0aCBjbGFzcz0iYyIgZD0ibTgxLjc3LDExMi41MWgyLjI0YzEuMjQsMCwyLjI0LTEsMi4yNC0yLjI0di0yLjI0Ii8+PHBhdGggY2xhc3M9ImMiIGQ9Im04MS43Nyw3Ni42N2gyLjI0YzEuMjQsMCwyLjI0LDEsMi4yNCwyLjI0djIuMjQiLz48cGF0aCBjbGFzcz0iYyIgZD0ibTU0Ljg5LDc2LjY3aC0yLjI0Yy0xLjI0LDAtMi4yNCwxLTIuMjQsMi4yNHYyLjI0Ii8+PHBhdGggY2xhc3M9ImMiIGQ9Im04Ni4yNSw5OS4wN2gxMS4yYzEuMjQsMCwyLjI0LTEsMi4yNC0yLjI0di0zMS4zNmMwLTEuMjQtMS0yLjI0LTIuMjQtMi4yNGgtMzEuMzZjLTEuMjQsMC0yLjI0LDEtMi4yNCwyLjI0djExLjIiLz48L2c+PGcgaWQ9ImIiPjxwYXRoIGNsYXNzPSJkIiBkPSJtNDQuMDgsNDQuMDdsMzIuOTQtOS4yYzEuNjktLjUyLDIuNjQtMi4zMSwyLjEyLTQtLjMtLjk4LTEuMDUtMS43NS0yLjAxLTIuMDlMNi43MywyLjY3Yy0xLjY3LS41Ny0zLjQ5LjMzLTQuMDYsMi0uMjMuNjYtLjIzLDEuMzgsMCwyLjA1bDI2LjExLDcwLjRjLjU4LDEuNjcsMi40LDIuNTYsNC4wNywxLjk4Ljk3LS4zMywxLjcxLTEuMTEsMi4wMS0yLjA5bDkuMjItMzIuOTRaIi8+PC9nPjwvc3ZnPg==") 12 12, pointer;


@ -35,6 +35,7 @@ export default defineConfig({
},
},
server: {
host: true,
proxy: {
'/api': {
target: 'http://localhost:8080',


@ -81,9 +81,15 @@ public class PremiseController {
@GetMapping({"/search", "/search/"})
@PreAuthorize("hasAnyRole('SUPER', 'CALCULATION')")
public ResponseEntity<PremiseSearchResultDTO> findMaterialsAndSuppliers(@RequestParam String search) {
log.info("Search request received with query: '{}' (length: {})", search, search != null ? search.length() : 0);
try {
return ResponseEntity.ok(premiseSearchStringAnalyzerService.findMaterialAndSuppliers(search));
var result = premiseSearchStringAnalyzerService.findMaterialAndSuppliers(search);
log.info("Search result: {} materials, {} suppliers, {} user suppliers",
result.getMaterials() != null ? result.getMaterials().size() : 0,
result.getSupplier() != null ? result.getSupplier().size() : 0,
result.getUserSupplier() != null ? result.getUserSupplier().size() : 0);
return ResponseEntity.ok(result);
} catch (Exception e) {
throw new BadRequestException("Bad string encoding", "Unable to decode request", e);
}


@ -0,0 +1,454 @@
package de.avatic.lcc.database.dialect;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* Microsoft SQL Server-specific implementation of {@link SqlDialectProvider}.
*
* <p>This provider generates SQL syntax compatible with SQL Server 2017+.
* It is automatically activated when the "mssql" Spring profile is active.</p>
*
* @author LCC Team
* @since 1.0
*/
@Component
@Profile("mssql")
public class MSSQLDialectProvider implements SqlDialectProvider {
@Override
public String getDialectName() {
return "Microsoft SQL Server";
}
@Override
public String getDriverClassName() {
return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
}
// ========== Pagination ==========
/**
* Builds MSSQL pagination clause using OFFSET/FETCH.
*
* <p>MSSQL syntax: {@code OFFSET ? ROWS FETCH NEXT ? ROWS ONLY}</p>
*
* @param limit maximum number of rows to return
* @param offset number of rows to skip
* @return MSSQL pagination clause
*/
@Override
public String buildPaginationClause(int limit, int offset) {
return "OFFSET ? ROWS FETCH NEXT ? ROWS ONLY";
}
/**
* Returns pagination parameters for MSSQL in correct order: [offset, limit].
*
* <p>Note: MSSQL requires OFFSET first, then FETCH NEXT (opposite of MySQL).</p>
*
* @param limit maximum number of rows
* @param offset number of rows to skip
* @return array with [offset, limit] (reversed compared to MySQL)
*/
@Override
public Object[] getPaginationParameters(int limit, int offset) {
return new Object[]{offset, limit}; // MSSQL: offset first, then limit
}
/**
* Returns the maximum LIMIT value for MSSQL.
*
* <p>MSSQL INT max value: {@code 2147483647}</p>
*
* @return "2147483647"
*/
@Override
public String getMaxLimitValue() {
return "2147483647"; // INT max value in MSSQL
}
// ========== Upsert/Insert Ignore ==========
/**
* Builds MSSQL MERGE statement for upsert operations.
*
* <p>MSSQL uses MERGE instead of MySQL's ON DUPLICATE KEY UPDATE.</p>
*
* <p>Example generated SQL:</p>
* <pre>
* MERGE INTO table AS target
* USING (SELECT ? AS col1, ? AS col2) AS source
* ON target.key1 = source.key1 AND target.key2 = source.key2
* WHEN MATCHED THEN
* UPDATE SET target.col3 = source.col3
* WHEN NOT MATCHED THEN
* INSERT (col1, col2, col3) VALUES (source.col1, source.col2, source.col3);
* </pre>
*
* @param tableName target table name
* @param uniqueColumns columns that define uniqueness (for ON clause)
* @param insertColumns all columns to insert
* @param updateColumns columns to update on match
* @return MSSQL MERGE statement
*/
@Override
public String buildUpsertStatement(
String tableName,
List<String> uniqueColumns,
List<String> insertColumns,
List<String> updateColumns
) {
if (tableName == null || uniqueColumns.isEmpty() || insertColumns.isEmpty()) {
throw new IllegalArgumentException("tableName, uniqueColumns, and insertColumns must not be empty");
}
// Build source column list with placeholders
String sourceColumns = insertColumns.stream()
.map(col -> "? AS " + col)
.collect(Collectors.joining(", "));
// Build ON clause matching unique columns
String onClause = uniqueColumns.stream()
.map(col -> "target." + col + " = source." + col)
.collect(Collectors.joining(" AND "));
// Build UPDATE SET clause (only if updateColumns is not empty)
String updateClause = "";
if (updateColumns != null && !updateColumns.isEmpty()) {
updateClause = "WHEN MATCHED THEN UPDATE SET " +
updateColumns.stream()
.map(col -> "target." + col + " = source." + col)
.collect(Collectors.joining(", ")) + " ";
}
// Build INSERT clause
String insertColumnList = String.join(", ", insertColumns);
String insertValueList = insertColumns.stream()
.map(col -> "source." + col)
.collect(Collectors.joining(", "));
return String.format(
"MERGE INTO %s AS target " +
"USING (SELECT %s) AS source " +
"ON %s " +
"%s" + // UPDATE clause (may be empty)
"WHEN NOT MATCHED THEN " +
"INSERT (%s) VALUES (%s);",
tableName,
sourceColumns,
onClause,
updateClause,
insertColumnList,
insertValueList
);
}
@Override
public String buildInsertIgnoreStatement(
String tableName,
List<String> columns,
List<String> uniqueColumns
) {
String columnList = String.join(", ", columns);
String placeholders = columns.stream().map(c -> "?").collect(Collectors.joining(", "));
String uniqueCondition = uniqueColumns.stream()
.map(c -> String.format("target.%s = source.%s", c, c))
.collect(Collectors.joining(" AND "));
String sourceColumns = columns.stream()
.map(c -> String.format("source.%s", c))
.collect(Collectors.joining(", "));
return String.format(
"MERGE INTO %s AS target " +
"USING (SELECT %s) AS source (%s) " +
"ON %s " +
"WHEN NOT MATCHED THEN INSERT (%s) VALUES (%s);",
tableName,
placeholders,
columnList,
uniqueCondition,
columnList,
sourceColumns
);
}
// ========== Locking Strategies ==========
/**
* Builds MSSQL SELECT with UPDLOCK and READPAST hints (equivalent to MySQL SKIP LOCKED).
*
* <p>MSSQL syntax: {@code SELECT ... FROM table WITH (UPDLOCK, READPAST)}</p>
*
* <p>The WITH hint must be placed after the table name in FROM clause.</p>
*
* @param selectStatement base SELECT statement
* @return SELECT statement with UPDLOCK, READPAST hints
*/
@Override
public String buildSelectForUpdateSkipLocked(String selectStatement) {
// Insert WITH (UPDLOCK, READPAST) after the first table name in FROM clause
// This is a simplified approach - assumes "FROM tablename" pattern
return selectStatement.replaceFirst(
"FROM\\s+(\\w+)",
"FROM $1 WITH (UPDLOCK, READPAST)"
);
}
/**
* Builds MSSQL SELECT with UPDLOCK hint (standard pessimistic locking).
*
* <p>MSSQL syntax: {@code SELECT ... FROM table WITH (UPDLOCK, ROWLOCK)}</p>
*
* @param selectStatement base SELECT statement
* @return SELECT statement with UPDLOCK hint
*/
@Override
public String buildSelectForUpdate(String selectStatement) {
return selectStatement.replaceFirst(
"FROM\\s+(\\w+)",
"FROM $1 WITH (UPDLOCK, ROWLOCK)"
);
}
// ========== Date/Time Functions ==========
/**
* Returns MSSQL current timestamp function: {@code GETDATE()}.
*
* @return {@code GETDATE()}
*/
@Override
public String getCurrentTimestamp() {
return "GETDATE()";
}
/**
* Builds MSSQL date subtraction using DATEADD with negative value.
*
* <p>MSSQL syntax: {@code DATEADD(DAY, -?, GETDATE())}</p>
*
* @param baseDate base date expression (or null to use GETDATE())
* @param value placeholder for subtraction amount
* @param unit time unit (DAY, HOUR, MINUTE, etc.)
* @return MSSQL DATEADD expression with negative value
*/
@Override
public String buildDateSubtraction(String baseDate, String value, DateUnit unit) {
String base = (baseDate != null && !baseDate.isEmpty()) ? baseDate : "GETDATE()";
// MSSQL uses DATEADD with negative value for subtraction
return String.format("DATEADD(%s, -%s, %s)", unit.name(), value, base);
}
/**
* Builds MSSQL date addition using DATEADD.
*
* <p>MSSQL syntax: {@code DATEADD(DAY, ?, GETDATE())}</p>
*
* @param baseDate base date expression (or null to use GETDATE())
* @param value placeholder for addition amount
* @param unit time unit (DAY, HOUR, MINUTE, etc.)
* @return MSSQL DATEADD expression
*/
@Override
public String buildDateAddition(String baseDate, String value, DateUnit unit) {
String base = (baseDate != null && !baseDate.isEmpty()) ? baseDate : "GETDATE()";
return String.format("DATEADD(%s, %s, %s)", unit.name(), value, base);
}
/**
* Extracts date part from datetime expression using CAST.
*
* <p>MSSQL syntax: {@code CAST(column AS DATE)}</p>
*
* @param columnOrExpression column name or expression
* @return MSSQL CAST expression
*/
@Override
public String extractDate(String columnOrExpression) {
return String.format("CAST(%s AS DATE)", columnOrExpression);
}
// ========== Auto-increment Reset ==========
/**
* Resets IDENTITY counter for a table using DBCC CHECKIDENT.
*
* <p>MSSQL syntax: {@code DBCC CHECKIDENT ('table', RESEED, 0)}</p>
*
* @param tableName table to reset IDENTITY counter
* @return MSSQL DBCC CHECKIDENT statement
*/
@Override
public String buildAutoIncrementReset(String tableName) {
return String.format("DBCC CHECKIDENT ('%s', RESEED, 0)", tableName);
}
// ========== Geospatial Distance Calculation ==========
/**
* Builds Haversine distance formula for MSSQL.
*
* <p>MSSQL supports the same trigonometric functions as MySQL (SIN, COS, ACOS, RADIANS),
* so the formula is identical. Calculates great-circle distance in kilometers.</p>
*
* <p>Formula:</p>
* <pre>
* 6371 * ACOS(
* COS(RADIANS(lat1)) * COS(RADIANS(lat2)) * COS(RADIANS(lng2) - RADIANS(lng1)) +
* SIN(RADIANS(lat1)) * SIN(RADIANS(lat2))
* )
* </pre>
*
* @param lat1 first latitude column/expression
* @param lng1 first longitude column/expression
* @param lat2 second latitude column/expression
* @param lng2 second longitude column/expression
* @return Haversine distance expression in kilometers
*/
@Override
public String buildHaversineDistance(String lat1, String lng1, String lat2, String lng2) {
return String.format(
"6371 * ACOS(" +
"COS(RADIANS(%s)) * COS(RADIANS(%s)) * " +
"COS(RADIANS(%s) - RADIANS(%s)) + " +
"SIN(RADIANS(%s)) * SIN(RADIANS(%s))" +
")",
lat1, lat2, lng2, lng1, lat1, lat2
);
}
// ========== String/Type Functions ==========
/**
* Builds string concatenation using CONCAT function (SQL Server 2012+).
*
* <p>MSSQL syntax: {@code CONCAT(a, b, c)}</p>
*
* @param expressions expressions to concatenate
* @return MSSQL CONCAT expression
*/
@Override
public String buildConcat(String... expressions) {
if (expressions == null || expressions.length == 0) {
return "''";
}
return "CONCAT(" + String.join(", ", expressions) + ")";
}
/**
* Casts expression to string type.
*
* <p>MSSQL syntax: {@code CAST(expression AS VARCHAR(MAX))}</p>
*
* @param expression expression to cast to string
* @return MSSQL CAST expression
*/
@Override
public String castToString(String expression) {
return String.format("CAST(%s AS VARCHAR(MAX))", expression);
}
// ========== RETURNING Clause Support ==========
/**
* MSSQL supports a RETURNING-style clause via OUTPUT INSERTED.
*
* @return true
*/
@Override
public boolean supportsReturningClause() {
return true;
}
/**
* Builds MSSQL OUTPUT clause for INSERT statements.
*
* <p>MSSQL syntax: {@code OUTPUT INSERTED.column1, INSERTED.column2}</p>
*
* @param columns columns to return from inserted row
* @return MSSQL OUTPUT INSERTED clause
*/
@Override
public String buildReturningClause(String... columns) {
if (columns == null || columns.length == 0) {
throw new IllegalArgumentException("At least one column must be specified");
}
String columnList = Arrays.stream(columns)
.map(col -> "INSERTED." + col)
.collect(Collectors.joining(", "));
return "OUTPUT " + columnList;
}
/**
* Returns MSSQL IDENTITY definition for auto-increment columns.
*
* <p>MSSQL syntax: {@code IDENTITY(1,1)}</p>
*
* @return {@code IDENTITY(1,1)}
*/
@Override
public String getAutoIncrementDefinition() {
return "IDENTITY(1,1)";
}
/**
* Returns MSSQL timestamp column definition.
*
* <p>MSSQL uses DATETIME2 with DEFAULT constraint.
* Note: MSSQL doesn't support ON UPDATE CURRENT_TIMESTAMP like MySQL,
* so updates must be handled via triggers or application logic.</p>
*
* @return DATETIME2 column definition
*/
@Override
public String getTimestampDefinition() {
return "DATETIME2 DEFAULT GETDATE()";
}
// ========== Boolean Literals ==========
/**
* Returns MSSQL boolean TRUE literal as numeric 1.
*
* <p>MSSQL BIT type uses 1 for TRUE.</p>
*
* @return "1"
*/
@Override
public String getBooleanTrue() {
return "1";
}
/**
* Returns MSSQL boolean FALSE literal as numeric 0.
*
* <p>MSSQL BIT type uses 0 for FALSE.</p>
*
* @return "0"
*/
@Override
public String getBooleanFalse() {
return "0";
}
// ========== Identifier Escaping ==========
/**
* Escapes identifier with square brackets for MSSQL reserved words.
*
* <p>MSSQL uses square brackets to escape reserved words like 'file', 'user', 'order'.</p>
*
* @param identifier column or table name to escape
* @return escaped identifier with square brackets
*/
@Override
public String escapeIdentifier(String identifier) {
// MSSQL uses square brackets for escaping reserved words
return "[" + identifier + "]";
}
}
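
To make the MERGE construction above concrete, here is a minimal standalone sketch (the demo class name is ours and it is placed alongside the providers only so no extra imports are needed) of the statement buildUpsertStatement is expected to emit for the country_property upsert that appears later in this change set:

package de.avatic.lcc.database.dialect;

import java.util.List;

public class MssqlUpsertDemo {
    public static void main(String[] args) {
        var dialect = new MSSQLDialectProvider();
        String sql = dialect.buildUpsertStatement(
                "country_property",
                List.of("property_set_id", "country_property_type_id", "country_id"),   // unique key
                List.of("property_value", "country_id", "country_property_type_id", "property_set_id"),
                List.of("property_value"));
        System.out.println(sql);
        // Expected shape:
        // MERGE INTO country_property AS target
        // USING (SELECT ? AS property_value, ? AS country_id, ? AS country_property_type_id, ? AS property_set_id) AS source
        // ON target.property_set_id = source.property_set_id AND target.country_property_type_id = source.country_property_type_id AND target.country_id = source.country_id
        // WHEN MATCHED THEN UPDATE SET target.property_value = source.property_value
        // WHEN NOT MATCHED THEN INSERT (property_value, country_id, country_property_type_id, property_set_id)
        //   VALUES (source.property_value, source.country_id, source.country_property_type_id, source.property_set_id);
    }
}

Because the placeholders only appear in the USING clause, a caller binds exactly one value per insert column, in insertColumns order.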

View file

@ -0,0 +1,205 @@
package de.avatic.lcc.database.dialect;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* MySQL-specific implementation of {@link SqlDialectProvider}.
*
* <p>This provider generates SQL syntax compatible with MySQL 8.0+.
* It is the default dialect and is activated whenever the "mssql" Spring profile is not active.</p>
*
* @author LCC Team
* @since 1.0
*/
@Component
@Profile("!mssql")
public class MySQLDialectProvider implements SqlDialectProvider {
@Override
public String getDialectName() {
return "MySQL";
}
@Override
public String getDriverClassName() {
return "com.mysql.cj.jdbc.Driver";
}
// ========== Pagination ==========
@Override
public String buildPaginationClause(int limit, int offset) {
return "LIMIT ? OFFSET ?";
}
@Override
public Object[] getPaginationParameters(int limit, int offset) {
return new Object[]{limit, offset};
}
// ========== Upsert Operations ==========
@Override
public String buildUpsertStatement(
String tableName,
List<String> uniqueColumns,
List<String> insertColumns,
List<String> updateColumns
) {
// INSERT INTO table (col1, col2, ...) VALUES (?, ?, ...)
String insertPart = String.format(
"INSERT INTO %s (%s) VALUES (%s)",
tableName,
String.join(", ", insertColumns),
insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "))
);
// ON DUPLICATE KEY UPDATE col1 = VALUES(col1), col2 = VALUES(col2), ...
String updatePart = updateColumns.stream()
.map(col -> col + " = VALUES(" + col + ")")
.collect(Collectors.joining(", "));
return insertPart + " ON DUPLICATE KEY UPDATE " + updatePart;
}
@Override
public String buildInsertIgnoreStatement(
String tableName,
List<String> columns,
List<String> uniqueColumns
) {
return String.format(
"INSERT IGNORE INTO %s (%s) VALUES (%s)",
tableName,
String.join(", ", columns),
columns.stream().map(c -> "?").collect(Collectors.joining(", "))
);
}
// ========== Locking Strategies ==========
@Override
public String buildSelectForUpdateSkipLocked(String selectStatement) {
return selectStatement + " FOR UPDATE SKIP LOCKED";
}
@Override
public String buildSelectForUpdate(String selectStatement) {
return selectStatement + " FOR UPDATE";
}
// ========== Date/Time Functions ==========
@Override
public String getCurrentTimestamp() {
return "NOW()";
}
@Override
public String buildDateSubtraction(String baseDate, String value, DateUnit unit) {
String base = baseDate != null ? baseDate : "NOW()";
return String.format("DATE_SUB(%s, INTERVAL %s %s)", base, value, unit.name());
}
@Override
public String buildDateAddition(String baseDate, String value, DateUnit unit) {
String base = baseDate != null ? baseDate : "NOW()";
return String.format("DATE_ADD(%s, INTERVAL %s %s)", base, value, unit.name());
}
@Override
public String extractDate(String columnOrExpression) {
return "DATE(" + columnOrExpression + ")";
}
// ========== Auto-increment Reset ==========
@Override
public String buildAutoIncrementReset(String tableName) {
return String.format("ALTER TABLE %s AUTO_INCREMENT = 1", tableName);
}
// ========== Geospatial Distance Calculation ==========
@Override
public String buildHaversineDistance(String lat1, String lng1, String lat2, String lng2) {
// Haversine formula: 6371 km (Earth radius) * acos(...)
// Formula: d = 2R * arcsin(sqrt(sin²((lat2-lat1)/2) + cos(lat1)*cos(lat2)*sin²((lon2-lon1)/2)))
// Simplified: R * acos(cos(lat1)*cos(lat2)*cos(lng2-lng1) + sin(lat1)*sin(lat2))
// Returns distance in KILOMETERS
return String.format(
"6371 * ACOS(COS(RADIANS(%s)) * COS(RADIANS(%s)) * " +
"COS(RADIANS(%s) - RADIANS(%s)) + SIN(RADIANS(%s)) * SIN(RADIANS(%s)))",
lat1, lat2, lng2, lng1, lat1, lat2
);
}
// ========== String/Type Functions ==========
@Override
public String buildConcat(String... expressions) {
return "CONCAT(" + String.join(", ", expressions) + ")";
}
@Override
public String castToString(String expression) {
return "CAST(" + expression + " AS CHAR)";
}
// ========== Bulk Operations ==========
@Override
public String getMaxLimitValue() {
// MySQL BIGINT UNSIGNED max value
return "18446744073709551615";
}
@Override
public boolean supportsReturningClause() {
return false;
}
@Override
public String buildReturningClause(String... columns) {
throw new UnsupportedOperationException(
"MySQL does not support RETURNING clause. Use LAST_INSERT_ID() or GeneratedKeyHolder instead."
);
}
// ========== Schema/DDL ==========
@Override
public String getAutoIncrementDefinition() {
return "INT NOT NULL AUTO_INCREMENT";
}
@Override
public String getTimestampDefinition() {
return "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP";
}
// ========== Boolean Literals ==========
@Override
public String getBooleanTrue() {
return "TRUE";
}
@Override
public String getBooleanFalse() {
return "FALSE";
}
// ========== Identifier Escaping ==========
@Override
public String escapeIdentifier(String identifier) {
// MySQL uses backticks for escaping reserved words
return "`" + identifier + "`";
}
}
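
Because the two providers reverse the placeholder order, the repositories further down in this change set always bind getPaginationParameters(...) rather than limit/offset directly. A small sketch (class name ours, assuming both providers from this diff are on the classpath) of the difference:

package de.avatic.lcc.database.dialect;

import java.util.Arrays;

public class PaginationOrderDemo {
    public static void main(String[] args) {
        var mysql = new MySQLDialectProvider();
        var mssql = new MSSQLDialectProvider();

        // MySQL: LIMIT ? OFFSET ?  -> bind [limit, offset]
        System.out.println(mysql.buildPaginationClause(25, 50));                     // LIMIT ? OFFSET ?
        System.out.println(Arrays.toString(mysql.getPaginationParameters(25, 50)));  // [25, 50]

        // MSSQL: OFFSET ? ROWS FETCH NEXT ? ROWS ONLY  -> bind [offset, limit];
        // the surrounding query must carry an ORDER BY for this clause to be valid.
        System.out.println(mssql.buildPaginationClause(25, 50));                     // OFFSET ? ROWS FETCH NEXT ? ROWS ONLY
        System.out.println(Arrays.toString(mssql.getPaginationParameters(25, 50)));  // [50, 25]
    }
}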

View file

@ -0,0 +1,403 @@
package de.avatic.lcc.database.dialect;
import java.util.List;
/**
* Provides database-specific SQL syntax for different RDBMS implementations.
* Supports MySQL and Microsoft SQL Server with identical semantic behavior.
*
* <p>This interface abstracts database-specific SQL patterns to enable multi-database support
* in the LCC application. Each dialect provider implements the SQL syntax specific to
* its target database while maintaining consistent semantics across all implementations.</p>
*
* @author LCC Team
* @since 1.0
*/
public interface SqlDialectProvider {
// ========== Metadata ==========
/**
* Returns the dialect name (e.g., "MySQL", "Microsoft SQL Server").
*
* @return the name of the database dialect
*/
String getDialectName();
/**
* Returns the JDBC driver class name for this dialect.
*
* @return the fully qualified JDBC driver class name
*/
String getDriverClassName();
// ========== Pagination ==========
/**
* Generates the pagination clause for limiting and offsetting query results.
*
* <p>Examples:</p>
* <ul>
* <li>MySQL: {@code LIMIT ? OFFSET ?}</li>
* <li>MSSQL: {@code OFFSET ? ROWS FETCH NEXT ? ROWS ONLY}</li>
* </ul>
*
* <p><b>Note:</b> MSSQL requires an ORDER BY clause before OFFSET/FETCH.</p>
*
* @param limit maximum number of rows to return
* @param offset number of rows to skip
* @return SQL clause for pagination (without parameter values)
*/
String buildPaginationClause(int limit, int offset);
/**
* Returns parameter values in the correct order for the pagination clause.
*
* <p>Parameter order varies by database:</p>
* <ul>
* <li>MySQL: {@code [limit, offset]}</li>
* <li>MSSQL: {@code [offset, limit]}</li>
* </ul>
*
* @param limit maximum number of rows to return
* @param offset number of rows to skip
* @return array of parameters in database-specific order
*/
Object[] getPaginationParameters(int limit, int offset);
// ========== Upsert Operations ==========
/**
* Builds an UPSERT (INSERT or UPDATE) statement.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code INSERT ... ON DUPLICATE KEY UPDATE ...}</li>
* <li>MSSQL: {@code MERGE ... WHEN MATCHED THEN UPDATE WHEN NOT MATCHED THEN INSERT ...}</li>
* </ul>
*
* @param tableName target table name
* @param uniqueColumns columns that define uniqueness (for matching existing rows)
* @param insertColumns all columns to insert in a new row
* @param updateColumns columns to update if row exists
* @return complete UPSERT SQL statement with placeholders
*/
String buildUpsertStatement(
String tableName,
List<String> uniqueColumns,
List<String> insertColumns,
List<String> updateColumns
);
/**
* Builds an INSERT IGNORE statement that inserts only if the row does not exist.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code INSERT IGNORE INTO ...}</li>
* <li>MSSQL: {@code MERGE ... WHEN NOT MATCHED THEN INSERT ...}</li>
* </ul>
*
* @param tableName target table name
* @param columns columns to insert
* @param uniqueColumns columns that define uniqueness (for existence check)
* @return INSERT IGNORE statement with placeholders
*/
String buildInsertIgnoreStatement(
String tableName,
List<String> columns,
List<String> uniqueColumns
);
// ========== Locking Strategies ==========
/**
* Builds SELECT FOR UPDATE with skip locked capability for pessimistic locking.
*
* <p>This is critical for {@code CalculationJobRepository} concurrent job processing.</p>
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code SELECT ... FOR UPDATE SKIP LOCKED}</li>
* <li>MSSQL: {@code SELECT ... WITH (UPDLOCK, READPAST)}</li>
* </ul>
*
* @param selectStatement base SELECT statement (without locking clause)
* @return complete statement with pessimistic locking that skips locked rows
*/
String buildSelectForUpdateSkipLocked(String selectStatement);
/**
* Builds standard SELECT FOR UPDATE for pessimistic locking (waits for locks).
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code SELECT ... FOR UPDATE}</li>
* <li>MSSQL: {@code SELECT ... WITH (UPDLOCK, ROWLOCK)}</li>
* </ul>
*
* @param selectStatement base SELECT statement (without locking clause)
* @return complete statement with pessimistic locking
*/
String buildSelectForUpdate(String selectStatement);
// ========== Date/Time Functions ==========
/**
* Returns the SQL function for getting the current timestamp.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code NOW()}</li>
* <li>MSSQL: {@code GETDATE()}</li>
* </ul>
*
* @return SQL function for current timestamp
*/
String getCurrentTimestamp();
/**
* Builds a date subtraction expression.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code DATE_SUB(NOW(), INTERVAL ? DAY)}</li>
* <li>MSSQL: {@code DATEADD(DAY, -?, GETDATE())}</li>
* </ul>
*
* @param baseDate base date expression (or null to use current timestamp)
* @param value placeholder for number of time units to subtract (e.g., "?")
* @param unit time unit (DAY, HOUR, MINUTE, etc.)
* @return date subtraction expression
*/
String buildDateSubtraction(String baseDate, String value, DateUnit unit);
/**
* Builds a date addition expression.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code DATE_ADD(NOW(), INTERVAL ? DAY)}</li>
* <li>MSSQL: {@code DATEADD(DAY, ?, GETDATE())}</li>
* </ul>
*
* @param baseDate base date expression (or null to use current timestamp)
* @param value placeholder for number of time units to add (e.g., "?")
* @param unit time unit (DAY, HOUR, MINUTE, etc.)
* @return date addition expression
*/
String buildDateAddition(String baseDate, String value, DateUnit unit);
/**
* Extracts the date part from a datetime expression (ignoring time component).
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code DATE(column)}</li>
* <li>MSSQL: {@code CAST(column AS DATE)}</li>
* </ul>
*
* @param columnOrExpression column name or expression to extract date from
* @return expression that extracts date component
*/
String extractDate(String columnOrExpression);
// ========== Auto-increment Reset ==========
/**
* Resets the auto-increment counter for a table (primarily used in tests).
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code ALTER TABLE table AUTO_INCREMENT = 1}</li>
* <li>MSSQL: {@code DBCC CHECKIDENT ('table', RESEED, 0)}</li>
* </ul>
*
* @param tableName table to reset auto-increment counter
* @return SQL statement to reset auto-increment
*/
String buildAutoIncrementReset(String tableName);
// ========== Geospatial Distance Calculation ==========
/**
* Builds a Haversine distance calculation expression.
*
* <p>Used in {@code NodeRepository} for finding nearby nodes based on geographic coordinates.
* Calculates the great-circle distance between two points on Earth's surface.</p>
*
* <p>Both MySQL and MSSQL support trigonometric functions (SIN, COS, ACOS, RADIANS),
* so the implementation is similar across databases.</p>
*
* @param lat1 first latitude column or expression
* @param lng1 first longitude column or expression
* @param lat2 second latitude column or expression
* @param lng2 second longitude column or expression
* @return expression calculating distance in kilometers
*/
String buildHaversineDistance(String lat1, String lng1, String lat2, String lng2);
// ========== String/Type Functions ==========
/**
* Builds a string concatenation expression.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code CONCAT(a, b, c)}</li>
* <li>MSSQL: {@code CONCAT(a, b, c)} (SQL Server 2012+) or {@code a + b + c}</li>
* </ul>
*
* @param expressions expressions to concatenate
* @return concatenation expression
*/
String buildConcat(String... expressions);
/**
* Converts an expression to string type.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code CAST(x AS CHAR)}</li>
* <li>MSSQL: {@code CAST(x AS VARCHAR(MAX))}</li>
* </ul>
*
* @param expression expression to convert to string
* @return cast-to-string expression
*/
String castToString(String expression);
// ========== Bulk Operations ==========
/**
* Returns the maximum safe value for LIMIT clause.
*
* <p>Used as a workaround in queries that need an OFFSET without an effective LIMIT (MySQL cannot express OFFSET without a LIMIT).</p>
* <ul>
* <li>MySQL: {@code 18446744073709551615} (BIGINT UNSIGNED max)</li>
* <li>MSSQL: {@code 2147483647} (INT max)</li>
* </ul>
*
* @return maximum limit value as string
*/
String getMaxLimitValue();
/**
* Checks if the dialect supports RETURNING clause for INSERT statements.
*
* <ul>
* <li>MySQL: {@code false} (use LAST_INSERT_ID())</li>
* <li>MSSQL: {@code true} (supports OUTPUT INSERTED.id)</li>
* </ul>
*
* @return true if RETURNING clause is supported
*/
boolean supportsReturningClause();
/**
* Builds a RETURNING clause for an INSERT statement.
*
* <p>MSSQL example: {@code OUTPUT INSERTED.id}</p>
*
* @param columns columns to return
* @return RETURNING clause
* @throws UnsupportedOperationException if dialect does not support RETURNING
*/
String buildReturningClause(String... columns);
// ========== Schema/DDL ==========
/**
* Returns the auto-increment column definition for schema creation.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code INT NOT NULL AUTO_INCREMENT}</li>
* <li>MSSQL: {@code INT NOT NULL IDENTITY(1,1)}</li>
* </ul>
*
* @return auto-increment column definition
*/
String getAutoIncrementDefinition();
/**
* Returns the timestamp column definition with automatic update capability.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP}</li>
* <li>MSSQL: {@code DATETIME2 NOT NULL DEFAULT GETDATE()} (requires trigger for ON UPDATE)</li>
* </ul>
*
* <p><b>Note:</b> For MSSQL, triggers must be created separately to handle ON UPDATE behavior.</p>
*
* @return timestamp column definition
*/
String getTimestampDefinition();
// ========== Boolean Literals ==========
/**
* Returns the SQL literal for boolean TRUE value.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code TRUE}</li>
* <li>MSSQL: {@code 1}</li>
* </ul>
*
* @return SQL literal for true
*/
String getBooleanTrue();
/**
* Returns the SQL literal for boolean FALSE value.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code FALSE}</li>
* <li>MSSQL: {@code 0}</li>
* </ul>
*
* @return SQL literal for false
*/
String getBooleanFalse();
// ========== Identifier Escaping ==========
/**
* Escapes a column or table identifier if it conflicts with reserved words.
*
* <p>Database-specific implementations:</p>
* <ul>
* <li>MySQL: {@code `identifier`}</li>
* <li>MSSQL: {@code [identifier]}</li>
* </ul>
*
* <p>Used for reserved words like "file", "user", "order", etc.</p>
*
* @param identifier column or table name to escape
* @return escaped identifier
*/
String escapeIdentifier(String identifier);
// ========== Helper Enums ==========
/**
* Time units for date arithmetic operations.
*/
enum DateUnit {
/** Year unit */
YEAR,
/** Month unit */
MONTH,
/** Day unit */
DAY,
/** Hour unit */
HOUR,
/** Minute unit */
MINUTE,
/** Second unit */
SECOND
}
}
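
The repository changes below all follow the same consumption pattern. A hypothetical repository sketching how the interface is meant to be injected and used (the class and the audit_log table are illustrative only, not part of this change set):

package de.avatic.lcc.repositories;

import de.avatic.lcc.database.dialect.SqlDialectProvider;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;

import java.util.List;
import java.util.Map;

@Repository
public class ExampleAuditRepository {

    private final JdbcTemplate jdbcTemplate;
    private final SqlDialectProvider dialect;

    public ExampleAuditRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialect) {
        this.jdbcTemplate = jdbcTemplate;
        this.dialect = dialect;
    }

    public List<Map<String, Object>> listRecent(int limit, int offset) {
        // ORDER BY is mandatory for MSSQL OFFSET/FETCH and harmless for MySQL LIMIT/OFFSET.
        String sql = "SELECT * FROM audit_log ORDER BY created_at DESC "
                + dialect.buildPaginationClause(limit, offset);
        // Never bind limit/offset directly: the parameter order differs per dialect.
        Object[] params = dialect.getPaginationParameters(limit, offset);
        return jdbcTemplate.queryForList(sql, params);
    }
}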

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.materials.Material;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult;
@ -18,19 +19,21 @@ import java.util.stream.Collectors;
public class MaterialRepository {
JdbcTemplate jdbcTemplate;
SqlDialectProvider dialectProvider;
@Autowired
public MaterialRepository(JdbcTemplate jdbcTemplate) {
public MaterialRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
private static String buildCountQuery(String filter, boolean excludeDeprecated) {
private String buildCountQuery(String filter, boolean excludeDeprecated) {
StringBuilder queryBuilder = new StringBuilder("""
SELECT count(*)
FROM material WHERE 1=1""");
if (excludeDeprecated) {
queryBuilder.append(" AND is_deprecated = FALSE");
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
if (filter != null) {
queryBuilder.append(" AND (name LIKE ? OR part_number LIKE ?) ");
@ -39,18 +42,19 @@ public class MaterialRepository {
return queryBuilder.toString();
}
private static String buildQuery(String filter, boolean excludeDeprecated) {
private String buildQuery(String filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
StringBuilder queryBuilder = new StringBuilder("""
SELECT id, name, part_number, normalized_part_number, hs_code, is_deprecated
FROM material WHERE 1=1""");
if (excludeDeprecated) {
queryBuilder.append(" AND is_deprecated = FALSE");
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
if (filter != null) {
queryBuilder.append(" AND (name LIKE ? OR part_number LIKE ? ) ");
}
queryBuilder.append(" ORDER BY normalized_part_number LIMIT ? OFFSET ?");
queryBuilder.append(" ORDER BY normalized_part_number ");
queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
return queryBuilder.toString();
}
@ -95,20 +99,22 @@ public class MaterialRepository {
@Transactional
public Optional<Integer> setDeprecatedById(Integer id) {
String query = "UPDATE material SET is_deprecated = TRUE WHERE id = ?";
String query = "UPDATE material SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
}
@Transactional
public SearchQueryResult<Material> listMaterials(Optional<String> filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
String query = buildQuery(filter.orElse(null), excludeDeprecated);
String query = buildQuery(filter.orElse(null), excludeDeprecated, pagination);
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
var materials = filter.isPresent() ?
jdbcTemplate.query(query, new MaterialMapper(),
filter.get() + "%", filter.get() + "%", pagination.getLimit(), pagination.getOffset()) :
filter.get() + "%", filter.get() + "%", paginationParams[0], paginationParams[1]) :
jdbcTemplate.query(query, new MaterialMapper(),
pagination.getLimit(), pagination.getOffset());
paginationParams[0], paginationParams[1]);
String countQuery = buildCountQuery(filter.orElse(null), excludeDeprecated);
@ -134,7 +140,7 @@ public class MaterialRepository {
@Transactional
public Optional<Material> getById(Integer id) {
String query = "SELECT * FROM material WHERE id = ? AND is_deprecated = FALSE";
String query = "SELECT * FROM material WHERE id = ? AND is_deprecated = " + dialectProvider.getBooleanFalse();
var material = jdbcTemplate.query(query, new MaterialMapper(), id);
@ -146,7 +152,7 @@ public class MaterialRepository {
@Transactional
public void deleteById(Integer id) {
String deleteQuery = "UPDATE material SET is_deprecated = TRUE WHERE id = ?";
String deleteQuery = "UPDATE material SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
jdbcTemplate.update(deleteQuery, id);
}
@ -210,9 +216,9 @@ public class MaterialRepository {
.map(id -> "?")
.collect(Collectors.joining(","));
String sql = "UPDATE material SET is_deprecated = TRUE WHERE id IN ("+placeholders+")";
String sql = "UPDATE material SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id IN ("+placeholders+")";
jdbcTemplate.update(sql, ids);
jdbcTemplate.update(sql, ids.toArray());
}
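
The switch from update(sql, ids) to update(sql, ids.toArray()) in the last hunk matters because JdbcTemplate.update takes Object... varargs: a List is passed as a single bound value, while an array is spread into one value per placeholder. A standalone sketch of the counting behaviour (class and method names are ours, only mimicking the Object... signature):

package de.avatic.lcc.repositories;

import java.util.List;

public class VarargsBindingDemo {
    // Mirrors the Object... signature of JdbcTemplate.update(String, Object...).
    static int boundParameterCount(Object... args) {
        return args.length;
    }

    public static void main(String[] args) {
        List<Integer> ids = List.of(1, 2, 3);
        System.out.println(boundParameterCount(ids));            // 1 - the whole List is a single argument
        System.out.println(boundParameterCount(ids.toArray()));  // 3 - one argument per id, matching IN (?,?,?)
    }
}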

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.dto.generic.NodeType;
import de.avatic.lcc.model.db.ValidityTuple;
import de.avatic.lcc.model.db.nodes.Node;
@ -27,10 +28,12 @@ public class NodeRepository {
private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
private final SqlDialectProvider dialectProvider;
public NodeRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
public NodeRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -102,11 +105,13 @@ public class NodeRepository {
List<Node> entities = null;
Integer totalCount = 0;
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
if (filter == null) {
entities = jdbcTemplate.query(query, new NodeMapper(), pagination.getLimit(), pagination.getOffset());
entities = jdbcTemplate.query(query, new NodeMapper(), paginationParams[0], paginationParams[1]);
totalCount = jdbcTemplate.queryForObject(countQuery, Integer.class);
} else {
entities = jdbcTemplate.query(query, new NodeMapper(), "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", pagination.getLimit(), pagination.getOffset());
entities = jdbcTemplate.query(query, new NodeMapper(), "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", paginationParams[0], paginationParams[1]);
totalCount = jdbcTemplate.queryForObject(countQuery, Integer.class, "%" + filter + "%", "%" + filter + "%", "%" + filter + "%", "%" + filter + "%");
}
@ -122,7 +127,7 @@ public class NodeRepository {
WHERE 1=1""");
if (excludeDeprecated) {
queryBuilder.append(" AND node.is_deprecated = FALSE");
queryBuilder.append(" AND node.is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
if (filter != null) {
queryBuilder.append(" AND (node.name LIKE ? OR node.external_mapping_id LIKE ? OR node.address LIKE ? OR country.iso_code LIKE ?)");
@ -140,21 +145,22 @@ public class NodeRepository {
""");
if (excludeDeprecated) {
queryBuilder.append(" AND node.is_deprecated = FALSE");
queryBuilder.append(" AND node.is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
if (filter != null) {
queryBuilder.append(" AND (node.name LIKE ? OR node.external_mapping_id LIKE ? OR node.address LIKE ? OR country.iso_code LIKE ?)");
}
queryBuilder.append(" ORDER BY node.id LIMIT ? OFFSET ?");
queryBuilder.append(" ORDER BY node.id ");
queryBuilder.append(dialectProvider.buildPaginationClause(searchQueryPagination.getLimit(), searchQueryPagination.getOffset()));
return queryBuilder.toString();
}
@Transactional
public Optional<Integer> setDeprecatedById(Integer id) {
String query = "UPDATE node SET is_deprecated = TRUE WHERE id = ?";
String query = "UPDATE node SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
// Mark all linked RouteNodes as outdated
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = TRUE WHERE node_id = ?", id);
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = " + dialectProvider.getBooleanTrue() + " WHERE node_id = ?", id);
return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
@ -169,7 +175,7 @@ public class NodeRepository {
if(node.isUserNode())
throw new DatabaseException("Cannot update user node in node repository.");
String updateNodeSql = """
String updateNodeSql = String.format("""
UPDATE node SET
country_id = ?,
name = ?,
@ -182,9 +188,9 @@ public class NodeRepository {
geo_lat = ?,
geo_lng = ?,
is_deprecated = ?,
updated_at = CURRENT_TIMESTAMP
updated_at = %s
WHERE id = ?
""";
""", dialectProvider.getCurrentTimestamp());
int rowsUpdated = jdbcTemplate.update(updateNodeSql,
node.getCountryId(),
@ -255,7 +261,7 @@ public class NodeRepository {
}
// Mark all linked RouteNodes as outdated
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = TRUE WHERE node_id = ?", node.getId());
jdbcTemplate.update("UPDATE premise_route_node SET is_outdated = " + dialectProvider.getBooleanTrue() + " WHERE node_id = ?", node.getId());
// Mark all distance matrix entries as stale
jdbcTemplate.update("UPDATE distance_matrix SET state = 'STALE' WHERE ((from_node_id = ?) OR (to_node_id = ?))", node.getId(), node.getId());
@ -288,11 +294,11 @@ public class NodeRepository {
}
if (nodeType.equals(NodeType.SOURCE)) {
queryBuilder.append("is_source = true");
queryBuilder.append("is_source = ").append(dialectProvider.getBooleanTrue());
} else if (nodeType.equals(NodeType.DESTINATION)) {
queryBuilder.append("is_destination = true");
queryBuilder.append("is_destination = ").append(dialectProvider.getBooleanTrue());
} else if (nodeType.equals(NodeType.INTERMEDIATE)) {
queryBuilder.append("is_intermediate = true");
queryBuilder.append("is_intermediate = ").append(dialectProvider.getBooleanTrue());
}
}
@ -303,11 +309,15 @@ public class NodeRepository {
} else {
queryBuilder.append(" AND ");
}
queryBuilder.append("is_deprecated = false");
queryBuilder.append("is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
queryBuilder.append(" LIMIT ?");
parameters.add(limit);
// MSSQL requires ORDER BY before OFFSET
queryBuilder.append(" ORDER BY id ");
queryBuilder.append(dialectProvider.buildPaginationClause(limit, 0));
Object[] paginationParams = dialectProvider.getPaginationParameters(limit, 0);
parameters.add(paginationParams[0]);
parameters.add(paginationParams[1]);
return jdbcTemplate.query(queryBuilder.toString(), new NodeMapper(), parameters.toArray());
}
@ -315,7 +325,7 @@ public class NodeRepository {
public List<Node> listAllNodes(boolean onlySources) {
StringBuilder queryBuilder = new StringBuilder("SELECT * FROM node");
if (onlySources) {
queryBuilder.append(" WHERE is_source = true");
queryBuilder.append(" WHERE is_source = ").append(dialectProvider.getBooleanTrue());
}
queryBuilder.append(" ORDER BY id");
@ -393,40 +403,35 @@ public class NodeRepository {
@Transactional
public List<Node> getByDistance(Node node, Integer regionRadius) {
if(node.isUserNode()) {
String query = """
SELECT * FROM node
WHERE is_deprecated = FALSE AND
(
6371 * acos(
cos(radians(?)) *
cos(radians(geo_lat)) *
cos(radians(geo_lng) - radians(?)) +
sin(radians(?)) *
sin(radians(geo_lat))
)
) <= ?
""";
String haversineFormula = dialectProvider.buildHaversineDistance("geo_lat", "geo_lng", "?", "?");
return jdbcTemplate.query(query, new NodeMapper(), node.getGeoLat(), node.getGeoLng(), node.getGeoLat(), regionRadius);
if(node.isUserNode()) {
String query = String.format("""
SELECT * FROM node
WHERE is_deprecated = %s AND
(%s) <= ?
""", dialectProvider.getBooleanFalse(), haversineFormula);
return jdbcTemplate.query(query, new NodeMapper(),
node.getGeoLat(), // for COS(RADIANS(?))
node.getGeoLng(), // for COS(RADIANS(?) - RADIANS(geo_lng))
node.getGeoLat(), // for SIN(RADIANS(?))
regionRadius); // for <= ?
}
String query = """
String query = String.format("""
SELECT * FROM node
WHERE is_deprecated = FALSE AND id != ? AND
(
6371 * acos(
cos(radians(?)) *
cos(radians(geo_lat)) *
cos(radians(geo_lng) - radians(?)) +
sin(radians(?)) *
sin(radians(geo_lat))
)
) <= ?
""";
WHERE is_deprecated = %s AND id != ? AND
(%s) <= ?
""", dialectProvider.getBooleanFalse(), haversineFormula);
return jdbcTemplate.query(query, new NodeMapper(), node.getId(), node.getGeoLat(), node.getGeoLng(), node.getGeoLat(), regionRadius);
return jdbcTemplate.query(query, new NodeMapper(),
node.getId(), // for id != ?
node.getGeoLat(), // for COS(RADIANS(?))
node.getGeoLng(), // for COS(RADIANS(?) - RADIANS(geo_lng))
node.getGeoLat(), // for SIN(RADIANS(?))
regionRadius); // for <= ?
}
@ -441,12 +446,12 @@ public class NodeRepository {
* Returns an empty list if no outbound nodes are found.
*/
public List<Node> getAllOutboundFor(Integer countryId) {
String query = """
String query = String.format("""
SELECT node.*
FROM node
LEFT JOIN outbound_country_mapping ON outbound_country_mapping.node_id = node.id
WHERE node.is_deprecated = FALSE AND (outbound_country_mapping.country_id = ? OR (node.is_intermediate = TRUE AND node.country_id = ?))
""";
WHERE node.is_deprecated = %s AND (outbound_country_mapping.country_id = ? OR (node.is_intermediate = %s AND node.country_id = ?))
""", dialectProvider.getBooleanFalse(), dialectProvider.getBooleanTrue());
return jdbcTemplate.query(query, new NodeMapper(), countryId, countryId);
}
@ -472,7 +477,7 @@ public class NodeRepository {
public Optional<Node> getByDestinationId(Integer id) {
String query = "SELECT node.* FROM node INNER JOIN premise_destination WHERE node.id = premise_destination.destination_node_id AND premise_destination.id = ?";
String query = "SELECT node.* FROM node INNER JOIN premise_destination ON node.id = premise_destination.destination_node_id WHERE premise_destination.id = ?";
var node = jdbcTemplate.query(query, new NodeMapper(), id);
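
For reference, the Haversine expression getByDistance now obtains from buildHaversineDistance yields kilometers; a small standalone check of the same formula evaluated in Java (class name ours, coordinates for Berlin and Hamburg):

package de.avatic.lcc.repositories;

public class HaversineDemo {
    public static void main(String[] args) {
        double lat1 = 52.5200, lng1 = 13.4050;   // Berlin
        double lat2 = 53.5511, lng2 = 9.9937;    // Hamburg
        // Same expression the dialect providers emit as SQL: 6371 * ACOS(...)
        double km = 6371 * Math.acos(
                Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                        * Math.cos(Math.toRadians(lng2) - Math.toRadians(lng1))
                + Math.sin(Math.toRadians(lat1)) * Math.sin(Math.toRadians(lat2)));
        System.out.printf("%.0f km%n", km);      // prints roughly 255, i.e. the result is in kilometers
    }
}

This also suggests the regionRadius bound to the trailing <= ? placeholder is interpreted in kilometers.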

View file

@ -1,6 +1,6 @@
package de.avatic.lcc.repositories;
import de.avatic.lcc.service.api.EUTaxationApiService;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
@ -10,19 +10,24 @@ import java.util.List;
public class NomenclatureRepository {
private final JdbcTemplate jdbcTemplate;
private final EUTaxationApiService eUTaxationApiService;
private final SqlDialectProvider dialectProvider;
public NomenclatureRepository(JdbcTemplate jdbcTemplate, EUTaxationApiService eUTaxationApiService) {
public NomenclatureRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.eUTaxationApiService = eUTaxationApiService;
this.dialectProvider = dialectProvider;
}
public List<String> searchHsCode(String search) {
String sql = """
SELECT hs_code FROM nomenclature WHERE hs_code LIKE CONCAT(?, '%') LIMIT 10
""";
String concatExpression = dialectProvider.buildConcat("?", "'%'");
String sql = String.format(
"SELECT hs_code FROM nomenclature WHERE hs_code LIKE %s ORDER BY hs_code %s",
concatExpression,
dialectProvider.buildPaginationClause(10, 0)
);
return jdbcTemplate.queryForList (sql, String.class, search);
Object[] paginationParams = dialectProvider.getPaginationParameters(10, 0);
return jdbcTemplate.queryForList(sql, String.class, search, paginationParams[0], paginationParams[1]);
}
}
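
Composed per dialect, the searchHsCode query above should come out as sketched below; a small demo (class name ours) that only prints the assembled SQL and the bind order:

package de.avatic.lcc.database.dialect;

import java.util.Arrays;

public class HsCodeQueryDemo {
    static void show(SqlDialectProvider dialect) {
        String sql = String.format(
                "SELECT hs_code FROM nomenclature WHERE hs_code LIKE %s ORDER BY hs_code %s",
                dialect.buildConcat("?", "'%'"),
                dialect.buildPaginationClause(10, 0));
        System.out.println(sql);
        System.out.println(Arrays.toString(dialect.getPaginationParameters(10, 0)));
    }

    public static void main(String[] args) {
        show(new MySQLDialectProvider());
        // ... LIKE CONCAT(?, '%') ORDER BY hs_code LIMIT ? OFFSET ?                      -> binds [10, 0]
        show(new MSSQLDialectProvider());
        // ... LIKE CONCAT(?, '%') ORDER BY hs_code OFFSET ? ROWS FETCH NEXT ? ROWS ONLY  -> binds [0, 10]
    }
}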

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.bulk;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.dto.bulk.BulkFileType;
import de.avatic.lcc.dto.bulk.BulkOperationState;
import de.avatic.lcc.dto.bulk.BulkProcessingType;
@ -24,9 +25,11 @@ public class BulkOperationRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public BulkOperationRepository(JdbcTemplate jdbcTemplate) {
public BulkOperationRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -34,10 +37,10 @@ public class BulkOperationRepository {
removeOld(operation.getUserId());
String sql = """
INSERT INTO bulk_operation (user_id, bulk_file_type, bulk_processing_type, state, file, validity_period_id)
String sql = String.format("""
INSERT INTO bulk_operation (user_id, bulk_file_type, bulk_processing_type, state, %s, validity_period_id)
VALUES (?, ?, ?, ?, ?, ?)
""";
""", dialectProvider.escapeIdentifier("file"));
GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
@ -66,43 +69,49 @@ public class BulkOperationRepository {
@Transactional
public void removeOld(Integer userId) {
// First, update sys_error records to set bulk_operation_id to NULL
// for bulk operations that will be deleted (all but the 10 newest for the current user)
String updateErrorsSql = """
// First, fetch the IDs of the 10 newest operations to keep
// (MySQL doesn't support LIMIT in IN/NOT IN subqueries)
String fetchNewestSql = "SELECT id FROM bulk_operation WHERE user_id = ? AND state NOT IN ('SCHEDULED', 'PROCESSING') ORDER BY created_at DESC " +
dialectProvider.buildPaginationClause(10, 0);
Object[] paginationParams = dialectProvider.getPaginationParameters(10, 0);
Object[] fetchParams = new Object[]{userId, paginationParams[0], paginationParams[1]};
List<Integer> newestIds = jdbcTemplate.queryForList(fetchNewestSql, Integer.class, fetchParams);
// The fetch above is capped at 10 rows, so its size alone cannot reveal whether older
// operations exist; bail out only when there is nothing to protect at all.
if (newestIds.isEmpty()) {
return;
}
// Build comma-separated list of IDs to keep
String idsToKeep = newestIds.stream()
.map(String::valueOf)
.reduce((a, b) -> a + "," + b)
.orElse("0");
// Update sys_error records to set bulk_operation_id to NULL for operations that will be deleted
String updateErrorsSql = String.format("""
UPDATE sys_error
SET bulk_operation_id = NULL
WHERE bulk_operation_id IN (
SELECT id FROM (
SELECT id
FROM bulk_operation
WHERE user_id = ?
AND state NOT IN ('SCHEDULED', 'PROCESSING')
ORDER BY created_at DESC
LIMIT 18446744073709551615 OFFSET 10
) AS old_operations
SELECT id FROM bulk_operation
WHERE user_id = ?
AND state NOT IN ('SCHEDULED', 'PROCESSING')
AND id NOT IN (%s)
)
""";
""", idsToKeep);
jdbcTemplate.update(updateErrorsSql, userId);
// Then delete the old bulk_operation entries (keeping only the 10 newest for the current user)
String deleteBulkSql = """
// Delete the old bulk_operation entries (keeping only the 10 newest for the current user)
String deleteBulkSql = String.format("""
DELETE FROM bulk_operation
WHERE user_id = ?
AND state NOT IN ('SCHEDULED', 'PROCESSING')
AND id NOT IN (
SELECT id FROM (
SELECT id
FROM bulk_operation
WHERE user_id = ?
AND state NOT IN ('SCHEDULED', 'PROCESSING')
ORDER BY created_at DESC
LIMIT 10
) AS newest_operations
)
""";
AND id NOT IN (%s)
""", idsToKeep);
jdbcTemplate.update(deleteBulkSql, userId, userId);
jdbcTemplate.update(deleteBulkSql, userId);
}
@Transactional
@ -121,33 +130,44 @@ public class BulkOperationRepository {
cleanupTimeouts(userId);
String sql = """
String baseQuery = """
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, created_at, validity_period_id
FROM bulk_operation
WHERE user_id = ?
ORDER BY created_at DESC LIMIT 10
ORDER BY created_at DESC
""";
return jdbcTemplate.query(sql, new BulkOperationRowMapper(true), userId);
String sql = baseQuery + dialectProvider.buildPaginationClause(10, 0);
Object[] paginationParams = dialectProvider.getPaginationParameters(10, 0);
// Combine userId with pagination params
Object[] allParams = new Object[]{userId, paginationParams[0], paginationParams[1]};
return jdbcTemplate.query(sql, new BulkOperationRowMapper(true), allParams);
}
private void cleanupTimeouts(Integer userId) {
String sql = """
UPDATE bulk_operation SET state = 'EXCEPTION' WHERE user_id = ? AND (state = 'PROCESSING' OR state = 'SCHEDULED') AND created_at < NOW() - INTERVAL 60 MINUTE
""";
// Build date subtraction expression (60 minutes ago)
String dateCondition = dialectProvider.buildDateSubtraction(null, "60", SqlDialectProvider.DateUnit.MINUTE);
String sql = String.format("""
UPDATE bulk_operation SET state = 'EXCEPTION'
WHERE user_id = ?
AND (state = 'PROCESSING' OR state = 'SCHEDULED')
AND created_at < %s
""", dateCondition);
jdbcTemplate.update(sql, userId);
}
@Transactional
public Optional<BulkOperation> getOperationById(Integer id) {
String sql = """
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, file, created_at, validity_period_id
String sql = String.format("""
SELECT id, user_id, bulk_file_type, bulk_processing_type, state, %s, created_at, validity_period_id
FROM bulk_operation
WHERE id = ?
""";
""", dialectProvider.escapeIdentifier("file"));
List<BulkOperation> results = jdbcTemplate.query(sql, new BulkOperationRowMapper(false), id);
@ -156,11 +176,11 @@ public class BulkOperationRepository {
@Transactional
public void update(BulkOperation op) {
String sql = """
String sql = String.format("""
UPDATE bulk_operation
SET user_id = ?, bulk_file_type = ?, state = ?, file = ?, validity_period_id = ?
SET user_id = ?, bulk_file_type = ?, state = ?, %s = ?, validity_period_id = ?
WHERE id = ?
""";
""", dialectProvider.escapeIdentifier("file"));
jdbcTemplate.update(sql,
op.getUserId(),

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.calculation;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.calculations.CalculationJob;
import de.avatic.lcc.model.db.calculations.CalculationJobPriority;
import de.avatic.lcc.model.db.calculations.CalculationJobState;
@ -18,9 +19,11 @@ import java.util.Optional;
@Repository
public class CalculationJobRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public CalculationJobRepository(JdbcTemplate jdbcTemplate) {
public CalculationJobRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -63,7 +66,8 @@ public class CalculationJobRepository {
*/
@Transactional
public Optional<CalculationJob> fetchAndLockNextJob() {
String sql = """
// Build base query with ORDER BY (required for OFFSET/FETCH in MSSQL)
String baseQuery = """
SELECT * FROM calculation_job
WHERE (job_state = 'CREATED')
OR (job_state = 'EXCEPTION' AND retries < 3)
@ -75,11 +79,18 @@ public class CalculationJobRepository {
WHEN job_state = 'EXCEPTION' THEN 4
END,
calculation_date
LIMIT 1
FOR UPDATE SKIP LOCKED
""";
""";
var jobs = jdbcTemplate.query(sql, new CalculationJobMapper());
// Add pagination (LIMIT 1 OFFSET 0)
String paginatedQuery = baseQuery + " " + dialectProvider.buildPaginationClause(1, 0);
// Add pessimistic locking with skip locked
String sql = dialectProvider.buildSelectForUpdateSkipLocked(paginatedQuery);
// Get pagination parameters in correct order for the database
Object[] params = dialectProvider.getPaginationParameters(1, 0);
var jobs = jdbcTemplate.query(sql, new CalculationJobMapper(), params);
if (jobs.isEmpty()) {
return Optional.empty();
@ -151,9 +162,14 @@ public class CalculationJobRepository {
public Optional<CalculationJob> getCalculationJobWithJobStateValid(Integer periodId, Integer setId, Integer nodeId, Integer materialId) {
/* there should only be one job per period id, node id and material id combination */
String query = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC LIMIT 1";
String baseQuery = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC ";
String query = baseQuery + dialectProvider.buildPaginationClause(1, 0);
Object[] params = dialectProvider.getPaginationParameters(1, 0);
var job = jdbcTemplate.query(query, new CalculationJobMapper(), periodId, setId, nodeId, materialId);
// Combine business logic params with pagination params
Object[] allParams = new Object[]{periodId, setId, nodeId, materialId, params[0], params[1]};
var job = jdbcTemplate.query(query, new CalculationJobMapper(), allParams);
if (job.isEmpty())
return Optional.empty();
@ -165,9 +181,14 @@ public class CalculationJobRepository {
public Optional<CalculationJob> getCalculationJobWithJobStateValidUserNodeId(Integer periodId, Integer setId, Integer userNodeId, Integer materialId) {
/* there should only be one job per period id, node id and material id combination */
String query = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.user_supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC LIMIT 1";
String baseQuery = "SELECT * FROM calculation_job AS cj INNER JOIN premise AS p ON cj.premise_id = p.id WHERE job_state = 'VALID' AND validity_period_id = ? AND property_set_id = ? AND p.user_supplier_node_id = ? AND material_id = ? ORDER BY cj.calculation_date DESC ";
String query = baseQuery + dialectProvider.buildPaginationClause(1, 0);
Object[] params = dialectProvider.getPaginationParameters(1, 0);
var job = jdbcTemplate.query(query, new CalculationJobMapper(), periodId, setId, userNodeId, materialId);
// Combine business logic params with pagination params
Object[] allParams = new Object[]{periodId, setId, userNodeId, materialId, params[0], params[1]};
var job = jdbcTemplate.query(query, new CalculationJobMapper(), allParams);
if (job.isEmpty())
return Optional.empty();
@ -211,8 +232,14 @@ public class CalculationJobRepository {
@Transactional
public CalculationJobState getLastStateFor(Integer premiseId) {
String sql = "SELECT job_state FROM calculation_job WHERE premise_id = ? ORDER BY calculation_date DESC LIMIT 1";
var result = jdbcTemplate.query(sql, (rs, rowNum) -> CalculationJobState.valueOf(rs.getString("job_state")), premiseId);
String baseQuery = "SELECT job_state FROM calculation_job WHERE premise_id = ? ORDER BY calculation_date DESC ";
String sql = baseQuery + dialectProvider.buildPaginationClause(1, 0);
Object[] params = dialectProvider.getPaginationParameters(1, 0);
// Combine business logic params with pagination params
Object[] allParams = new Object[]{premiseId, params[0], params[1]};
var result = jdbcTemplate.query(sql, (rs, rowNum) -> CalculationJobState.valueOf(rs.getString("job_state")), allParams);
if (result.isEmpty())
return null;
@ -227,9 +254,13 @@ public class CalculationJobRepository {
public Integer getFailedJobByUserId(Integer userId) {
String sql = "SELECT COUNT(*) FROM calculation_job WHERE user_id = ? AND job_state = 'EXCEPTION' AND calculation_date > DATE_SUB(NOW(), INTERVAL 3 DAY)";
// Build date subtraction expression using dialect provider
String dateCondition = dialectProvider.buildDateSubtraction(null, "3", SqlDialectProvider.DateUnit.DAY);
String sql = String.format(
"SELECT COUNT(*) FROM calculation_job WHERE user_id = ? AND job_state = 'EXCEPTION' AND calculation_date > %s",
dateCondition
);
return jdbcTemplate.queryForObject(sql, Integer.class, userId);
}
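
The fetchAndLockNextJob change composes pagination and the skip-locked hint in two steps; a shortened sketch (class name ours, WHERE and ORDER BY trimmed from the real query) of the SQL that falls out for each dialect:

package de.avatic.lcc.database.dialect;

public class LockedFetchSqlDemo {
    public static void main(String[] args) {
        String baseQuery = "SELECT * FROM calculation_job WHERE job_state = 'CREATED' ORDER BY calculation_date";
        for (SqlDialectProvider dialect : new SqlDialectProvider[]{new MySQLDialectProvider(), new MSSQLDialectProvider()}) {
            String paginated = baseQuery + " " + dialect.buildPaginationClause(1, 0);
            System.out.println(dialect.buildSelectForUpdateSkipLocked(paginated));
        }
        // MySQL:  SELECT * FROM calculation_job WHERE ... ORDER BY calculation_date LIMIT ? OFFSET ? FOR UPDATE SKIP LOCKED
        // MSSQL:  SELECT * FROM calculation_job WITH (UPDLOCK, READPAST) WHERE ... ORDER BY calculation_date OFFSET ? ROWS FETCH NEXT ? ROWS ONLY
    }
}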

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.country;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.dto.generic.PropertyDTO;
import de.avatic.lcc.model.db.properties.CountryPropertyMappingId;
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
@ -20,9 +21,11 @@ public class CountryPropertyRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public CountryPropertyRepository(JdbcTemplate jdbcTemplate) {
public CountryPropertyRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -44,11 +47,14 @@ public class CountryPropertyRepository {
return;
}
String query = """
INSERT INTO country_property (property_value, country_id, country_property_type_id, property_set_id) VALUES (?, ?, ?, ?) ON DUPLICATE KEY UPDATE property_value = ?
""";
String query = dialectProvider.buildUpsertStatement(
"country_property",
List.of("property_set_id", "country_property_type_id", "country_id"),
List.of("property_value", "country_id", "country_property_type_id", "property_set_id"),
List.of("property_value")
);
int affectedRows = jdbcTemplate.update(query, value, countryId, typeId, setId, value);
int affectedRows = jdbcTemplate.update(query, value, countryId, typeId, setId);
if(!(affectedRows > 0))
throw new DatabaseException("Could not update property value for country " + countryId + " and property type " + mappingId);
@ -144,7 +150,6 @@ public class CountryPropertyRepository {
type.external_mapping_id as externalMappingId,
type.validation_rule as validationRule,
type.is_required as is_required,
type.is_required as is_required,
type.description as description,
type.property_group as propertyGroup,
type.sequence_number as sequenceNumber,
@ -153,8 +158,10 @@ public class CountryPropertyRepository {
FROM country_property_type AS type
LEFT JOIN country_property AS cp ON cp.country_property_type_id = type.id AND cp.country_id = ?
LEFT JOIN property_set AS ps ON ps.id = cp.property_set_id AND ps.state IN ('DRAFT', 'VALID')
GROUP BY type.id, type.name, type.data_type, type.external_mapping_id, type.validation_rule
HAVING draftValue IS NOT NULL OR validValue IS NOT NULL;
GROUP BY type.id, type.name, type.data_type, type.external_mapping_id, type.validation_rule,
type.is_required, type.description, type.property_group, type.sequence_number
HAVING MAX(CASE WHEN ps.state = 'DRAFT' THEN cp.property_value END) IS NOT NULL
OR MAX(CASE WHEN ps.state = 'VALID' THEN cp.property_value END) IS NOT NULL;
""";
@ -184,9 +191,13 @@ public class CountryPropertyRepository {
LEFT JOIN country_property AS property ON property.country_property_type_id = type.id
LEFT JOIN property_set AS propertySet ON propertySet.id = property.property_set_id WHERE propertySet.state = 'VALID'""";
String insertQuery = dialectProvider.buildInsertIgnoreStatement(
"country_property",
List.of("property_value", "country_id", "country_property_type_id", "property_set_id"),
List.of("property_set_id", "country_property_type_id", "country_id")
);
jdbcTemplate.query(query, (rs, rowNum) -> {
String insertQuery = "INSERT IGNORE INTO country_property (property_value, country_id, country_property_type_id, property_set_id) VALUES (?, ?, ?, ?)";
jdbcTemplate.update(insertQuery, rs.getString("value"), rs.getInt("country_id"), rs.getInt("typeId"), setId);
return null;
});
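The ON DUPLICATE KEY UPDATE statement removed above is MySQL-only, which is why it moves behind dialectProvider.buildUpsertStatement(...). As a hedged illustration (not the project's actual implementation; class and method bodies here are made up), a MySQL-flavoured provider could generate the statement as follows, assuming the four-argument signature used in these calls. The VALUES(col) form in the update branch is what lets the callers drop the second binding of the updated value:

import java.util.List;
import java.util.stream.Collectors;

// Illustrative sketch only - not the repository's real SqlDialectProvider implementation.
class MySqlUpsertSketch {
    static String buildUpsertStatement(String table,
                                       List<String> uniqueColumns,   // MySQL relies on the table's unique index, so unused here
                                       List<String> insertColumns,
                                       List<String> updateColumns) {
        String columns = String.join(", ", insertColumns);
        String placeholders = insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
        String updates = updateColumns.stream()
                .map(c -> c + " = VALUES(" + c + ")")
                .collect(Collectors.joining(", "));
        return "INSERT INTO " + table + " (" + columns + ") VALUES (" + placeholders + ")"
                + " ON DUPLICATE KEY UPDATE " + updates;
    }
}

For the country_property call this would produce INSERT INTO country_property (property_value, country_id, country_property_type_id, property_set_id) VALUES (?, ?, ?, ?) ON DUPLICATE KEY UPDATE property_value = VALUES(property_value), matching the four arguments bound by jdbcTemplate.update.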

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.country;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.country.Country;
import de.avatic.lcc.model.db.country.IsoCode;
import de.avatic.lcc.model.db.country.RegionCode;
@ -22,10 +23,12 @@ public class CountryRepository {
private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
private final SqlDialectProvider dialectProvider;
public CountryRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
public CountryRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -66,13 +69,15 @@ public class CountryRepository {
@Transactional
public SearchQueryResult<Country> listCountries(Optional<String> filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
String query = buildQuery(filter.orElse(null), excludeDeprecated, true);
String query = buildQuery(filter.orElse(null), excludeDeprecated, pagination);
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
var countries = filter.isPresent() ?
jdbcTemplate.query(query, new CountryMapper(),
"%" + filter.get() + "%", "%" + filter.get() + "%", "%" + filter.get() + "%", pagination.getLimit(), pagination.getOffset()) :
"%" + filter.get() + "%", "%" + filter.get() + "%", "%" + filter.get() + "%", paginationParams[0], paginationParams[1]) :
jdbcTemplate.query(query, new CountryMapper()
, pagination.getLimit(), pagination.getOffset());
, paginationParams[0], paginationParams[1]);
Integer totalCount = filter.isPresent() ?
jdbcTemplate.queryForObject(
@ -89,7 +94,7 @@ public class CountryRepository {
@Transactional
public SearchQueryResult<Country> listCountries(Optional<String> filter, boolean excludeDeprecated) {
String query = buildQuery(filter.orElse(null), excludeDeprecated, false);
String query = buildQuery(filter.orElse(null), excludeDeprecated, null);
var countries = filter.map(f -> jdbcTemplate.query(query, new CountryMapper(),
"%" + f + "%", "%" + f + "%", "%" + f + "%"))
@ -111,7 +116,7 @@ public class CountryRepository {
FROM country WHERE 1=1""");
if (excludeDeprecated) {
queryBuilder.append(" AND is_deprecated = FALSE");
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
if (filter != null) {
queryBuilder.append(" AND (iso_code LIKE ? OR region_code LIKE ? or name LIKE ?) ");
@ -120,21 +125,20 @@ public class CountryRepository {
return queryBuilder.toString();
}
private String buildQuery(String filter, boolean excludeDeprecated, boolean hasLimit) {
private String buildQuery(String filter, boolean excludeDeprecated, SearchQueryPagination pagination) {
StringBuilder queryBuilder = new StringBuilder("""
SELECT id, iso_code, region_code, is_deprecated, name
FROM country WHERE 1=1""");
if (excludeDeprecated) {
queryBuilder.append(" AND is_deprecated = FALSE ");
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse()).append(" ");
}
if (filter != null) {
queryBuilder.append(" AND (iso_code LIKE ? OR region_code LIKE ? OR name LIKE ?) ");
}
if (hasLimit) {
queryBuilder.append(" ORDER BY iso_code LIMIT ? OFFSET ? ");
} else {
queryBuilder.append(" ORDER BY iso_code ");
queryBuilder.append(" ORDER BY iso_code ");
if (pagination != null) {
queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
}
return queryBuilder.toString();
}
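Pagination is the other recurring change in these files: buildPaginationClause contributes the dialect-specific placeholders and getPaginationParameters supplies the values in the matching order. A hedged sketch of what the two methods might return for MySQL and SQL Server (assumptions, not the real providers); the SQL Server form also explains why an ORDER BY is now always appended before the clause, since OFFSET ... FETCH requires one:

// Illustrative sketch only; the real SqlDialectProvider implementations may differ.
class PaginationSketch {
    // MySQL / MariaDB flavour
    static String mysqlClause(int limit, int offset) {
        return " LIMIT ? OFFSET ? ";
    }
    static Object[] mysqlParams(int limit, int offset) {
        return new Object[]{limit, offset};          // bound in LIMIT/OFFSET order
    }

    // SQL Server flavour: OFFSET ... FETCH requires a preceding ORDER BY,
    // which is why the queries above always append ORDER BY before the clause.
    static String mssqlClause(int limit, int offset) {
        return " OFFSET ? ROWS FETCH NEXT ? ROWS ONLY ";
    }
    static Object[] mssqlParams(int limit, int offset) {
        return new Object[]{offset, limit};          // bound in OFFSET/FETCH order
    }
}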

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.error;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.dto.error.CalculationJobDumpDTO;
import de.avatic.lcc.dto.error.CalculationJobDestinationDumpDTO;
import de.avatic.lcc.dto.error.CalculationJobRouteSectionDumpDTO;
@ -31,16 +32,17 @@ import java.util.Map;
public class DumpRepository {
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
private final JdbcTemplate jdbcTemplate;
private final PremiseRepository premiseRepository;
private final PremiseTransformer premiseTransformer;
private final SqlDialectProvider dialectProvider;
public DumpRepository(NamedParameterJdbcTemplate namedParameterJdbcTemplate, JdbcTemplate jdbcTemplate, PremiseRepository premiseRepository, PremiseTransformer premiseTransformer) {
public DumpRepository(NamedParameterJdbcTemplate namedParameterJdbcTemplate, JdbcTemplate jdbcTemplate, PremiseRepository premiseRepository, PremiseTransformer premiseTransformer, SqlDialectProvider dialectProvider) {
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
this.jdbcTemplate = jdbcTemplate;
this.premiseRepository = premiseRepository;
this.premiseTransformer = premiseTransformer;
this.dialectProvider = dialectProvider;
}
@Transactional(readOnly = true)
@ -112,12 +114,12 @@ public class DumpRepository {
}
private List<ErrorLogTraceItemDto> loadErrorTraceItems(Integer errorId) {
String traceQuery = """
SELECT line, file, method, fullPath
String traceQuery = String.format("""
SELECT line, %s, method, fullPath
FROM sys_error_trace_item
WHERE error_id = :errorId
ORDER BY id
""";
""", dialectProvider.escapeIdentifier("file"));
MapSqlParameterSource params = new MapSqlParameterSource("errorId", errorId);
@ -272,20 +274,17 @@ public class DumpRepository {
public SearchQueryResult<CalculationJobDumpDTO> listDumps(SearchQueryPagination searchQueryPagination) {
String calculationJobQuery = """
String calculationJobQuery = String.format("""
SELECT cj.id, cj.premise_id, cj.calculation_date, cj.validity_period_id,
cj.property_set_id, cj.job_state, cj.error_id, cj.user_id
FROM calculation_job cj
ORDER BY id DESC LIMIT :limit OFFSET :offset
""";
ORDER BY id DESC %s
""", dialectProvider.buildPaginationClause(searchQueryPagination.getLimit(), searchQueryPagination.getOffset()));
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("offset", searchQueryPagination.getOffset());
params.addValue("limit", searchQueryPagination.getLimit());
Object[] paginationParams = dialectProvider.getPaginationParameters(searchQueryPagination.getLimit(), searchQueryPagination.getOffset());
var dumps = namedParameterJdbcTemplate.query(
var dumps = jdbcTemplate.query(
calculationJobQuery,
params,
(rs, _) -> {
CalculationJobDumpDTO dto = new CalculationJobDumpDTO();
dto.setId(rs.getInt("id"));
@ -308,7 +307,8 @@ public class DumpRepository {
}
return dto;
});
},
paginationParams[0], paginationParams[1]);
for(var dump : dumps) {
// Load premise details

View file

@ -1,6 +1,7 @@
package de.avatic.lcc.repositories.error;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.error.SysError;
import de.avatic.lcc.model.db.error.SysErrorTraceItem;
import de.avatic.lcc.model.db.error.SysErrorType;
@ -27,10 +28,12 @@ public class SysErrorRepository {
private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
private final SqlDialectProvider dialectProvider;
public SysErrorRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
public SysErrorRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -99,7 +102,8 @@ public class SysErrorRepository {
}
private void insertTraceItems(Integer errorId, List<SysErrorTraceItem> traceItems) {
String traceSql = "INSERT INTO sys_error_trace_item (error_id, line, file, method, fullPath) VALUES (?, ?, ?, ?, ?)";
String traceSql = String.format("INSERT INTO sys_error_trace_item (error_id, line, %s, method, fullPath) VALUES (?, ?, ?, ?, ?)",
dialectProvider.escapeIdentifier("file"));
jdbcTemplate.batchUpdate(traceSql, traceItems, traceItems.size(),
(ps, traceItem) -> {
@ -114,35 +118,40 @@ public class SysErrorRepository {
@Transactional
public SearchQueryResult<SysError> listErrors(Optional<String> filter, SearchQueryPagination pagination) {
StringBuilder whereClause = new StringBuilder();
MapSqlParameterSource parameters = new MapSqlParameterSource();
List<Object> params = new ArrayList<>();
// Build WHERE clause if filter is provided
if (filter.isPresent() && !filter.get().trim().isEmpty()) {
String filterValue = "%" + filter.get().trim() + "%";
whereClause.append(" WHERE (e.title LIKE :filter OR e.message LIKE :filter OR e.code LIKE :filter)");
parameters.addValue("filter", filterValue);
whereClause.append(" WHERE (e.title LIKE ? OR e.message LIKE ? OR e.code LIKE ?)");
params.add(filterValue);
params.add(filterValue);
params.add(filterValue);
}
// Count total elements
String countSql = "SELECT COUNT(*) FROM sys_error e" + whereClause;
Integer totalElements = namedParameterJdbcTemplate.queryForObject(countSql, parameters, Integer.class);
Integer totalElements = params.isEmpty()
? jdbcTemplate.queryForObject(countSql, Integer.class)
: jdbcTemplate.queryForObject(countSql, Integer.class, params.toArray());
// Build main query with pagination
String sql = """
String sql = String.format("""
SELECT e.id, e.user_id, e.title, e.code, e.message, e.pinia,
e.calculation_job_id, e.bulk_operation_id, e.type, e.created_at, e.request
FROM sys_error e
""" + whereClause + """
%s
ORDER BY e.created_at DESC
LIMIT :limit OFFSET :offset
""";
%s
""", whereClause, dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
// Add pagination parameters
parameters.addValue("limit", pagination.getLimit());
parameters.addValue("offset", pagination.getOffset());
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
params.add(paginationParams[0]);
params.add(paginationParams[1]);
// Execute query
List<SysError> errors = namedParameterJdbcTemplate.query(sql, parameters, new SysErrorMapper());
List<SysError> errors = jdbcTemplate.query(sql, new SysErrorMapper(), params.toArray());
// Load trace items for each error
if (!errors.isEmpty()) {
@ -162,12 +171,12 @@ public class SysErrorRepository {
return;
}
String traceSql = """
SELECT error_id, id, line, file, method, fullPath
String traceSql = String.format("""
SELECT error_id, id, line, %s, method, fullPath
FROM sys_error_trace_item
WHERE error_id IN (:errorIds)
ORDER BY error_id, id
""";
""", dialectProvider.escapeIdentifier("file"));
MapSqlParameterSource traceParameters = new MapSqlParameterSource("errorIds", errorIds);
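escapeIdentifier is used here because FILE is a reserved keyword on SQL Server, so the column name has to be quoted differently per database. A minimal sketch of the two quoting styles (illustrative only; the class name is invented):

// Illustrative sketch only.
class IdentifierEscapingSketch {
    // MySQL quotes identifiers with backticks.
    static String mysqlEscape(String identifier) {
        return "`" + identifier + "`";
    }
    // SQL Server quotes identifiers with square brackets, which is what
    // makes the reserved word FILE usable as a column name.
    static String mssqlEscape(String identifier) {
        return "[" + identifier + "]";
    }
}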

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.packaging;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.packaging.PackagingDimension;
import de.avatic.lcc.model.db.packaging.PackagingType;
import de.avatic.lcc.model.db.utils.DimensionUnit;
@ -19,18 +20,21 @@ import java.util.Optional;
public class PackagingDimensionRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public PackagingDimensionRepository(JdbcTemplate jdbcTemplate) {
public PackagingDimensionRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
public Optional<PackagingDimension> getById(Integer id) {
String query = """
String query = String.format("""
SELECT id, displayed_dimension_unit, displayed_weight_unit, width, length, height,
weight, content_unit_count, type, is_deprecated
FROM packaging_dimension
WHERE packaging_dimension.id = ? AND packaging_dimension.is_deprecated = false""";
WHERE packaging_dimension.id = ? AND packaging_dimension.is_deprecated = %s""",
dialectProvider.getBooleanFalse());
// TODO: What if I need to get deprecated materials?
@ -113,7 +117,7 @@ public class PackagingDimensionRepository {
}
public Optional<Integer> setDeprecatedById(Integer id) {
String query = "UPDATE packaging_dimension SET is_deprecated = TRUE WHERE id = ?";
String query = "UPDATE packaging_dimension SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
}
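getBooleanTrue and getBooleanFalse replace the TRUE/FALSE literals because SQL Server's BIT type has no boolean literals. A hedged sketch of what the two providers might return (invented class name, illustration only):

// Illustrative sketch only.
class BooleanLiteralSketch {
    // MySQL accepts TRUE/FALSE, which are aliases for 1/0.
    static String mysqlTrue()  { return "TRUE"; }
    static String mysqlFalse() { return "FALSE"; }

    // SQL Server BIT columns have no TRUE/FALSE literals, so 1/0 are used instead.
    static String mssqlTrue()  { return "1"; }
    static String mssqlFalse() { return "0"; }
}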

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.packaging;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.properties.PackagingProperty;
import de.avatic.lcc.model.db.properties.PropertyDataType;
import de.avatic.lcc.model.db.properties.PropertyType;
@ -16,9 +17,11 @@ import java.util.Optional;
public class PackagingPropertiesRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public PackagingPropertiesRepository(JdbcTemplate jdbcTemplate) {
public PackagingPropertiesRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
public List<PackagingProperty> getByPackagingId(Integer id) {
@ -94,11 +97,14 @@ public class PackagingPropertiesRepository {
public void update(Integer packagingId, Integer typeId, String value) {
String query = """
INSERT INTO packaging_property (property_value, packaging_id, packaging_property_type_id) VALUES (?, ?, ?)
ON DUPLICATE KEY UPDATE property_value = ?""";
String query = dialectProvider.buildUpsertStatement(
"packaging_property",
List.of("packaging_id", "packaging_property_type_id"),
List.of("property_value", "packaging_id", "packaging_property_type_id"),
List.of("property_value")
);
jdbcTemplate.update(query, value, packagingId, typeId, value);
jdbcTemplate.update(query, value, packagingId, typeId);
}
public Integer getTypeIdByMappingId(String mappingId) {
@ -108,11 +114,14 @@ public class PackagingPropertiesRepository {
public void update(Integer packagingId, String typeId, String value) {
String query = """
INSERT INTO packaging_property (property_value, packaging_id, packaging_property_type_id) VALUES (?, ?, ?)
ON DUPLICATE KEY UPDATE property_value = ?""";
String query = dialectProvider.buildUpsertStatement(
"packaging_property",
List.of("packaging_id", "packaging_property_type_id"),
List.of("property_value", "packaging_id", "packaging_property_type_id"),
List.of("property_value")
);
jdbcTemplate.update(query, value, packagingId, typeId, value);
jdbcTemplate.update(query, value, packagingId, typeId);
}
}

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.packaging;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.packaging.Packaging;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult;
@ -45,40 +46,44 @@ public class PackagingRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public PackagingRepository(JdbcTemplate jdbcTemplate) {
public PackagingRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
public SearchQueryResult<Packaging> listPackaging(Integer materialId, Integer supplierId, boolean excludeDeprecated, SearchQueryPagination pagination) {
String query = buildQuery(materialId, supplierId, excludeDeprecated);
String query = buildQuery(materialId, supplierId, excludeDeprecated, pagination);
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
var params = new ArrayList<Object>();
params.add(excludeDeprecated);
// Note: excludeDeprecated is not added as parameter - it's inserted as boolean literal in buildQuery()
if (materialId != null) {
params.add(materialId);
}
if (supplierId != null) {
params.add(supplierId);
}
params.add(pagination.getLimit());
params.add(pagination.getOffset());
params.add(paginationParams[0]);
params.add(paginationParams[1]);
var packaging = jdbcTemplate.query(query, new PackagingMapper(), params.toArray());
return new SearchQueryResult<>(packaging, pagination.getPage(), countPackaging(materialId, supplierId, excludeDeprecated), pagination.getLimit());
}
private static String buildQuery(Integer materialId, Integer supplierId, boolean excludeDeprecated) {
private String buildQuery(Integer materialId, Integer supplierId, boolean excludeDeprecated, SearchQueryPagination pagination) {
StringBuilder queryBuilder = new StringBuilder("""
SELECT id,
SELECT id, supplier_node_id, material_id, hu_dimension_id, shu_dimension_id, is_deprecated
FROM packaging
WHERE 1=1""");
if (excludeDeprecated) {
queryBuilder.append(" AND is_deprecated = FALSE");
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
if (materialId != null) {
queryBuilder.append(" AND material_id = ?");
@ -86,7 +91,8 @@ public class PackagingRepository {
if (supplierId != null) {
queryBuilder.append(" AND supplier_node_id = ?");
}
queryBuilder.append("ORDER BY id LIMIT ? OFFSET ?");
queryBuilder.append(" ORDER BY id ");
queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
return queryBuilder.toString();
}
@ -145,7 +151,7 @@ public class PackagingRepository {
@Transactional
public Optional<Integer> setDeprecatedById(Integer id) {
String query = "UPDATE packaging SET is_deprecated = TRUE WHERE id = ?";
String query = "UPDATE packaging SET is_deprecated = " + dialectProvider.getBooleanTrue() + " WHERE id = ?";
return Optional.ofNullable(jdbcTemplate.update(query, id) == 0 ? null : id);
}

View file

@ -9,6 +9,7 @@ import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;
import org.springframework.stereotype.Repository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -19,7 +20,7 @@ import java.sql.Statement;
import java.util.*;
import java.util.stream.Collectors;
@Service
@Repository
public class DestinationRepository {
private final JdbcTemplate jdbcTemplate;

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.premise;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.materials.Material;
import de.avatic.lcc.model.db.nodes.Location;
import de.avatic.lcc.model.db.nodes.Node;
@ -37,10 +38,12 @@ public class PremiseRepository {
private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;
private final SqlDialectProvider dialectProvider;
public PremiseRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
public PremiseRepository(JdbcTemplate jdbcTemplate, NamedParameterJdbcTemplate namedParameterJdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -53,7 +56,7 @@ public class PremiseRepository {
.withArchived(archived)
.withDone(done);
String query = queryBuilder.buildSelectQuery();
String query = queryBuilder.buildSelectQuery(dialectProvider, pagination);
String countQuery = queryBuilder.buildCountQuery();
List<PremiseListEntry> entities;
@ -77,12 +80,14 @@ public class PremiseRepository {
private List<PremiseListEntry> executeQueryWithoutFilter(String query, Integer userId,
SearchQueryPagination pagination) {
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
return jdbcTemplate.query(
query,
new PremiseListEntryMapper(),
userId,
pagination.getLimit(),
pagination.getOffset()
paginationParams[0],
paginationParams[1]
);
}
@ -104,11 +109,13 @@ public class PremiseRepository {
}
private Object[] createFilterParams(Integer userId, String wildcardFilter, SearchQueryPagination pagination) {
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
return new Object[]{
userId,
wildcardFilter, wildcardFilter, wildcardFilter, wildcardFilter,
wildcardFilter, wildcardFilter,
pagination.getLimit(), pagination.getOffset()
paginationParams[0], paginationParams[1]
};
}
@ -353,7 +360,7 @@ public class PremiseRepository {
}
String placeholders = String.join(",", Collections.nCopies(premiseIds.size(), "?"));
String query = "UPDATE premise SET material_cost = null, is_fca_enabled = false, oversea_share = null WHERE id IN (" + placeholders + ")";
String query = "UPDATE premise SET material_cost = null, is_fca_enabled = " + dialectProvider.getBooleanFalse() + ", oversea_share = null WHERE id IN (" + placeholders + ")";
jdbcTemplate.update(query, premiseIds.toArray());
}
@ -580,11 +587,15 @@ public class PremiseRepository {
KeyHolder keyHolder = new GeneratedKeyHolder();
String sql = String.format(
"INSERT INTO premise (material_id, supplier_node_id, user_supplier_node_id, user_id, state, created_at, updated_at, geo_lat, geo_lng, country_id)" +
" VALUES (?, ?, ?, ?, 'DRAFT', %s, %s, ?, ?, ?)",
dialectProvider.getCurrentTimestamp(),
dialectProvider.getCurrentTimestamp()
);
jdbcTemplate.update(connection -> {
PreparedStatement ps = connection.prepareStatement(
"INSERT INTO premise (material_id, supplier_node_id, user_supplier_node_id, user_id, state, created_at, updated_at, geo_lat, geo_lng, country_id)" +
" VALUES (?, ?, ?, ?, 'DRAFT', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, ?, ?, ?)",
Statement.RETURN_GENERATED_KEYS);
PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
ps.setInt(1, materialId);
ps.setObject(2, supplierId);
@ -699,7 +710,7 @@ public class PremiseRepository {
return premiseIds;
}
String sql = "SELECT id FROM premise WHERE id IN (:ids) AND tariff_unlocked = TRUE";
String sql = "SELECT id FROM premise WHERE id IN (:ids) AND tariff_unlocked = " + dialectProvider.getBooleanTrue();
List<Integer> unlockedIds = namedParameterJdbcTemplate.query(
sql,
@ -725,7 +736,7 @@ public class PremiseRepository {
/**
* Encapsulates SQL query building logic
*/
private static class QueryBuilder {
private class QueryBuilder {
private static final String BASE_JOIN_QUERY = """
FROM premise AS p
LEFT JOIN material as m ON p.material_id = m.id
@ -769,7 +780,7 @@ public class PremiseRepository {
return queryBuilder.toString();
}
public String buildSelectQuery() {
public String buildSelectQuery(SqlDialectProvider dialectProvider, SearchQueryPagination pagination) {
StringBuilder queryBuilder = new StringBuilder();
queryBuilder.append("""
SELECT p.id as 'p.id', p.state as 'p.state', p.user_id as 'p.user_id',
@ -785,8 +796,8 @@ public class PremiseRepository {
user_n.country_id as 'user_n.country_id', user_n.geo_lat as 'user_n.geo_lat', user_n.geo_lng as 'user_n.geo_lng'
""").append(BASE_JOIN_QUERY);
appendConditions(queryBuilder);
queryBuilder.append(" ORDER BY p.updated_at DESC, p.id DESC");
queryBuilder.append(" LIMIT ? OFFSET ?");
queryBuilder.append(" ORDER BY p.updated_at DESC, p.id DESC ");
queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
return queryBuilder.toString();
}
@ -827,7 +838,7 @@ public class PremiseRepository {
private void appendBooleanCondition(StringBuilder queryBuilder, Boolean condition, String field) {
if (condition != null && condition) {
queryBuilder.append(" OR ").append(field).append(" = TRUE");
queryBuilder.append(" OR ").append(field).append(" = ").append(dialectProvider.getBooleanTrue());
}
}
}
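The insert above also routes CURRENT_TIMESTAMP through dialectProvider.getCurrentTimestamp(). CURRENT_TIMESTAMP is ANSI SQL and accepted by both MySQL and SQL Server, so this hook mainly keeps every dialect-sensitive token behind one interface; a sketch under that assumption (invented class name, not the project's code):

// Illustrative sketch only.
class TimestampSketch {
    // CURRENT_TIMESTAMP works on MySQL and SQL Server alike,
    // so both providers could simply return the same literal ...
    static String mysqlCurrentTimestamp() { return "CURRENT_TIMESTAMP"; }
    // ... or a SQL Server provider could opt for the higher-precision SYSDATETIME().
    static String mssqlCurrentTimestamp() { return "SYSDATETIME()"; }
}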

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.premise;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.premises.route.Route;
import de.avatic.lcc.util.exception.internalerror.DatabaseException;
import org.springframework.jdbc.core.JdbcTemplate;
@ -20,9 +21,11 @@ import java.util.Optional;
public class RouteRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public RouteRepository(JdbcTemplate jdbcTemplate) {
public RouteRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
public List<Route> getByDestinationId(Integer id) {
@ -31,7 +34,7 @@ public class RouteRepository {
}
public Optional<Route> getSelectedByDestinationId(Integer id) {
String query = "SELECT * FROM premise_route WHERE premise_destination_id = ? AND is_selected = TRUE";
String query = "SELECT * FROM premise_route WHERE premise_destination_id = ? AND is_selected = " + dialectProvider.getBooleanTrue();
var route = jdbcTemplate.query(query, new RouteMapper(), id);
if(route.isEmpty()) {
@ -78,12 +81,12 @@ public class RouteRepository {
}
public void updateSelectedByDestinationId(Integer destinationId, Integer selectedRouteId) {
String deselectQuery = """
UPDATE premise_route SET is_selected = FALSE WHERE is_selected = TRUE AND premise_destination_id = ?
""";
String selectQuery = """
UPDATE premise_route SET is_selected = TRUE WHERE id = ?
""";
String deselectQuery = String.format("""
UPDATE premise_route SET is_selected = %s WHERE is_selected = %s AND premise_destination_id = ?
""", dialectProvider.getBooleanFalse(), dialectProvider.getBooleanTrue());
String selectQuery = String.format("""
UPDATE premise_route SET is_selected = %s WHERE id = ?
""", dialectProvider.getBooleanTrue());
jdbcTemplate.update(deselectQuery, destinationId);
var affectedRowsSelect = jdbcTemplate.update(selectQuery, selectedRouteId);

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.properties;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.dto.generic.PropertyDTO;
import de.avatic.lcc.model.db.properties.SystemPropertyMappingId;
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
@ -26,9 +27,11 @@ import java.util.stream.Collectors;
public class PropertyRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public PropertyRepository(JdbcTemplate jdbcTemplate) {
public PropertyRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
/**
@ -58,11 +61,14 @@ public class PropertyRepository {
return;
}
String query = """
INSERT INTO system_property (property_set_id, system_property_type_id, property_value) VALUES (?, ?, ?)
ON DUPLICATE KEY UPDATE property_value = ?""";
String query = dialectProvider.buildUpsertStatement(
"system_property",
List.of("property_set_id", "system_property_type_id"),
List.of("property_set_id", "system_property_type_id", "property_value"),
List.of("property_value")
);
var affectedRows = jdbcTemplate.update(query, setId, typeId, value, value);
var affectedRows = jdbcTemplate.update(query, setId, typeId, value);
if (!(affectedRows > 0)) {
throw new DatabaseException("Could not update property value for property set " + setId + " and property type " + mappingId);
@ -99,10 +105,15 @@ public class PropertyRepository {
LEFT JOIN system_property AS sp ON sp.system_property_type_id = type.id
LEFT JOIN property_set AS ps ON ps.id = sp.property_set_id AND ps.state IN (?, ?)
GROUP BY type.id, type.name, type.data_type, type.external_mapping_id, type.validation_rule, type.description, type.property_group, type.sequence_number
HAVING draftValue IS NOT NULL OR validValue IS NOT NULL ORDER BY type.property_group , type.sequence_number;
HAVING MAX(CASE WHEN ps.state = ? THEN sp.property_value END) IS NOT NULL
OR MAX(CASE WHEN ps.state = ? THEN sp.property_value END) IS NOT NULL
ORDER BY type.property_group , type.sequence_number;
""";
return jdbcTemplate.query(query, new PropertyMapper(), ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name(), ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name());
return jdbcTemplate.query(query, new PropertyMapper(),
ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name(),
ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name(),
ValidityPeriodState.DRAFT.name(), ValidityPeriodState.VALID.name());
}
@ -182,9 +193,11 @@ public class PropertyRepository {
try {
List<Map<String, Object>> results = jdbcTemplate.queryForList(query, ValidityPeriodState.VALID.name());
String insertQuery = """
INSERT IGNORE INTO system_property (property_value, system_property_type_id, property_set_id)
VALUES (?, ?, ?)""";
String insertQuery = dialectProvider.buildInsertIgnoreStatement(
"system_property",
List.of("property_value", "system_property_type_id", "property_set_id"),
List.of("property_set_id", "system_property_type_id")
);
List<Object[]> batchArgs = results.stream()
.map(row -> new Object[]{row.get("value"), row.get("typeId"), setId})

View file

@ -1,6 +1,7 @@
package de.avatic.lcc.repositories.properties;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.properties.PropertySet;
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
import org.springframework.jdbc.core.JdbcTemplate;
@ -23,9 +24,11 @@ import java.util.Optional;
public class PropertySetRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public PropertySetRepository(JdbcTemplate jdbcTemplate) {
public PropertySetRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
/**
@ -155,16 +158,21 @@ public class PropertySetRepository {
}
public Optional<PropertySet> getByDate(LocalDate date) {
String query = """
String query = String.format("""
SELECT id, start_date, end_date, state
FROM property_set
WHERE DATE(start_date) <= ?
AND (end_date IS NULL OR DATE(end_date) >= ?)
WHERE %s <= ?
AND (end_date IS NULL OR %s >= ?)
ORDER BY start_date DESC
LIMIT 1
""";
%s
""",
dialectProvider.extractDate("start_date"),
dialectProvider.extractDate("end_date"),
dialectProvider.buildPaginationClause(1, 0)
);
var propertySets = jdbcTemplate.query(query, new PropertySetMapper(), date, date);
Object[] paginationParams = dialectProvider.getPaginationParameters(1, 0);
var propertySets = jdbcTemplate.query(query, new PropertySetMapper(), date, date, paginationParams[0], paginationParams[1]);
return propertySets.isEmpty() ? Optional.empty() : Optional.of(propertySets.getFirst());
}
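extractDate replaces the MySQL-specific DATE() call so the date-only comparison can be rendered per dialect. A hedged sketch of the two variants (illustration only):

// Illustrative sketch only.
class DateExtractionSketch {
    // MySQL: DATE() truncates a DATETIME to its date part.
    static String mysqlExtractDate(String column) {
        return "DATE(" + column + ")";
    }
    // SQL Server: CAST ... AS DATE achieves the same truncation.
    static String mssqlExtractDate(String column) {
        return "CAST(" + column + " AS DATE)";
    }
}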

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.rates;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.dto.generic.TransportType;
import de.avatic.lcc.model.db.rates.ContainerRate;
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
@ -13,6 +14,7 @@ import org.springframework.transaction.annotation.Transactional;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
@ -21,9 +23,11 @@ import java.util.Optional;
public class ContainerRateRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public ContainerRateRepository(JdbcTemplate jdbcTemplate) {
public ContainerRateRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
/**
@ -74,9 +78,12 @@ public class ContainerRateRepository {
}
}
queryBuilder.append(" ORDER BY cr.id LIMIT ? OFFSET ?");
params.add(pagination.getLimit());
params.add(pagination.getOffset());
queryBuilder.append(" ORDER BY cr.id ");
queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
params.add(paginationParams[0]);
params.add(paginationParams[1]);
Integer totalCount = jdbcTemplate.queryForObject(countQueryBuilder.toString(), Integer.class, countParams.toArray());
var results = jdbcTemplate.query(queryBuilder.toString(), new ContainerRateMapper(), params.toArray());
@ -128,10 +135,12 @@ public class ContainerRateRepository {
LEFT JOIN node AS from_node ON from_node.id = container_rate.from_node_id
LEFT JOIN validity_period ON validity_period.id = container_rate.validity_period_id
WHERE validity_period.state = ?
AND to_node.is_deprecated = FALSE
AND from_node.is_deprecated = FALSE
AND to_node.is_deprecated = %s
AND from_node.is_deprecated = %s
AND (container_rate.container_rate_type = ? OR container_rate.container_rate_type = ?)
AND container_rate.from_node_id = ? AND to_node.country_id IN (%s)""".formatted(
dialectProvider.getBooleanFalse(),
dialectProvider.getBooleanFalse(),
destinationCountryPlaceholders);
List<Object> params = new ArrayList<>();
@ -147,7 +156,7 @@ public class ContainerRateRepository {
@Transactional
public List<ContainerRate> getPostRunsFor(ContainerRate mainRun) {
String query = """
String query = String.format("""
SELECT container_rate.id AS id,
container_rate.validity_period_id AS validity_period_id,
container_rate.container_rate_type AS container_rate_type,
@ -164,9 +173,11 @@ public class ContainerRateRepository {
LEFT JOIN node AS from_node ON from_node.id = container_rate.from_node_id
LEFT JOIN validity_period ON validity_period.id = container_rate.validity_period_id
WHERE validity_period.state = ?
AND to_node.is_deprecated = FALSE
AND from_node.is_deprecated = FALSE
AND container_rate.from_node_id = ? AND container_rate.container_rate_type = ?""";
AND to_node.is_deprecated = %s
AND from_node.is_deprecated = %s
AND container_rate.from_node_id = ? AND container_rate.container_rate_type = ?""",
dialectProvider.getBooleanFalse(),
dialectProvider.getBooleanFalse());
return jdbcTemplate.query(query, new ContainerRateMapper(true), ValidityPeriodState.VALID.name(), mainRun.getToNodeId(), TransportType.POST_RUN.name());
}
@ -213,17 +224,17 @@ public class ContainerRateRepository {
@Transactional
public void insert(ContainerRate containerRate) {
String sql = """
INSERT INTO container_rate
(from_node_id, to_node_id, container_rate_type, rate_teu, rate_feu, rate_hc, lead_time, validity_period_id)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
container_rate_type = VALUES(container_rate_type),
rate_teu = VALUES(rate_teu),
rate_feu = VALUES(rate_feu),
rate_hc = VALUES(rate_hc),
lead_time = VALUES(lead_time)
""";
// Build UPSERT statement using dialect provider
List<String> uniqueColumns = Arrays.asList("from_node_id", "to_node_id", "container_rate_type", "validity_period_id");
List<String> insertColumns = Arrays.asList("from_node_id", "to_node_id", "container_rate_type", "rate_teu", "rate_feu", "rate_hc", "lead_time", "validity_period_id");
List<String> updateColumns = Arrays.asList("container_rate_type", "rate_teu", "rate_feu", "rate_hc", "lead_time");
String sql = dialectProvider.buildUpsertStatement(
"container_rate",
uniqueColumns,
insertColumns,
updateColumns
);
jdbcTemplate.update(sql,
containerRate.getFromNodeId(),
@ -240,15 +251,16 @@ public class ContainerRateRepository {
@Transactional
public boolean hasMainRun(Integer nodeId) {
String query = """
SELECT EXISTS(
SELECT CASE WHEN EXISTS(
SELECT 1 FROM container_rate
WHERE (from_node_id = ? OR to_node_id = ?)
AND (container_rate_type = ? OR container_rate_type = ?)
)
) THEN 1 ELSE 0 END
""";
return Boolean.TRUE.equals(jdbcTemplate.queryForObject(query, Boolean.class,
nodeId, nodeId, TransportType.SEA.name(), TransportType.RAIL.name()));
Integer result = jdbcTemplate.queryForObject(query, Integer.class,
nodeId, nodeId, TransportType.SEA.name(), TransportType.RAIL.name());
return result != null && result > 0;
}
@Transactional
@ -259,7 +271,11 @@ public class ContainerRateRepository {
@Transactional
public void copyCurrentToDraft() {
String sql = """
// Build LIMIT clause for subquery
String limitClause = dialectProvider.buildPaginationClause(1, 0);
Object[] paginationParams = dialectProvider.getPaginationParameters(1, 0);
String sql = String.format("""
INSERT INTO container_rate (
from_node_id,
to_node_id,
@ -278,13 +294,13 @@ public class ContainerRateRepository {
cr.rate_feu,
cr.rate_hc,
cr.lead_time,
(SELECT id FROM validity_period WHERE state = 'DRAFT' LIMIT 1) as validity_period_id
(SELECT id FROM validity_period WHERE state = 'DRAFT' %s) as validity_period_id
FROM container_rate cr
INNER JOIN validity_period vp ON cr.validity_period_id = vp.id
WHERE vp.state = 'VALID'
""";
""", limitClause);
jdbcTemplate.update(sql);
jdbcTemplate.update(sql, paginationParams);
}
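The hasMainRun change above is a portability fix: MySQL allows a bare EXISTS(...) in the select list and returns 0/1, while SQL Server does not accept EXISTS as a scalar expression. The rewritten query keeps the same semantics on both databases; the two forms side by side (sketch only, table and column names taken from the query above):

// Illustrative sketch only: why the EXISTS check was rewritten.
class ExistsCheckSketch {
    // MySQL-only: a bare EXISTS(...) in the select list evaluates to 0/1.
    static final String MYSQL_ONLY =
            "SELECT EXISTS(SELECT 1 FROM container_rate WHERE from_node_id = ?)";
    // Portable: SQL Server only allows EXISTS in a boolean context,
    // so it is wrapped in CASE WHEN ... THEN 1 ELSE 0 END.
    static final String PORTABLE =
            "SELECT CASE WHEN EXISTS(SELECT 1 FROM container_rate WHERE from_node_id = ?) THEN 1 ELSE 0 END";
}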

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.rates;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.rates.MatrixRate;
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
@ -23,14 +24,17 @@ import java.util.Optional;
public class MatrixRateRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
/**
* Instantiates the repository by injecting a {@link JdbcTemplate}.
*
* @param jdbcTemplate the {@link JdbcTemplate} to be used for database interactions
* @param dialectProvider the {@link SqlDialectProvider} for database-specific SQL syntax
*/
public MatrixRateRepository(JdbcTemplate jdbcTemplate) {
public MatrixRateRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
/**
@ -42,9 +46,13 @@ public class MatrixRateRepository {
*/
@Transactional
public SearchQueryResult<MatrixRate> listRates(SearchQueryPagination pagination) {
String query = "SELECT * FROM country_matrix_rate ORDER BY id LIMIT ? OFFSET ?";
String query = String.format("SELECT * FROM country_matrix_rate ORDER BY id %s",
dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
var totalCount = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM country_matrix_rate", Integer.class);
return new SearchQueryResult<>(jdbcTemplate.query(query, new MatrixRateMapper(), pagination.getLimit(), pagination.getOffset()), pagination.getPage(), totalCount, pagination.getLimit());
return new SearchQueryResult<>(jdbcTemplate.query(query, new MatrixRateMapper(), paginationParams[0], paginationParams[1]), pagination.getPage(), totalCount, pagination.getLimit());
}
/**
@ -96,9 +104,12 @@ public class MatrixRateRepository {
}
}
queryBuilder.append(" ORDER BY cmr.id LIMIT ? OFFSET ?");
params.add(pagination.getLimit());
params.add(pagination.getOffset());
queryBuilder.append(" ORDER BY cmr.id ");
queryBuilder.append(dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
params.add(paginationParams[0]);
params.add(paginationParams[1]);
var totalCount = jdbcTemplate.queryForObject(countQueryBuilder.toString(), Integer.class, countParams.toArray());
var results = jdbcTemplate.query(queryBuilder.toString(), new MatrixRateMapper(), params.toArray());
@ -164,12 +175,12 @@ public class MatrixRateRepository {
@Transactional
public void insert(MatrixRate rate) {
String sql = """
INSERT INTO country_matrix_rate (from_country_id, to_country_id, rate, validity_period_id)
VALUES (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
rate = VALUES(rate)
""";
String sql = dialectProvider.buildUpsertStatement(
"country_matrix_rate",
List.of("from_country_id", "to_country_id", "validity_period_id"),
List.of("from_country_id", "to_country_id", "rate", "validity_period_id"),
List.of("rate")
);
jdbcTemplate.update(sql,
rate.getFromCountry(),
@ -180,13 +191,14 @@ public class MatrixRateRepository {
@Transactional
public void copyCurrentToDraft() {
// Note: No pagination needed for the DRAFT subquery - there should only be one DRAFT period
String sql = """
INSERT INTO country_matrix_rate (from_country_id, to_country_id, rate, validity_period_id)
SELECT
cmr.from_country_id,
cmr.to_country_id,
cmr.rate,
(SELECT id FROM validity_period WHERE state = 'DRAFT' LIMIT 1) AS validity_period_id
(SELECT id FROM validity_period WHERE state = 'DRAFT') AS validity_period_id
FROM country_matrix_rate cmr
INNER JOIN validity_period vp ON cmr.validity_period_id = vp.id
WHERE vp.state = 'VALID'

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.rates;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.ValidityTuple;
import de.avatic.lcc.model.db.rates.ValidityPeriod;
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
@ -30,14 +31,17 @@ public class ValidityPeriodRepository {
* The {@link JdbcTemplate} used for interacting with the database.
*/
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
/**
* Constructs a new repository with a given {@link JdbcTemplate}.
*
* @param jdbcTemplate the {@link JdbcTemplate} used for executing SQL queries.
* @param dialectProvider the {@link SqlDialectProvider} for database-specific SQL syntax
*/
public ValidityPeriodRepository(JdbcTemplate jdbcTemplate) {
public ValidityPeriodRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
/**
@ -60,8 +64,8 @@ public class ValidityPeriodRepository {
*/
@Transactional
public Optional<Integer> getPeriodId(LocalDateTime validAt) {
String query = "SELECT id FROM validity_period WHERE ? BETWEEN start_date AND end_date";
return Optional.ofNullable(jdbcTemplate.query(query, (rs) -> rs.next() ? rs.getInt("id") : null, validAt));
String query = "SELECT id FROM validity_period WHERE start_date <= ? AND (end_date IS NULL OR end_date >= ?)";
return Optional.ofNullable(jdbcTemplate.query(query, (rs) -> rs.next() ? rs.getInt("id") : null, validAt, validAt));
}
/**
@ -274,7 +278,9 @@ public class ValidityPeriodRepository {
+ whereClause + """
GROUP BY
cj.validity_period_id,
cj.property_set_id
cj.property_set_id,
ps.start_date,
vp.start_date
HAVING
COUNT(DISTINCT COALESCE(p.supplier_node_id, p.user_supplier_node_id)) = ?
ORDER BY
@ -329,15 +335,20 @@ public class ValidityPeriodRepository {
}
public Optional<ValidityPeriod> getByDate(LocalDate date) {
String query = """
String query = String.format("""
SELECT * FROM validity_period
WHERE DATE(start_date) <= ?
AND (end_date IS NULL OR DATE(end_date) >= ?)
WHERE %s <= ?
AND (end_date IS NULL OR %s >= ?)
ORDER BY start_date DESC
LIMIT 1
""";
%s
""",
dialectProvider.extractDate("start_date"),
dialectProvider.extractDate("end_date"),
dialectProvider.buildPaginationClause(1, 0)
);
var periods = jdbcTemplate.query(query, new ValidityPeriodMapper(), date, date);
Object[] paginationParams = dialectProvider.getPaginationParameters(1, 0);
var periods = jdbcTemplate.query(query, new ValidityPeriodMapper(), date, date, paginationParams[0], paginationParams[1]);
return periods.isEmpty() ? Optional.empty() : Optional.of(periods.getFirst());
}

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.users;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.users.App;
import de.avatic.lcc.model.db.users.Group;
import org.springframework.jdbc.core.JdbcTemplate;
@ -31,16 +32,19 @@ public class AppRepository {
private final JdbcTemplate jdbcTemplate;
private final GroupRepository groupRepository;
private final SqlDialectProvider dialectProvider;
/**
* Creates a new AppRepository.
*
* @param jdbcTemplate Spring JdbcTemplate used for executing SQL queries
* @param groupRepository Repository used to resolve group identifiers
* @param dialectProvider SQL dialect provider for database-specific SQL syntax
*/
public AppRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository) {
public AppRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.groupRepository = groupRepository;
this.dialectProvider = dialectProvider;
}
/**
@ -128,11 +132,14 @@ public class AppRepository {
jdbcTemplate.update("DELETE FROM sys_app_group_mapping WHERE app_id = ?", appId);
return;
} else {
String insertQuery = dialectProvider.buildInsertIgnoreStatement(
"sys_app_group_mapping",
List.of("app_id", "group_id"),
List.of("app_id", "group_id")
);
for (Integer groupId : groups) {
jdbcTemplate.update(
"INSERT IGNORE INTO sys_app_group_mapping (app_id, group_id) VALUES (?, ?)",
appId, groupId
);
jdbcTemplate.update(insertQuery, appId, groupId);
}
}
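INSERT IGNORE is MySQL syntax, so the group-mapping insert now comes from dialectProvider.buildInsertIgnoreStatement(...). Judging by the calls in these diffs the argument order is (table, insert columns, unique columns); a hedged sketch of a MySQL-flavoured builder under that assumption (invented class name, not the project's code):

import java.util.List;
import java.util.stream.Collectors;

// Illustrative sketch only.
class InsertIgnoreSketch {
    // MySQL flavour: INSERT IGNORE relies on the table's unique key,
    // so the uniqueColumns argument does not appear in the generated SQL.
    static String mysqlInsertIgnore(String table, List<String> insertColumns, List<String> uniqueColumns) {
        String columns = String.join(", ", insertColumns);
        String placeholders = insertColumns.stream().map(c -> "?").collect(Collectors.joining(", "));
        return "INSERT IGNORE INTO " + table + " (" + columns + ") VALUES (" + placeholders + ")";
    }
    // A SQL Server flavour would typically wrap the same placeholders in a
    // SELECT ... WHERE NOT EXISTS guard built from uniqueColumns, keeping the
    // bind-parameter count identical for callers such as
    // jdbcTemplate.update(insertQuery, appId, groupId).
}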

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.users;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.users.Group;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
import de.avatic.lcc.repositories.pagination.SearchQueryResult;
@ -16,21 +17,26 @@ import java.util.List;
@Repository
public class GroupRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public GroupRepository(JdbcTemplate jdbcTemplate) {
public GroupRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
public SearchQueryResult<Group> listGroups(SearchQueryPagination pagination) {
String query = "SELECT * FROM sys_group ORDER BY group_name LIMIT ? OFFSET ?";
String query = String.format("SELECT * FROM sys_group ORDER BY group_name %s",
dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
var groups = jdbcTemplate.query(query, new GroupMapper(),
pagination.getLimit(), pagination.getOffset());
paginationParams[0], paginationParams[1]);
Integer totalCount = jdbcTemplate.queryForObject(
"SELECT COUNT(*) FROM sys_group ORDER BY group_name",
"SELECT COUNT(*) FROM sys_group",
Integer.class
);
@ -63,8 +69,13 @@ public class GroupRepository {
@Transactional
public void updateGroup(Group group) {
String query = "INSERT INTO sys_group (group_name, group_description) VALUES (?, ?) ON DUPLICATE KEY UPDATE group_description = ?";
jdbcTemplate.update(query, group.getName(), group.getDescription(), group.getDescription());
String query = dialectProvider.buildUpsertStatement(
"sys_group",
List.of("group_name"),
List.of("group_name", "group_description"),
List.of("group_description")
);
jdbcTemplate.update(query, group.getName(), group.getDescription());
}
private static class GroupMapper implements RowMapper<Group> {

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.users;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.ValidityTuple;
import de.avatic.lcc.model.db.nodes.Node;
import de.avatic.lcc.util.exception.base.ForbiddenException;
@ -22,9 +23,11 @@ public class UserNodeRepository {
private final JdbcTemplate jdbcTemplate;
private final SqlDialectProvider dialectProvider;
public UserNodeRepository(JdbcTemplate jdbcTemplate) {
public UserNodeRepository(JdbcTemplate jdbcTemplate, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.dialectProvider = dialectProvider;
}
@Transactional
@ -43,11 +46,15 @@ public class UserNodeRepository {
}
if (excludeDeprecated) {
queryBuilder.append(" AND is_deprecated = FALSE");
queryBuilder.append(" AND is_deprecated = ").append(dialectProvider.getBooleanFalse());
}
queryBuilder.append(" LIMIT ?");
params.add(limit);
queryBuilder.append(" ORDER BY id");
queryBuilder.append(" ").append(dialectProvider.buildPaginationClause(limit, 0));
Object[] paginationParams = dialectProvider.getPaginationParameters(limit, 0);
params.add(paginationParams[0]);
params.add(paginationParams[1]);
return jdbcTemplate.query(queryBuilder.toString(), new NodeMapper(), params.toArray());
}
@ -139,11 +146,19 @@ public class UserNodeRepository {
@Transactional
public void checkOwner(List<Integer> userNodeIds, Integer userId) {
String query = """
SELECT id FROM sys_user_node WHERE id IN (?) AND user_id <> ?
""";
if (userNodeIds.isEmpty()) {
return;
}
var otherIds = jdbcTemplate.queryForList(query, Integer.class, userNodeIds, userId);
String placeholders = String.join(",", Collections.nCopies(userNodeIds.size(), "?"));
String query = """
SELECT id FROM sys_user_node WHERE id IN (""" + placeholders + ") AND user_id <> ?";
// Combine userNodeIds and userId into a single parameter array
List<Object> params = new ArrayList<>(userNodeIds);
params.add(userId);
var otherIds = jdbcTemplate.queryForList(query, Integer.class, params.toArray());
if(!otherIds.isEmpty()) {
throw new ForbiddenException("Access violation. Cannot open user nodes with ids = " + otherIds);

View file

@ -1,5 +1,6 @@
package de.avatic.lcc.repositories.users;
import de.avatic.lcc.database.dialect.SqlDialectProvider;
import de.avatic.lcc.model.db.users.Group;
import de.avatic.lcc.model.db.users.User;
import de.avatic.lcc.repositories.pagination.SearchQueryPagination;
@ -25,20 +26,24 @@ public class UserRepository {
private final JdbcTemplate jdbcTemplate;
private final GroupRepository groupRepository;
private final SqlDialectProvider dialectProvider;
public UserRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository) {
public UserRepository(JdbcTemplate jdbcTemplate, GroupRepository groupRepository, SqlDialectProvider dialectProvider) {
this.jdbcTemplate = jdbcTemplate;
this.groupRepository = groupRepository;
this.dialectProvider = dialectProvider;
}
@Transactional
public SearchQueryResult<User> listUsers(SearchQueryPagination pagination) {
String query = """
String query = String.format("""
SELECT *
FROM sys_user
ORDER BY sys_user.workday_id LIMIT ? OFFSET ?""";
ORDER BY sys_user.workday_id %s""", dialectProvider.buildPaginationClause(pagination.getLimit(), pagination.getOffset()));
return new SearchQueryResult<>(jdbcTemplate.query(query, new UserMapper(), pagination.getLimit(), pagination.getOffset()), pagination.getPage(), getTotalUserCount(), pagination.getLimit());
Object[] paginationParams = dialectProvider.getPaginationParameters(pagination.getLimit(), pagination.getOffset());
return new SearchQueryResult<>(jdbcTemplate.query(query, new UserMapper(), paginationParams[0], paginationParams[1]), pagination.getPage(), getTotalUserCount(), pagination.getLimit());
}
@ -113,11 +118,14 @@ public class UserRepository {
return;
} else
{
String insertQuery = dialectProvider.buildInsertIgnoreStatement(
"sys_user_group_mapping",
List.of("user_id", "group_id"),
List.of("user_id", "group_id")
);
for (Integer groupId : groups) {
jdbcTemplate.update(
"INSERT IGNORE INTO sys_user_group_mapping (user_id, group_id) VALUES (?, ?)",
userId, groupId
);
jdbcTemplate.update(insertQuery, userId, groupId);
}
}

View file

@ -49,6 +49,7 @@ public class BatchGeoApiService {
ArrayList<BulkInstruction<ExcelNode>> noGeo = new ArrayList<>();
ArrayList<BulkInstruction<ExcelNode>> failedGeoLookups = new ArrayList<>();
ArrayList<BulkInstruction<ExcelNode>> failedFuzzyGeoLookups = new ArrayList<>();
int totalSuccessful = 0;
for (var node : nodes) {
@ -57,7 +58,6 @@ public class BatchGeoApiService {
}
}
for (int currentBatch = 0; currentBatch < noGeo.size(); currentBatch += MAX_BATCH_SIZE) {
int end = Math.min(currentBatch + MAX_BATCH_SIZE, noGeo.size());
var chunk = noGeo.subList(currentBatch, end);
@ -67,34 +67,109 @@ public class BatchGeoApiService {
.toList());
if (chunkResult.isPresent()) {
var response = chunkResult.get();
totalSuccessful += chunkResult.get().getSummary().getSuccessfulRequests();
if (response.getSummary() != null && response.getSummary().getSuccessfulRequests() != null) {
totalSuccessful += response.getSummary().getSuccessfulRequests();
}
if (response.getBatchItems() == null || response.getBatchItems().isEmpty()) {
logger.warn("Batch response contains no items");
failedGeoLookups.addAll(chunk);
continue;
}
for (int itemIdx = 0; itemIdx < chunk.size(); itemIdx++) {
var result = chunkResult.get().getBatchItems().get(itemIdx);
if (itemIdx >= response.getBatchItems().size()) {
logger.warn("BatchItems size mismatch at index {}", itemIdx);
failedGeoLookups.add(chunk.get(itemIdx));
continue;
}
var result = response.getBatchItems().get(itemIdx);
var node = chunk.get(itemIdx).getEntity();
if (!result.getFeatures().isEmpty() &&
(result.getFeatures().getFirst().getProperties().getConfidence().equalsIgnoreCase("high") ||
result.getFeatures().getFirst().getProperties().getConfidence().equalsIgnoreCase("medium") ||
(result.getFeatures().getFirst().getProperties().getMatchCodes() != null &&
result.getFeatures().getFirst().getProperties().getMatchCodes().stream().anyMatch(s -> s.equalsIgnoreCase("good"))))) {
var geometry = result.getFeatures().getFirst().getGeometry();
var properties = result.getFeatures().getFirst().getProperties();
node.setGeoLng(BigDecimal.valueOf(geometry.getCoordinates().get(0)));
node.setGeoLat(BigDecimal.valueOf(geometry.getCoordinates().get(1)));
node.setAddress(properties.getAddress().getFormattedAddress());
node.setCountryId(IsoCode.valueOf(properties.getAddress().getCountryRegion().getIso()));
} else {
logger.warn("Geocoding failed for address {}", node.getAddress());
if (result == null || result.getFeatures() == null || result.getFeatures().isEmpty()) {
logger.warn("No geocoding result for address {}",
node.getAddress() != null ? node.getAddress() : "unknown");
failedGeoLookups.add(chunk.get(itemIdx));
continue;
}
var feature = result.getFeatures().getFirst();
if (feature == null) {
logger.warn("Feature is null for address {}", node.getAddress());
failedGeoLookups.add(chunk.get(itemIdx));
continue;
}
var properties = feature.getProperties();
if (properties == null) {
logger.warn("Properties is null for address {}", node.getAddress());
failedGeoLookups.add(chunk.get(itemIdx));
continue;
}
String confidence = properties.getConfidence();
boolean hasGoodConfidence = confidence != null &&
(confidence.equalsIgnoreCase("high") ||
confidence.equalsIgnoreCase("medium"));
boolean hasGoodMatchCode = properties.getMatchCodes() != null &&
properties.getMatchCodes().stream()
.anyMatch(s -> s != null && s.equalsIgnoreCase("good"));
if (hasGoodConfidence || hasGoodMatchCode) {
var geometry = feature.getGeometry();
if (geometry == null || geometry.getCoordinates() == null ||
geometry.getCoordinates().size() < 2) {
logger.warn("Invalid geometry for address {}", node.getAddress());
failedGeoLookups.add(chunk.get(itemIdx));
continue;
}
var coordinates = geometry.getCoordinates();
if (coordinates.get(0) == null || coordinates.get(1) == null) {
logger.warn("Null coordinates for address {}", node.getAddress());
failedGeoLookups.add(chunk.get(itemIdx));
continue;
}
node.setGeoLng(BigDecimal.valueOf(coordinates.get(0)));
node.setGeoLat(BigDecimal.valueOf(coordinates.get(1)));
if (properties.getAddress() != null &&
properties.getAddress().getFormattedAddress() != null) {
node.setAddress(properties.getAddress().getFormattedAddress());
}
if (properties.getAddress() != null &&
properties.getAddress().getCountryRegion() != null &&
properties.getAddress().getCountryRegion().getIso() != null) {
try {
node.setCountryId(IsoCode.valueOf(
properties.getAddress().getCountryRegion().getIso()));
} catch (IllegalArgumentException e) {
logger.warn("Invalid ISO code: {}",
properties.getAddress().getCountryRegion().getIso());
}
}
} else {
logger.warn("Geocoding failed for address {} (low confidence)",
node.getAddress());
failedGeoLookups.add(chunk.get(itemIdx));
//throw new ExcelValidationError("Unable to geocode " + node.getName() + ". Please check your address or enter geo position yourself.");
}
}
} else {
logger.warn("Batch request returned empty result");
failedGeoLookups.addAll(chunk);
}
}
// Second pass: fuzzy lookup with company name for failed addresses
if (!failedGeoLookups.isEmpty()) {
logger.info("Retrying {} failed lookups with fuzzy search", failedGeoLookups.size());
@ -108,31 +183,52 @@ public class BatchGeoApiService {
&& !fuzzyResult.get().getResults().isEmpty()) {
var result = fuzzyResult.get().getResults().getFirst();
// Scores of 7.0 or higher are treated as good confidence here
if (result.getScore() >= 7.0) {
node.setGeoLat(BigDecimal.valueOf(result.getPosition().getLat()));
node.setGeoLng(BigDecimal.valueOf(result.getPosition().getLon()));
node.setAddress(result.getAddress().getFreeformAddress());
// Update country if it differs
if (result.getAddress().getCountryCode() != null) {
try {
node.setCountryId(IsoCode.valueOf(result.getAddress().getCountryCode()));
} catch (IllegalArgumentException e) {
logger.warn("Unknown country code: {}", result.getAddress().getCountryCode());
}
}
fuzzySuccessful++;
logger.info("Fuzzy search successful for: {} (score: {})",
node.getName(), result.getScore());
} else {
logger.warn("Fuzzy search returned low confidence result for: {} (score: {})",
node.getName(), result.getScore());
if (result == null) {
logger.warn("Fuzzy result is null for: {}", node.getName());
failedFuzzyGeoLookups.add(instruction);
continue;
}
} else {
logger.error("Fuzzy search found no results for: {}", node.getName());
double score = result.getScore();
if (score < 7.0) {
logger.warn("Fuzzy search returned low confidence result for: {} (score: {})",
node.getName(), score);
failedFuzzyGeoLookups.add(instruction);
continue;
}
if (result.getPosition() == null) {
logger.warn("Position is null for: {}", node.getName());
failedFuzzyGeoLookups.add(instruction);
continue;
}
double lat = result.getPosition().getLat();
double lon = result.getPosition().getLon();
node.setGeoLat(BigDecimal.valueOf(lat));
node.setGeoLng(BigDecimal.valueOf(lon));
if (result.getAddress() != null &&
result.getAddress().getFreeformAddress() != null) {
node.setAddress(result.getAddress().getFreeformAddress());
}
if (result.getAddress() != null &&
result.getAddress().getCountryCode() != null) {
try {
node.setCountryId(IsoCode.valueOf(result.getAddress().getCountryCode()));
} catch (IllegalArgumentException e) {
logger.warn("Unknown country code: {}",
result.getAddress().getCountryCode());
failedFuzzyGeoLookups.add(instruction);
continue;
}
}
fuzzySuccessful++;
logger.info("Fuzzy search successful for: {} (score: {})",
node.getName(), score);
}
}
@@ -140,8 +236,10 @@ public class BatchGeoApiService {
fuzzySuccessful, failedGeoLookups.size());
// Throw error for remaining failed lookups
int remainingFailed = failedGeoLookups.size() - fuzzySuccessful;
if (remainingFailed > 0) {
if (!failedFuzzyGeoLookups.isEmpty()) {
failedFuzzyGeoLookups.forEach(instruction ->
        logger.warn("Lookup finally failed for: {}", instruction.getEntity().getName()));
var firstFailed = failedGeoLookups.stream()
.filter(i -> i.getEntity().getGeoLat() == null)
.findFirst()
@@ -149,7 +247,9 @@ public class BatchGeoApiService {
.orElse(null);
if (firstFailed != null) {
throw new ExcelValidationError("Unable to geocode " + firstFailed.getName()
String name = firstFailed.getName() != null ?
firstFailed.getName() : "unknown";
throw new ExcelValidationError("Unable to geocode " + name
+ ". Please check your address or enter geo position yourself.");
}
}
@@ -159,13 +259,32 @@ public class BatchGeoApiService {
private Optional<FuzzySearchResponse> executeFuzzySearch(ExcelNode node) {
try {
String companyName = node.getName();
String country = node.getCountryId().name();
if (companyName == null) {
logger.warn("Company name is null for fuzzy search");
return Optional.empty();
}
IsoCode countryId = node.getCountryId();
if (countryId == null) {
logger.warn("Country ID is null for fuzzy search: {}", companyName);
return Optional.empty();
}
String country = countryId.name();
String address = node.getAddress();
if (address == null) {
logger.warn("Address is null for fuzzy search: {}", companyName);
address = ""; // fall back to an empty string
}
// Normalize Unicode for consistent searching
companyName = java.text.Normalizer.normalize(companyName, java.text.Normalizer.Form.NFC);
companyName = java.text.Normalizer.normalize(companyName,
java.text.Normalizer.Form.NFC);
// URL-encode the query
String encodedQuery = URLEncoder.encode(companyName + ", " + node.getAddress() + ", " + country, StandardCharsets.UTF_8);
String encodedQuery = URLEncoder.encode(
companyName + ", " + address + ", " + country,
StandardCharsets.UTF_8);
String url = String.format(
"https://atlas.microsoft.com/search/fuzzy/json?api-version=1.0&subscription-key=%s&query=%s&limit=5",
@@ -185,13 +304,21 @@ public class BatchGeoApiService {
return Optional.ofNullable(response.getBody());
} catch (Exception e) {
logger.error("Fuzzy search failed for {}", node.getName(), e);
logger.error("Fuzzy search failed for {}",
node.getName() != null ? node.getName() : "unknown", e);
return Optional.empty();
}
}
private String getGeoCodeString(ExcelNode excelNode) {
return excelNode.getAddress() + ", " + excelNode.getCountryId();
String address = excelNode.getAddress();
IsoCode countryId = excelNode.getCountryId();
// fallback values when fields are null
String addressStr = address != null ? address : "";
String countryStr = countryId != null ? countryId.name() : "";
return addressStr + ", " + countryStr;
}
private Optional<BatchGeocodingResponse> executeBatchRequest(List<BatchItem> batchItems) {

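Why the NFC normalization in executeFuzzySearch matters: the same company name can arrive in composed or decomposed Unicode form, and the two forms URL-encode to different query strings. A minimal JDK-only sketch of the effect (the class name and sample strings are illustrative, not part of the service):

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.text.Normalizer;

public class NfcEncodingDemo {
    public static void main(String[] args) {
        String composed = "Caf\u00e9 M\u00fcller";      // accented letters as single code points
        String decomposed = "Cafe\u0301 Mu\u0308ller";  // base letters plus combining marks

        // Without normalization the two spellings encode differently.
        System.out.println(URLEncoder.encode(composed, StandardCharsets.UTF_8));
        System.out.println(URLEncoder.encode(decomposed, StandardCharsets.UTF_8));

        // After NFC both collapse to the same string and therefore the same encoded query.
        String nfcComposed = Normalizer.normalize(composed, Normalizer.Form.NFC);
        String nfcDecomposed = Normalizer.normalize(decomposed, Normalizer.Form.NFC);
        System.out.println(nfcComposed.equals(nfcDecomposed)); // true
    }
}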

@@ -15,6 +15,7 @@ import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -56,6 +57,7 @@ public class BulkImportService {
this.materialFastExcelMapper = materialFastExcelMapper;
}
@Transactional
public void processOperation(BulkOperation op) throws IOException {
var file = op.getFile();
var type = op.getFileType();

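A note on the new @Transactional on processOperation: Spring applies it through a proxy, so the transaction boundary only exists when the method is entered via the injected bean. A hedged sketch of a caller that keeps the boundary intact; the runner class is hypothetical, only BulkImportService, BulkOperation and processOperation come from the diff (assumes the same package, imports of those types omitted):

import org.springframework.stereotype.Service;
import java.io.IOException;

@Service
public class BulkOperationRunner {

    private final BulkImportService bulkImportService;

    public BulkOperationRunner(BulkImportService bulkImportService) {
        this.bulkImportService = bulkImportService;
    }

    public void run(BulkOperation op) throws IOException {
        // Calling through the injected bean goes through the transactional proxy,
        // so a failure rolls back everything written while processing this operation.
        // A self-invocation inside BulkImportService (this.processOperation(op))
        // would bypass the proxy and run without the transaction.
        bulkImportService.processOperation(op);
    }
}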

@@ -9,6 +9,7 @@ import de.avatic.lcc.service.transformer.generic.NodeTransformer;
import de.avatic.lcc.util.exception.internalerror.ExcelValidationError;
import org.springframework.stereotype.Service;
import java.math.BigDecimal;
import java.util.*;
@Service
@@ -61,22 +62,26 @@ public class NodeBulkImportService {
}
private boolean compare(Node updateNode, Node currentNode) {
return updateNode.getName().equals(currentNode.getName()) &&
updateNode.getGeoLat().compareTo(currentNode.getGeoLat()) == 0 &&
updateNode.getGeoLng().compareTo(currentNode.getGeoLng()) == 0 &&
updateNode.getExternalMappingId().equals(currentNode.getExternalMappingId()) &&
updateNode.getCountryId().equals(currentNode.getCountryId()) &&
updateNode.getIntermediate().equals(currentNode.getIntermediate()) &&
updateNode.getDestination().equals(currentNode.getDestination()) &&
updateNode.getSource().equals(currentNode.getSource()) &&
updateNode.getAddress().equals(currentNode.getAddress()) &&
updateNode.getDeprecated().equals(currentNode.getDeprecated()) &&
updateNode.getId().equals(currentNode.getId()) &&
updateNode.getPredecessorRequired().equals(currentNode.getPredecessorRequired()) &&
return Objects.equals(updateNode.getName(), currentNode.getName()) &&
compareBigDecimal(updateNode.getGeoLat(), currentNode.getGeoLat()) &&
compareBigDecimal(updateNode.getGeoLng(), currentNode.getGeoLng()) &&
Objects.equals(updateNode.getExternalMappingId(), currentNode.getExternalMappingId()) &&
Objects.equals(updateNode.getCountryId(), currentNode.getCountryId()) &&
Objects.equals(updateNode.getIntermediate(), currentNode.getIntermediate()) &&
Objects.equals(updateNode.getDestination(), currentNode.getDestination()) &&
Objects.equals(updateNode.getSource(), currentNode.getSource()) &&
Objects.equals(updateNode.getAddress(), currentNode.getAddress()) &&
Objects.equals(updateNode.getDeprecated(), currentNode.getDeprecated()) &&
Objects.equals(updateNode.getId(), currentNode.getId()) &&
Objects.equals(updateNode.getPredecessorRequired(), currentNode.getPredecessorRequired()) &&
compare(updateNode.getNodePredecessors(), currentNode.getNodePredecessors()) &&
compare(updateNode.getOutboundCountries(), currentNode.getOutboundCountries());
}
private boolean compareBigDecimal(BigDecimal a, BigDecimal b) {
if (a == null && b == null) return true;
if (a == null || b == null) return false;
return a.compareTo(b) == 0;
}
private boolean compare(Collection<Integer> outbound1, Collection<Integer> outbound2) {

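Background for the new compareBigDecimal helper: BigDecimal.equals compares value and scale, while compareTo compares the numeric value only, so a coordinate that comes back from a DECIMAL column with extra trailing zeros would otherwise be reported as a change. A short JDK-only illustration (values chosen for the example):

import java.math.BigDecimal;

public class BigDecimalCompareDemo {
    public static void main(String[] args) {
        BigDecimal stored = new BigDecimal("52.5200");  // e.g. scale 4 from a DECIMAL(8,4) column
        BigDecimal parsed = new BigDecimal("52.52");    // same value, scale 2 from the upload

        System.out.println(stored.equals(parsed));          // false: equals compares value AND scale
        System.out.println(stored.compareTo(parsed) == 0);  // true: compareTo compares numeric value only
    }
}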

@@ -0,0 +1,50 @@
# MSSQL Profile Configuration
# Activate with: -Dspring.profiles.active=mssql or SPRING_PROFILES_ACTIVE=mssql
# Application Name
spring.application.name=lcc
# Database Configuration - MSSQL
spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver
spring.datasource.url=jdbc:sqlserver://${DB_HOST:localhost}:1433;databaseName=${DB_DATABASE:lcc};encrypt=true;trustServerCertificate=true
spring.datasource.username=${DB_USER:sa}
spring.datasource.password=${DB_PASSWORD}
# File Upload Limits
spring.servlet.multipart.max-file-size=30MB
spring.servlet.multipart.max-request-size=50MB
# Azure AD Configuration
spring.cloud.azure.active-directory.enabled=true
spring.cloud.azure.active-directory.authorization-clients.graph.scopes=openid,profile,email,https://graph.microsoft.com/User.Read
# Management Endpoints
management.endpoints.web.exposure.include=health,info,metrics
management.endpoint.health.show-details=when-authorized
# Flyway Migration - MSSQL
spring.flyway.enabled=true
spring.flyway.locations=classpath:db/migration/mssql
spring.flyway.baseline-on-migrate=true
spring.sql.init.mode=never
# LCC Configuration
lcc.allowed_cors=
lcc.allowed_oauth_token_cors=*
lcc.auth.identify.by=workday
lcc.auth.claim.workday=employeeid
lcc.auth.claim.email=preferred_username
lcc.auth.claim.firstname=given_name
lcc.auth.claim.lastname=family_name
lcc.auth.claim.ignore.workday=false
# Bulk Import
lcc.bulk.sheet_password=secretSheet?!
# Calculation Job Processor Configuration
calculation.job.processor.enabled=true
calculation.job.processor.pool-size=1
calculation.job.processor.delay=5000
calculation.job.processor.thread-name-prefix=calc-job-


@@ -0,0 +1,50 @@
# MySQL Profile Configuration
# Activate with: -Dspring.profiles.active=mysql or SPRING_PROFILES_ACTIVE=mysql
# Application Name
spring.application.name=lcc
# Database Configuration - MySQL
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.url=jdbc:mysql://${DB_HOST:localhost}:3306/${DB_DATABASE:lcc}
spring.datasource.username=${DB_USER:root}
spring.datasource.password=${DB_PASSWORD}
# File Upload Limits
spring.servlet.multipart.max-file-size=30MB
spring.servlet.multipart.max-request-size=50MB
# Azure AD Configuration
spring.cloud.azure.active-directory.enabled=true
spring.cloud.azure.active-directory.authorization-clients.graph.scopes=openid,profile,email,https://graph.microsoft.com/User.Read
# Management Endpoints
management.endpoints.web.exposure.include=health,info,metrics
management.endpoint.health.show-details=when-authorized
# Flyway Migration - MySQL
spring.flyway.enabled=true
spring.flyway.locations=classpath:db/migration/mysql
spring.flyway.baseline-on-migrate=true
spring.sql.init.mode=never
# LCC Configuration
lcc.allowed_cors=
lcc.allowed_oauth_token_cors=*
lcc.auth.identify.by=workday
lcc.auth.claim.workday=employeeid
lcc.auth.claim.email=preferred_username
lcc.auth.claim.firstname=given_name
lcc.auth.claim.lastname=family_name
lcc.auth.claim.ignore.workday=false
# Bulk Import
lcc.bulk.sheet_password=secretSheet?!
# Calculation Job Processor Configuration
calculation.job.processor.enabled=true
calculation.job.processor.pool-size=1
calculation.job.processor.delay=5000
calculation.job.processor.thread-name-prefix=calc-job-

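With one properties file per database vendor, any vendor-specific beans can be selected with the same profiles. A hedged sketch, assuming nothing about the real bean names in the code base; the PagingDialect interface and both beans are purely illustrative:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;

// Hypothetical example: the interface and bean names are illustrative only.
@Configuration
public class DialectConfig {

    public interface PagingDialect {
        String limitClause(int limit);
    }

    @Bean
    @Profile("mssql")
    public PagingDialect mssqlPagingDialect() {
        // SQL Server has no LIMIT keyword; use OFFSET/FETCH instead.
        return limit -> "OFFSET 0 ROWS FETCH NEXT " + limit + " ROWS ONLY";
    }

    @Bean
    @Profile("!mssql")
    public PagingDialect defaultPagingDialect() {
        // Any profile other than mssql (including the default MySQL setup) uses LIMIT.
        return limit -> "LIMIT " + limit;
    }
}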

@@ -1,8 +1,17 @@
# MySQL Profile Configuration
# Activate with: -Dspring.profiles.active=mysql or SPRING_PROFILES_ACTIVE=mysql
# Application Name
spring.application.name=lcc
# Database Configuration
# Active Profile (mysql or mssql)
spring.profiles.active=prod,mysql
# Database Configuration - MySQL
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.url=jdbc:mysql://${DB_HOST:localhost}:3306/${DB_DATABASE:lcc}
spring.datasource.username=${DB_USER:root}
spring.datasource.password=${DB_PASSWORD}
# File Upload Limits
spring.servlet.multipart.max-file-size=30MB
@@ -16,16 +25,16 @@ spring.cloud.azure.active-directory.authorization-clients.graph.scopes=openid,pr
management.endpoints.web.exposure.include=health,info,metrics
management.endpoint.health.show-details=when-authorized
# Flyway Migration
# Flyway Migration - MySQL
spring.flyway.enabled=true
spring.flyway.locations=classpath:db/migration
spring.flyway.locations=classpath:db/migration/mysql
spring.flyway.baseline-on-migrate=true
spring.sql.init.mode=never
# LCC Configuration
lcc.allowed_cors=
lcc.allowed_oauth_token_cors=*
lcc.auth.identify.by=workday
lcc.auth.claim.workday=employeeid
lcc.auth.claim.email=preferred_username

File diff suppressed because it is too large.


@@ -0,0 +1,58 @@
-- Add retries and priority columns to calculation_job (if not exists)
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'calculation_job') AND name = 'retries')
BEGIN
ALTER TABLE calculation_job ADD retries INT NOT NULL DEFAULT 0;
END
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'calculation_job') AND name = 'priority')
BEGIN
ALTER TABLE calculation_job
ADD priority VARCHAR(10) NOT NULL DEFAULT 'MEDIUM'
CHECK (priority IN ('LOW', 'MEDIUM', 'HIGH'));
END
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = OBJECT_ID(N'calculation_job') AND name = 'idx_priority')
BEGIN
CREATE INDEX idx_priority ON calculation_job(priority);
END
-- Add retries column to distance_matrix (if not exists)
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'distance_matrix') AND name = 'retries')
BEGIN
ALTER TABLE distance_matrix ADD retries INT NOT NULL DEFAULT 0;
END
ALTER TABLE distance_matrix
DROP CONSTRAINT chk_distance_matrix_state;
ALTER TABLE distance_matrix
ADD CONSTRAINT chk_distance_matrix_state CHECK (state IN ('VALID', 'STALE', 'EXCEPTION'));
-- Check if distance_d2d column exists before adding (already exists in V1)
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'premise_destination') AND name = 'distance_d2d')
BEGIN
ALTER TABLE premise_destination
ADD distance_d2d DECIMAL(15, 2) DEFAULT NULL;
EXEC sp_addextendedproperty
@name = N'MS_Description',
@value = N'travel distance between the two nodes in meters',
@level0type = N'SCHEMA', @level0name = 'dbo',
@level1type = N'TABLE', @level1name = 'premise_destination',
@level2type = N'COLUMN', @level2name = 'distance_d2d';
END
-- Add distance column to premise_route_section (if not exists)
IF NOT EXISTS (SELECT * FROM sys.columns WHERE object_id = OBJECT_ID(N'premise_route_section') AND name = 'distance')
BEGIN
ALTER TABLE premise_route_section
ADD distance DECIMAL(15, 2) DEFAULT NULL;
EXEC sp_addextendedproperty
@name = N'MS_Description',
@value = N'travel distance between the two nodes in meters',
@level0type = N'SCHEMA', @level0name = 'dbo',
@level1type = N'TABLE', @level1name = 'premise_route_section',
@level2type = N'COLUMN', @level2name = 'distance';
END


@@ -0,0 +1,15 @@
-- Merge statement for MSSQL (equivalent to INSERT ... ON DUPLICATE KEY UPDATE)
MERGE INTO packaging_property_type AS target
USING (VALUES
(N'Stackable', 'STACKABLE', 'BOOLEAN', NULL, 0, N'desc', 'general', 1),
(N'Rust Prevention', 'RUST_PREVENTION', 'BOOLEAN', NULL, 0, N'desc', 'general', 2),
(N'Mixable', 'MIXABLE', 'BOOLEAN', NULL, 0, N'desc', 'general', 3)
) AS source (name, external_mapping_id, data_type, validation_rule, is_required, description, property_group, sequence_number)
ON target.external_mapping_id = source.external_mapping_id
WHEN MATCHED THEN
UPDATE SET
name = source.name,
data_type = source.data_type
WHEN NOT MATCHED THEN
INSERT (name, external_mapping_id, data_type, validation_rule, is_required, description, property_group, sequence_number)
VALUES (source.name, source.external_mapping_id, source.data_type, source.validation_rule, source.is_required, source.description, source.property_group, source.sequence_number);


@@ -0,0 +1,666 @@
-- Property management tables
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'property_set') AND type in (N'U'))
CREATE TABLE property_set
(
-- Represents a collection of properties valid for a specific time period
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
start_date DATETIME2 NOT NULL DEFAULT GETDATE(),
end_date DATETIME2 NULL,
state VARCHAR(8) NOT NULL,
CONSTRAINT chk_property_state_values CHECK (state IN ('DRAFT', 'VALID', 'INVALID', 'EXPIRED')),
CONSTRAINT chk_property_date_range CHECK (end_date IS NULL OR end_date > start_date)
);
CREATE INDEX idx_dates ON property_set (start_date, end_date);
CREATE INDEX idx_property_set_id ON property_set (id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'system_property_type') AND type in (N'U'))
CREATE TABLE system_property_type
(
-- Stores system-wide configuration property types
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
name NVARCHAR(255) NOT NULL,
external_mapping_id VARCHAR(16),
description NVARCHAR(512) NOT NULL,
property_group VARCHAR(32) NOT NULL,
sequence_number INT NOT NULL,
data_type VARCHAR(16) NOT NULL,
validation_rule VARCHAR(64),
CONSTRAINT idx_external_mapping UNIQUE (external_mapping_id),
CONSTRAINT chk_system_data_type_values CHECK (data_type IN
('INT', 'PERCENTAGE', 'BOOLEAN', 'CURRENCY', 'ENUMERATION',
'TEXT'))
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'system_property') AND type in (N'U'))
CREATE TABLE system_property
(
-- Stores system-wide configuration properties
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
property_set_id INT NOT NULL,
system_property_type_id INT NOT NULL,
property_value NVARCHAR(500),
FOREIGN KEY (property_set_id) REFERENCES property_set (id),
FOREIGN KEY (system_property_type_id) REFERENCES system_property_type (id),
CONSTRAINT idx_system_property_type_id_property_set UNIQUE (system_property_type_id, property_set_id)
);
CREATE INDEX idx_system_property_type_id ON system_property (system_property_type_id);
CREATE INDEX idx_system_property_set_id ON system_property (property_set_id);
-- country
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country') AND type in (N'U'))
CREATE TABLE country
(
id INT NOT NULL IDENTITY(1,1),
iso_code VARCHAR(2) NOT NULL,
region_code VARCHAR(5) NOT NULL,
name NVARCHAR(255) NOT NULL,
is_deprecated BIT NOT NULL DEFAULT 0,
PRIMARY KEY (id),
CONSTRAINT uk_country_iso_code UNIQUE (iso_code),
CONSTRAINT chk_country_region_code
CHECK (region_code IN ('EMEA', 'LATAM', 'APAC', 'NAM'))
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country_property_type') AND type in (N'U'))
CREATE TABLE country_property_type
(
id INT NOT NULL IDENTITY(1,1),
name NVARCHAR(255) NOT NULL,
external_mapping_id VARCHAR(16),
data_type VARCHAR(16) NOT NULL,
validation_rule VARCHAR(64),
description NVARCHAR(512) NOT NULL,
property_group VARCHAR(32) NOT NULL,
sequence_number INT NOT NULL,
is_required BIT NOT NULL DEFAULT 0,
CONSTRAINT chk_country_data_type_values CHECK (data_type IN
('INT', 'PERCENTAGE', 'BOOLEAN', 'CURRENCY', 'ENUMERATION',
'TEXT')),
PRIMARY KEY (id)
);
CREATE INDEX idx_property_type_data_type ON country_property_type (data_type);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country_property') AND type in (N'U'))
CREATE TABLE country_property
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
country_id INT NOT NULL,
country_property_type_id INT NOT NULL,
property_set_id INT NOT NULL,
property_value NVARCHAR(500),
FOREIGN KEY (country_id) REFERENCES country (id),
FOREIGN KEY (country_property_type_id) REFERENCES country_property_type (id),
FOREIGN KEY (property_set_id) REFERENCES property_set (id),
CONSTRAINT idx_country_property UNIQUE (country_id, country_property_type_id, property_set_id)
);
-- Main table for user information
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_user') AND type in (N'U'))
CREATE TABLE sys_user
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
workday_id VARCHAR(32) NOT NULL,
email VARCHAR(254) NOT NULL,
firstname NVARCHAR(100) NOT NULL,
lastname NVARCHAR(100) NOT NULL,
is_active BIT NOT NULL DEFAULT 1,
CONSTRAINT idx_user_email UNIQUE (email),
CONSTRAINT idx_user_workday UNIQUE (workday_id)
);
-- Group definitions
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_group') AND type in (N'U'))
CREATE TABLE sys_group
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
group_name NVARCHAR(64) NOT NULL,
group_description NVARCHAR(MAX) NOT NULL,
CONSTRAINT idx_group_name UNIQUE (group_name)
);
-- Junction table for user-group assignments
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_user_group_mapping') AND type in (N'U'))
CREATE TABLE sys_user_group_mapping
(
user_id INT NOT NULL,
group_id INT NOT NULL,
PRIMARY KEY (user_id, group_id),
FOREIGN KEY (user_id) REFERENCES sys_user (id),
FOREIGN KEY (group_id) REFERENCES sys_group (id)
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_user_node') AND type in (N'U'))
CREATE TABLE sys_user_node
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
user_id INT NOT NULL,
country_id INT NOT NULL,
name NVARCHAR(254) NOT NULL,
address NVARCHAR(500) NOT NULL,
geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
is_deprecated BIT DEFAULT 0,
FOREIGN KEY (user_id) REFERENCES sys_user (id),
FOREIGN KEY (country_id) REFERENCES country (id)
);
-- Main table for application information
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_app') AND type in (N'U'))
CREATE TABLE sys_app
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
client_id VARCHAR(255) NOT NULL UNIQUE,
client_secret VARCHAR(255) NOT NULL,
name NVARCHAR(255) NOT NULL
);
-- Junction table for app-group assignments
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_app_group_mapping') AND type in (N'U'))
CREATE TABLE sys_app_group_mapping
(
app_id INT NOT NULL,
group_id INT NOT NULL,
PRIMARY KEY (app_id, group_id),
FOREIGN KEY (app_id) REFERENCES sys_app (id),
FOREIGN KEY (group_id) REFERENCES sys_group (id)
);
-- logistic nodes
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'node') AND type in (N'U'))
CREATE TABLE node
(
id INT IDENTITY(1,1) PRIMARY KEY,
country_id INT NOT NULL,
name NVARCHAR(255) NOT NULL,
address NVARCHAR(500) NOT NULL,
external_mapping_id VARCHAR(32),
predecessor_required BIT NOT NULL DEFAULT 0,
is_destination BIT NOT NULL,
is_source BIT NOT NULL,
is_intermediate BIT NOT NULL,
geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
updated_at DATETIME2 NOT NULL DEFAULT GETDATE(),
is_deprecated BIT NOT NULL DEFAULT 0,
FOREIGN KEY (country_id) REFERENCES country (id)
);
CREATE INDEX idx_country_id ON node (country_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'node_predecessor_chain') AND type in (N'U'))
CREATE TABLE node_predecessor_chain
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
node_id INT NOT NULL,
FOREIGN KEY (node_id) REFERENCES node (id)
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'node_predecessor_entry') AND type in (N'U'))
CREATE TABLE node_predecessor_entry
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
node_id INT NOT NULL,
node_predecessor_chain_id INT NOT NULL,
sequence_number INT NOT NULL CHECK (sequence_number > 0),
FOREIGN KEY (node_id) REFERENCES node (id),
FOREIGN KEY (node_predecessor_chain_id) REFERENCES node_predecessor_chain (id),
CONSTRAINT uk_node_predecessor UNIQUE (node_predecessor_chain_id, sequence_number)
);
CREATE INDEX idx_node_predecessor ON node_predecessor_entry (node_predecessor_chain_id);
CREATE INDEX idx_sequence ON node_predecessor_entry (sequence_number);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'outbound_country_mapping') AND type in (N'U'))
CREATE TABLE outbound_country_mapping
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
node_id INT NOT NULL,
country_id INT NOT NULL,
FOREIGN KEY (node_id) REFERENCES node (id),
FOREIGN KEY (country_id) REFERENCES country (id),
CONSTRAINT uk_node_id_country_id UNIQUE (node_id, country_id)
);
CREATE INDEX idx_ocm_node_id ON outbound_country_mapping (node_id);
CREATE INDEX idx_ocm_country_id ON outbound_country_mapping (country_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'distance_matrix') AND type in (N'U'))
CREATE TABLE distance_matrix
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
from_node_id INT DEFAULT NULL,
to_node_id INT DEFAULT NULL,
from_user_node_id INT DEFAULT NULL,
to_user_node_id INT DEFAULT NULL,
from_geo_lat DECIMAL(8, 4) CHECK (from_geo_lat BETWEEN -90 AND 90),
from_geo_lng DECIMAL(8, 4) CHECK (from_geo_lng BETWEEN -180 AND 180),
to_geo_lat DECIMAL(8, 4) CHECK (to_geo_lat BETWEEN -90 AND 90),
to_geo_lng DECIMAL(8, 4) CHECK (to_geo_lng BETWEEN -180 AND 180),
distance DECIMAL(15, 2) NOT NULL,
updated_at DATETIME2 NOT NULL DEFAULT GETDATE(),
state VARCHAR(10) NOT NULL,
FOREIGN KEY (from_node_id) REFERENCES node (id),
FOREIGN KEY (to_node_id) REFERENCES node (id),
FOREIGN KEY (from_user_node_id) REFERENCES sys_user_node (id),
FOREIGN KEY (to_user_node_id) REFERENCES sys_user_node (id),
CONSTRAINT chk_distance_matrix_state CHECK (state IN ('VALID', 'STALE')),
CONSTRAINT chk_from_node_xor CHECK (
(from_node_id IS NOT NULL AND from_user_node_id IS NULL) OR
(from_node_id IS NULL AND from_user_node_id IS NOT NULL)
),
CONSTRAINT chk_to_node_xor CHECK (
(to_node_id IS NOT NULL AND to_user_node_id IS NULL) OR
(to_node_id IS NULL AND to_user_node_id IS NOT NULL)
),
CONSTRAINT uk_nodes_unique UNIQUE (from_node_id, to_node_id, from_user_node_id, to_user_node_id)
);
CREATE INDEX idx_from_to_nodes ON distance_matrix (from_node_id, to_node_id);
CREATE INDEX idx_user_from_to_nodes ON distance_matrix (from_user_node_id, to_user_node_id);
-- container rates
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'validity_period') AND type in (N'U'))
CREATE TABLE validity_period
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
start_date DATETIME2 NOT NULL DEFAULT GETDATE(),
end_date DATETIME2 DEFAULT NULL,
renewals INT DEFAULT 0,
state VARCHAR(8) NOT NULL CHECK (state IN ('DRAFT', 'VALID', 'INVALID', 'EXPIRED')),
CONSTRAINT chk_validity_date_range CHECK (end_date IS NULL OR end_date > start_date)
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'container_rate') AND type in (N'U'))
CREATE TABLE container_rate
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
from_node_id INT NOT NULL,
to_node_id INT NOT NULL,
container_rate_type VARCHAR(8) CHECK (container_rate_type IN ('RAIL', 'SEA', 'POST_RUN', 'ROAD')),
rate_teu DECIMAL(15, 2) NOT NULL,
rate_feu DECIMAL(15, 2) NOT NULL,
rate_hc DECIMAL(15, 2) NOT NULL,
lead_time INT NOT NULL,
validity_period_id INT NOT NULL,
FOREIGN KEY (from_node_id) REFERENCES node (id),
FOREIGN KEY (to_node_id) REFERENCES node (id),
FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
CONSTRAINT uk_container_rate_unique UNIQUE (from_node_id, to_node_id, validity_period_id, container_rate_type)
);
CREATE INDEX idx_cr_from_to_nodes ON container_rate (from_node_id, to_node_id);
CREATE INDEX idx_cr_validity_period_id ON container_rate (validity_period_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'country_matrix_rate') AND type in (N'U'))
CREATE TABLE country_matrix_rate
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
from_country_id INT NOT NULL,
to_country_id INT NOT NULL,
rate DECIMAL(15, 2) NOT NULL,
validity_period_id INT NOT NULL,
FOREIGN KEY (from_country_id) REFERENCES country (id),
FOREIGN KEY (to_country_id) REFERENCES country (id),
FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
CONSTRAINT uk_country_matrix_rate_unique UNIQUE (from_country_id, to_country_id, validity_period_id)
);
CREATE INDEX idx_cmr_from_to_country ON country_matrix_rate (from_country_id, to_country_id);
CREATE INDEX idx_cmr_validity_period_id ON country_matrix_rate (validity_period_id);
-- packaging and material
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'material') AND type in (N'U'))
CREATE TABLE material
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
part_number VARCHAR(12) NOT NULL,
normalized_part_number VARCHAR(12) NOT NULL,
hs_code VARCHAR(11),
name NVARCHAR(500) NOT NULL,
is_deprecated BIT NOT NULL DEFAULT 0,
CONSTRAINT uq_normalized_part_number UNIQUE (normalized_part_number)
);
CREATE INDEX idx_part_number ON material (part_number);
CREATE INDEX idx_normalized_part_number ON material (normalized_part_number);
CREATE INDEX idx_hs_code ON material (hs_code);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging_dimension') AND type in (N'U'))
CREATE TABLE packaging_dimension
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
type VARCHAR(3) DEFAULT 'HU',
length INT NOT NULL,
width INT NOT NULL,
height INT NOT NULL,
displayed_dimension_unit VARCHAR(2) DEFAULT 'CM',
weight INT NOT NULL,
displayed_weight_unit VARCHAR(2) DEFAULT 'KG',
content_unit_count INT NOT NULL,
is_deprecated BIT NOT NULL DEFAULT 0,
CONSTRAINT chk_packaging_dimension_type_values CHECK (type IN ('SHU', 'HU')),
CONSTRAINT chk_packaging_dimension_displayed_dimension_unit CHECK (displayed_dimension_unit IN ('MM', 'CM', 'M')),
CONSTRAINT chk_packaging_dimension_displayed_weight_unit CHECK (displayed_weight_unit IN ('T', 'G', 'KG'))
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging') AND type in (N'U'))
CREATE TABLE packaging
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
supplier_node_id INT NOT NULL,
material_id INT NOT NULL,
hu_dimension_id INT NOT NULL,
shu_dimension_id INT NOT NULL,
is_deprecated BIT NOT NULL DEFAULT 0,
FOREIGN KEY (supplier_node_id) REFERENCES node (id),
FOREIGN KEY (material_id) REFERENCES material (id),
FOREIGN KEY (hu_dimension_id) REFERENCES packaging_dimension (id),
FOREIGN KEY (shu_dimension_id) REFERENCES packaging_dimension (id)
);
CREATE INDEX idx_pkg_material_id ON packaging (material_id);
CREATE INDEX idx_pkg_hu_dimension_id ON packaging (hu_dimension_id);
CREATE INDEX idx_pkg_shu_dimension_id ON packaging (shu_dimension_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging_property_type') AND type in (N'U'))
CREATE TABLE packaging_property_type
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
name NVARCHAR(255) NOT NULL,
external_mapping_id VARCHAR(16) NOT NULL,
description NVARCHAR(255) NOT NULL,
property_group VARCHAR(32) NOT NULL,
sequence_number INT NOT NULL,
data_type VARCHAR(16),
validation_rule VARCHAR(64),
is_required BIT NOT NULL DEFAULT 0,
CONSTRAINT idx_packaging_property_type UNIQUE (external_mapping_id),
CONSTRAINT chk_packaging_data_type_values CHECK (data_type IN
('INT', 'PERCENTAGE', 'BOOLEAN', 'CURRENCY', 'ENUMERATION',
'TEXT'))
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'packaging_property') AND type in (N'U'))
CREATE TABLE packaging_property
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
packaging_property_type_id INT NOT NULL,
packaging_id INT NOT NULL,
property_value NVARCHAR(500),
FOREIGN KEY (packaging_property_type_id) REFERENCES packaging_property_type (id),
FOREIGN KEY (packaging_id) REFERENCES packaging (id),
CONSTRAINT idx_packaging_property_unique UNIQUE (packaging_property_type_id, packaging_id)
);
CREATE INDEX idx_pp_packaging_property_type_id ON packaging_property (packaging_property_type_id);
CREATE INDEX idx_pp_packaging_id ON packaging_property (packaging_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise') AND type in (N'U'))
CREATE TABLE premise
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
material_id INT NOT NULL,
supplier_node_id INT,
user_supplier_node_id INT,
geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
country_id INT NOT NULL,
packaging_id INT DEFAULT NULL,
user_id INT NOT NULL,
created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
updated_at DATETIME2 NOT NULL DEFAULT GETDATE(),
material_cost DECIMAL(15, 2) DEFAULT NULL,
is_fca_enabled BIT DEFAULT 0,
oversea_share DECIMAL(8, 4) DEFAULT NULL,
hs_code VARCHAR(11) DEFAULT NULL,
tariff_measure INT DEFAULT NULL,
tariff_rate DECIMAL(8, 4) DEFAULT NULL,
tariff_unlocked BIT DEFAULT 0,
state VARCHAR(10) NOT NULL DEFAULT 'DRAFT',
individual_hu_length INT,
individual_hu_height INT,
individual_hu_width INT,
individual_hu_weight INT,
hu_displayed_dimension_unit VARCHAR(2) DEFAULT 'MM',
hu_displayed_weight_unit VARCHAR(2) DEFAULT 'KG',
hu_unit_count INT DEFAULT NULL,
hu_stackable BIT DEFAULT 1,
hu_mixable BIT DEFAULT 1,
FOREIGN KEY (material_id) REFERENCES material (id),
FOREIGN KEY (supplier_node_id) REFERENCES node (id),
FOREIGN KEY (user_supplier_node_id) REFERENCES sys_user_node (id),
FOREIGN KEY (packaging_id) REFERENCES packaging (id),
FOREIGN KEY (user_id) REFERENCES sys_user (id),
CONSTRAINT chk_premise_state_values CHECK (state IN ('DRAFT', 'COMPLETED', 'ARCHIVED')),
CONSTRAINT chk_premise_displayed_dimension_unit CHECK (hu_displayed_dimension_unit IN ('MM', 'CM', 'M')),
CONSTRAINT chk_premise_displayed_weight_unit CHECK (hu_displayed_weight_unit IN ('T', 'G', 'KG'))
);
CREATE INDEX idx_prem_material_id ON premise (material_id);
CREATE INDEX idx_prem_supplier_node_id ON premise (supplier_node_id);
CREATE INDEX idx_prem_packaging_id ON premise (packaging_id);
CREATE INDEX idx_prem_user_id ON premise (user_id);
CREATE INDEX idx_prem_user_supplier_node_id ON premise (user_supplier_node_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_destination') AND type in (N'U'))
CREATE TABLE premise_destination
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
premise_id INT NOT NULL,
annual_amount INT,
destination_node_id INT NOT NULL,
is_d2d BIT DEFAULT 0,
rate_d2d DECIMAL(15, 2) DEFAULT NULL CHECK (rate_d2d >= 0),
lead_time_d2d INT DEFAULT NULL CHECK (lead_time_d2d >= 0),
repacking_cost DECIMAL(15, 2) DEFAULT NULL CHECK (repacking_cost >= 0),
handling_cost DECIMAL(15, 2) DEFAULT NULL CHECK (handling_cost >= 0),
disposal_cost DECIMAL(15, 2) DEFAULT NULL CHECK (disposal_cost >= 0),
geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
country_id INT NOT NULL,
distance_d2d DECIMAL(15, 2),
FOREIGN KEY (premise_id) REFERENCES premise (id),
FOREIGN KEY (country_id) REFERENCES country (id),
FOREIGN KEY (destination_node_id) REFERENCES node (id)
);
CREATE INDEX idx_pd_destination_node_id ON premise_destination (destination_node_id);
CREATE INDEX idx_pd_premise_id ON premise_destination (premise_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_route_node') AND type in (N'U'))
CREATE TABLE premise_route_node
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
node_id INT DEFAULT NULL,
user_node_id INT DEFAULT NULL,
name NVARCHAR(255) NOT NULL,
address NVARCHAR(500),
external_mapping_id VARCHAR(32) NOT NULL,
country_id INT NOT NULL,
is_destination BIT DEFAULT 0,
is_intermediate BIT DEFAULT 0,
is_source BIT DEFAULT 0,
geo_lat DECIMAL(8, 4) CHECK (geo_lat BETWEEN -90 AND 90),
geo_lng DECIMAL(8, 4) CHECK (geo_lng BETWEEN -180 AND 180),
is_outdated BIT DEFAULT 0,
FOREIGN KEY (node_id) REFERENCES node (id),
FOREIGN KEY (country_id) REFERENCES country (id),
FOREIGN KEY (user_node_id) REFERENCES sys_user_node (id),
CONSTRAINT chk_node CHECK (user_node_id IS NULL OR node_id IS NULL)
);
CREATE INDEX idx_prn_node_id ON premise_route_node (node_id);
CREATE INDEX idx_prn_user_node_id ON premise_route_node (user_node_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_route') AND type in (N'U'))
CREATE TABLE premise_route
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
premise_destination_id INT NOT NULL,
is_fastest BIT DEFAULT 0,
is_cheapest BIT DEFAULT 0,
is_selected BIT DEFAULT 0,
FOREIGN KEY (premise_destination_id) REFERENCES premise_destination (id)
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'premise_route_section') AND type in (N'U'))
CREATE TABLE premise_route_section
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
premise_route_id INT NOT NULL,
from_route_node_id INT NOT NULL,
to_route_node_id INT NOT NULL,
list_position INT NOT NULL,
transport_type VARCHAR(16) CHECK (transport_type IN ('RAIL', 'SEA', 'ROAD', 'POST_RUN')),
rate_type VARCHAR(16) CHECK (rate_type IN ('CONTAINER', 'MATRIX', 'NEAR_BY')),
is_pre_run BIT DEFAULT 0,
is_main_run BIT DEFAULT 0,
is_post_run BIT DEFAULT 0,
is_outdated BIT DEFAULT 0,
CONSTRAINT fk_premise_route_section_premise_route_id FOREIGN KEY (premise_route_id) REFERENCES premise_route (id),
FOREIGN KEY (from_route_node_id) REFERENCES premise_route_node (id),
FOREIGN KEY (to_route_node_id) REFERENCES premise_route_node (id),
CONSTRAINT chk_main_run CHECK (transport_type = 'ROAD' OR transport_type = 'POST_RUN' OR is_main_run = 1)
);
CREATE INDEX idx_prs_premise_route_id ON premise_route_section (premise_route_id);
CREATE INDEX idx_prs_from_route_node_id ON premise_route_section (from_route_node_id);
CREATE INDEX idx_prs_to_route_node_id ON premise_route_section (to_route_node_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'calculation_job') AND type in (N'U'))
CREATE TABLE calculation_job
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
premise_id INT NOT NULL,
calculation_date DATETIME2 NOT NULL DEFAULT GETDATE(),
validity_period_id INT NOT NULL,
property_set_id INT NOT NULL,
job_state VARCHAR(10) NOT NULL CHECK (job_state IN ('CREATED', 'SCHEDULED', 'VALID', 'INVALID', 'EXCEPTION')),
error_id INT DEFAULT NULL,
user_id INT NOT NULL,
FOREIGN KEY (premise_id) REFERENCES premise (id),
FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
FOREIGN KEY (property_set_id) REFERENCES property_set (id),
FOREIGN KEY (user_id) REFERENCES sys_user (id)
);
CREATE INDEX idx_cj_premise_id ON calculation_job (premise_id);
CREATE INDEX idx_cj_validity_period_id ON calculation_job (validity_period_id);
CREATE INDEX idx_cj_property_set_id ON calculation_job (property_set_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'calculation_job_destination') AND type in (N'U'))
CREATE TABLE calculation_job_destination
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
calculation_job_id INT NOT NULL,
premise_destination_id INT NOT NULL,
shipping_frequency INT,
total_cost DECIMAL(15, 2),
annual_amount DECIMAL(15, 2),
annual_risk_cost DECIMAL(15, 2) NOT NULL,
annual_chance_cost DECIMAL(15, 2) NOT NULL,
is_small_unit BIT DEFAULT 0,
annual_repacking_cost DECIMAL(15, 2) NOT NULL,
annual_handling_cost DECIMAL(15, 2) NOT NULL,
annual_disposal_cost DECIMAL(15, 2) NOT NULL,
operational_stock DECIMAL(15, 2) NOT NULL,
safety_stock DECIMAL(15, 2) NOT NULL,
stocked_inventory DECIMAL(15, 2) NOT NULL,
in_transport_stock DECIMAL(15, 2) NOT NULL,
stock_before_payment DECIMAL(15, 2) NOT NULL,
annual_capital_cost DECIMAL(15, 2) NOT NULL,
annual_storage_cost DECIMAL(15, 2) NOT NULL,
custom_value DECIMAL(15, 2) NOT NULL,
custom_duties DECIMAL(15, 2) NOT NULL,
tariff_rate DECIMAL(8, 4) NOT NULL,
annual_custom_cost DECIMAL(15, 2) NOT NULL,
air_freight_share_max DECIMAL(8, 4) NOT NULL,
air_freight_share DECIMAL(8, 4) NOT NULL,
air_freight_volumetric_weight DECIMAL(15, 2) NOT NULL,
air_freight_weight DECIMAL(15, 2) NOT NULL,
annual_air_freight_cost DECIMAL(15, 2) NOT NULL,
is_d2d BIT DEFAULT 0,
rate_d2d DECIMAL(15, 2) DEFAULT NULL,
container_type VARCHAR(8),
hu_count INT NOT NULL,
layer_structure NVARCHAR(MAX),
layer_count INT NOT NULL,
transport_weight_exceeded BIT DEFAULT 0,
annual_transportation_cost DECIMAL(15, 2) NOT NULL,
container_utilization DECIMAL(8, 4) NOT NULL,
transit_time_in_days INT NOT NULL,
safety_stock_in_days INT NOT NULL,
material_cost DECIMAL(15, 2) NOT NULL,
fca_cost DECIMAL(15, 2) NOT NULL,
FOREIGN KEY (calculation_job_id) REFERENCES calculation_job (id),
FOREIGN KEY (premise_destination_id) REFERENCES premise_destination (id),
CONSTRAINT chk_container_type CHECK (container_type IN ('TEU', 'FEU', 'HC', 'TRUCK'))
);
CREATE INDEX idx_cjd_calculation_job_id ON calculation_job_destination (calculation_job_id);
CREATE INDEX idx_cjd_premise_destination_id ON calculation_job_destination (premise_destination_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'calculation_job_route_section') AND type in (N'U'))
CREATE TABLE calculation_job_route_section
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
premise_route_section_id INT,
calculation_job_destination_id INT NOT NULL,
transport_type VARCHAR(16) CHECK (transport_type IN ('RAIL', 'SEA', 'ROAD', 'POST_RUN', 'MATRIX', 'D2D')),
is_unmixed_price BIT DEFAULT 0,
is_cbm_price BIT DEFAULT 0,
is_weight_price BIT DEFAULT 0,
is_stacked BIT DEFAULT 0,
is_pre_run BIT DEFAULT 0,
is_main_run BIT DEFAULT 0,
is_post_run BIT DEFAULT 0,
rate DECIMAL(15, 2) NOT NULL,
distance DECIMAL(15, 2) DEFAULT NULL,
cbm_price DECIMAL(15, 2) NOT NULL,
weight_price DECIMAL(15, 2) NOT NULL,
annual_cost DECIMAL(15, 2) NOT NULL,
transit_time INT NOT NULL,
FOREIGN KEY (premise_route_section_id) REFERENCES premise_route_section (id),
FOREIGN KEY (calculation_job_destination_id) REFERENCES calculation_job_destination (id),
CONSTRAINT chk_stacked CHECK (is_unmixed_price = 1 OR is_stacked = 1)
);
CREATE INDEX idx_cjrs_premise_route_section_id ON calculation_job_route_section (premise_route_section_id);
CREATE INDEX idx_cjrs_calculation_job_destination_id ON calculation_job_route_section (calculation_job_destination_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'bulk_operation') AND type in (N'U'))
CREATE TABLE bulk_operation
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
user_id INT NOT NULL,
bulk_file_type VARCHAR(32) NOT NULL,
bulk_processing_type VARCHAR(32) NOT NULL,
state VARCHAR(10) NOT NULL,
[file] VARBINARY(MAX) DEFAULT NULL,
validity_period_id INT DEFAULT NULL,
created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
FOREIGN KEY (user_id) REFERENCES sys_user (id),
FOREIGN KEY (validity_period_id) REFERENCES validity_period (id),
CONSTRAINT chk_bulk_file_type CHECK (bulk_file_type IN ('CONTAINER_RATE', 'COUNTRY_MATRIX', 'MATERIAL', 'PACKAGING', 'NODE')),
CONSTRAINT chk_bulk_operation_state CHECK (state IN ('SCHEDULED', 'PROCESSING', 'COMPLETED', 'EXCEPTION')),
CONSTRAINT chk_bulk_processing_type CHECK (bulk_processing_type IN ('IMPORT', 'EXPORT'))
);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_error') AND type in (N'U'))
CREATE TABLE sys_error
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
user_id INT DEFAULT NULL,
title NVARCHAR(255) NOT NULL,
code NVARCHAR(255) NOT NULL,
message NVARCHAR(1024) NOT NULL,
request NVARCHAR(MAX),
pinia NVARCHAR(MAX),
calculation_job_id INT DEFAULT NULL,
bulk_operation_id INT DEFAULT NULL,
type VARCHAR(16) NOT NULL DEFAULT 'BACKEND',
created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
FOREIGN KEY (user_id) REFERENCES sys_user (id),
FOREIGN KEY (calculation_job_id) REFERENCES calculation_job (id),
FOREIGN KEY (bulk_operation_id) REFERENCES bulk_operation (id),
CONSTRAINT chk_error_type CHECK (type IN ('BACKEND', 'FRONTEND', 'BULK', 'CALCULATION'))
);
CREATE INDEX idx_se_user_id ON sys_error (user_id);
CREATE INDEX idx_se_calculation_job_id ON sys_error (calculation_job_id);
IF NOT EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'sys_error_trace_item') AND type in (N'U'))
CREATE TABLE sys_error_trace_item
(
id INT NOT NULL IDENTITY(1,1) PRIMARY KEY,
error_id INT NOT NULL,
line INT,
[file] VARCHAR(255) NOT NULL,
method VARCHAR(255) NOT NULL,
fullPath VARCHAR(1024) NOT NULL,
created_at DATETIME2 NOT NULL DEFAULT GETDATE(),
FOREIGN KEY (error_id) REFERENCES sys_error (id)
);


@@ -0,0 +1,18 @@
INSERT INTO property_set (state)
SELECT 'VALID'
WHERE NOT EXISTS (
SELECT 1 FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
);
INSERT INTO validity_period (state)
SELECT 'VALID'
WHERE NOT EXISTS (
SELECT 1 FROM validity_period vp
WHERE vp.state = 'VALID'
AND vp.start_date <= GETDATE()
AND (vp.end_date IS NULL OR vp.end_date > GETDATE())
);


@@ -0,0 +1,603 @@
-- ===================================================
-- INSERT statements for system_property_type
-- Mapping: external mapping id -> external_mapping_id
-- Description -> name
-- ===================================================
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: Start node', 'START_REF', 'TEXT', '{}', N'Specifies the starting node of the reference route. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: End node', 'END_REF', 'TEXT', '{}', N'Specifies the end node of the reference route. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: All-time-high container rate (40 ft. GP) [EUR]', 'RISK_REF', 'CURRENCY', '{"GT":0}', N'Specifies the historically maximum container rate of the reference route for a 40 ft. GP container. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Reference route: All-time-low container rate (40 ft. GP) [EUR]', 'CHANCE_REF', 'CURRENCY', '{"GT":0}', N'Specifies the historically lowest container rate of the reference route for a 40 ft. GP container. A historical maximum and a historical minimum value are stored for the reference route. This reference route is used to calculate fluctuations in transport costs.', '2_Reference route', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Payment terms [days]', 'PAYMENT_TERMS', 'INT', '{}', N'Payment terms agreed with suppliers in days. This value is used to calculate the financing costs for goods in transit and in safety stock.', '1_General', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Annual working days', 'WORKDAYS', 'INT', '{"GT": 0, "LT": 366}', N'Annual production working days.', '1_General', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Interest rate inventory [%]', 'INTEREST_RATE', 'PERCENTAGE', '{"GTE": 0}', N'Interest rate used for calculating capital costs.', '1_General', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'FCA fee [%]', 'FCA_FEE', 'PERCENTAGE', '{"GTE": 0}', N'FCA fee to be added to EXW prices. The logistics cost expert must explicitly select this during the calculation for the fee to be applied.', '1_General', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Default customs rate [%]', 'TARIFF_RATE', 'PERCENTAGE', '{"GTE":0}', N'Standard customs duty rate to be applied when the HS Code cannot be resolved automatically.', '1_General', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Customs clearance fee per import & HS code [EUR]', 'CUSTOM_FEE', 'CURRENCY', '{"GTE":0}', N'Avg. customs clearance fee per HS code and import.', '1_General', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Standard reporting format', 'REPORTING', 'ENUMERATION', '{"ENUM":["MEK_B","MEK_C"]}', N'Specifies the reporting format. The MEK_C reporting format includes occasional air transports that occur with overseas production. The MEK_B reporting format hides these for reasons.', '1_General', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'40 ft.', 'FEU', 'BOOLEAN', '{}', N'Enable if calculation should include this container size; container rates to be maintained.', '3_Sea and road transport', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'20 ft.', 'TEU', 'BOOLEAN', '{}', N'Enable if calculation should include this container size; container rates to be maintained.', '3_Sea and road transport', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'40 ft. HC', 'FEU_HQ', 'BOOLEAN', '{}', N'Enable if calculation should include this container size; container rates to be maintained.', '3_Sea and road transport', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Container utilization in mixed containers [%]', 'CONTAINER_UTIL', 'PERCENTAGE', '{"GTE":0,"LTE":1}', N'Utilization degree of mixed containers (loss from stacking/packaging).', '3_Sea and road transport', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Truck utilization road transport EMEA [%]', 'TRUCK_UTIL', 'PERCENTAGE', '{"GTE":0,"LTE":1}', N'Utilization degree of trucks (loss from stacking/packaging).', '3_Sea and road transport', '8');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max validity period of container freight rates [days]', 'VALID_DAYS', 'INT', '{"GT": 0}', N'After the validity period expires, no logistics cost calculations are possible with the current freight rates. This mechanism ensures that freight rates are regularly updated or verified by a freight rate key user.', '1_General', '8');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Metropolitan region size (diameter) [km]', 'RADIUS_REGION', 'INT', '{"GT": 0}', N'If there are no kilometer rates within a country, it is possible to use container rates from neighboring logistics nodes. However, the node must be within the metropolitan region radius.', '1_General', '9');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Min delivery frequency / year for container transports', 'FREQ_MIN', 'INT', '{"GT": 0, "LT": 366}', N'Low runners: Indicates the number of annual deliveries when the annual demand is lower than the content of a handling unit (The HU is then split up)', '1_General', '10');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max delivery frequency / year for container transport', 'FREQ_MAX', 'INT', '{"GT": 0, "LT": 366}', N'High runners: Indicates the maximum number of annual deliveries. (If the annual demand exceeds this number, one delivery contains more than one HU). Please note that this value affects the storage space cost.', '1_General', '11');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max weight load 20 ft. container [kg]', 'TEU_LOAD', 'INT', '{"GT": 0}', N'Weight limit of TEU container.', '3_Sea and road transport', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max weight load 40 ft. container [kg]', 'FEU_LOAD', 'INT', '{"GT": 0}', N'Weight limit of FEU container (may be restricted by law, e.g. CN truck load = 21 tons).', '3_Sea and road transport', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Max weight load truck [kg]', 'TRUCK_LOAD', 'INT', '{"GT": 0}', N'Weight limit of standard truck.', '3_Sea and road transport', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Pre-carriage [EUR/kg]', 'AIR_PRECARRIAGE', 'CURRENCY', '{"GTE": 0}', N'The pre-carriage costs per kilogram to the departure airport when calculating air freight costs.', '4_Air transport', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Pre-carriage handling [EUR]', 'AIR_HANDLING', 'CURRENCY', '{"GTE": 0}', N'One-time costs for processing documents in an air freight transport.', '4_Air transport', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Main carriage [EUR/kg]', 'AIR_MAINCARRIAGE', 'CURRENCY', '{"GTE": 0}', N'Air freight costs per kg on the route from China to Germany.', '4_Air transport', '3');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Hand over fee [EUR]', 'AIR_HANDOVER_FEE', 'CURRENCY', '{"GTE": 0}', N'One-time handover costs for air freight transports.', '4_Air transport', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Customs clearance fee [EUR]', 'AIR_CUSTOM_FEE', 'CURRENCY', '{"GTE": 0}', N'One-time costs for customs clearance in air freight transports.', '4_Air transport', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'On-carriage [EUR/kg]', 'AIR_ONCARRIAGE', 'CURRENCY', '{"GTE": 0}', N'On-carriage costs per kilogram from destination airport to final destination when calculating air freight costs.', '4_Air transport', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Terminal handling fee [EUR/kg]', 'AIR_TERMINAL_FEE', 'CURRENCY', '{"GTE": 0}', N'Terminal handling charges per kilogram for air freight transports.', '4_Air transport', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GR handling KLT [EUR/HU]', 'KLT_HANDLING', 'CURRENCY', '{"GTE": 0}', N'Handling costs per received small load carrier (KLTs are handling units under 0.08 m³ volume) at German wage level.', '5_Warehouse', '4');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GR handling GLT [EUR/HU]', 'GLT_HANDLING', 'CURRENCY', '{"GTE": 0}', N'Handling costs per received large load carrier (GLT are handling units over 0.08 m³ volume) at German wage level.', '5_Warehouse', '5');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT booking & document handling [EUR/GR]', 'BOOKING', 'CURRENCY', '{"GTE": 0}', N'One-time document handling fee per GLT at German wage level.', '5_Warehouse', '2');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT release from storage [EUR/GLT]', 'GLT_RELEASE', 'CURRENCY', '{"GTE": 0}', N'Cost to release one GLT from storage at German wage level.', '5_Warehouse', '12');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'KLT release from storage [EUR/KLT]', 'KLT_RELEASE', 'CURRENCY', '{"GTE": 0}', N'Cost to release one KLT from storage at German wage level.', '5_Warehouse', '11');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT dispatch [EUR/GLT]', 'GLT_DISPATCH', 'CURRENCY', '{"GTE": 0}', N'Cost to dispatch one GLT at German wage level.', '5_Warehouse', '14');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'KLT dispatch [EUR/KLT]', 'KLT_DISPATCH', 'CURRENCY', '{"GTE": 0}', N'Cost to dispatch one KLT at German wage level.', '5_Warehouse', '13');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking KLT, HU <15kg [EUR/HU]', 'KLT_REPACK_S', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one KLT (with a weight under 15 kg) from one-way to returnable at German wage level.', '5_Warehouse', '6');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking KLT, HU >=15kg [EUR/HU]', 'KLT_REPACK_M', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one KLT (with a weight of 15 kg or more) from one-way to returnable with crane at German wage level.', '5_Warehouse', '7');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking GLT, HU <15kg [EUR/HU]', 'GLT_REPACK_S', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one GLT (with a weight under 15 kg) from one-way to returnable at German wage level.', '5_Warehouse', '8');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking GLT, HU 15 - 2000kg [EUR/HU]', 'GLT_REPACK_M', 'CURRENCY', '{"GTE": 0}', N'Cost to repack one GLT (with a weight of at least 15 kg and at most 2000 kg) from one-way to returnable with crane at German wage level.', '5_Warehouse', '9');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Repacking GLT, HU >2000kg [EUR/HU]', 'GLT_REPACK_L', 'INT', '{"GTE": 0}', N'Cost to repack one GLT (with a weight over 2000 kg) from one-way to returnable with crane at German wage level.', '5_Warehouse', '10');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'GLT disposal [EUR/GLT]', 'DISPOSAL', 'INT', '{"GTE": 0}', N'Cost to dispose one wooden pallet.', '5_Warehouse', '15');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'Space costs per cbm per night [EUR/cbm]', 'SPACE_COST', 'CURRENCY', '{"GTE": 0}', N'The storage costs incurred for a storage space of 1 square meter per started height unit (meter) and per day. E.g.: 1 Euro pallet with 1.8 m height is calculated as 1.2 x 0.8 x SPACE_COST x 2, where SPACE_COST is the entered price.', '5_Warehouse', '1');
INSERT INTO system_property_type ( name, external_mapping_id, data_type, validation_rule, description, property_group, sequence_number) VALUES ( N'KLT booking & document handling [EUR/GR]', 'BOOKING_KLT', 'CURRENCY', '{"GTE": 0}', N'One-time document handling fee per KLT at German wage level.', '5_Warehouse', '3');
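-- Worked example for the SPACE_COST formula described above (illustration only, not part of the import):
-- a Euro pallet footprint of 1.2 m x 0.8 m stacked 1.8 m high occupies 2 started height units, so with
-- the SPACE_COST value seeded further below (0.2630136986) the storage cost per day is
-- 1.2 * 0.8 * 0.2630136986 * 2 = ~0.50 EUR. The commented query can be run to check the arithmetic:
-- SELECT 1.2 * 0.8 * 0.2630136986 * 2 AS euro_pallet_space_cost_per_day;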
-- ===================================================
-- INSERT statements for system_property
-- Subqueries are used to resolve the IDs dynamically
-- ===================================================
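-- The statements below repeat the same subquery to resolve the currently valid property set id.
-- As an alternative sketch (not used by this script, and assuming the id columns are BIGINT),
-- the id could be captured once in a variable and reused:
-- DECLARE @property_set_id BIGINT =
--     (SELECT TOP 1 ps.id FROM property_set ps
--      WHERE ps.state = 'VALID'
--        AND ps.start_date <= GETDATE()
--        AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
--      ORDER BY ps.start_date DESC);
-- INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
-- VALUES (@property_set_id,
--         (SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'PAYMENT_TERMS'),
--         '30');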
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'PAYMENT_TERMS'),
'30'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'START_REF'),
'CNXMN'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'END_REF'),
'DEHAM'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'RISK_REF'),
'20000.00'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'CHANCE_REF'),
'1000.00'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TRUCK_UTIL'),
'0.7'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'WORKDAYS'),
'210'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'INTEREST_RATE'),
'0.12'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FCA_FEE'),
'0.002'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TARIFF_RATE'),
'0.03'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'CUSTOM_FEE'),
'35'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'REPORTING'),
'MEK_B'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FEU'),
'true'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TEU'),
'true'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FEU_HQ'),
'true'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'CONTAINER_UTIL'),
'0.7'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'VALID_DAYS'),
'60'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'RADIUS_REGION'),
'20'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FREQ_MIN'),
'3'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FREQ_MAX'),
'50'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TEU_LOAD'),
'20000'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'FEU_LOAD'),
'21000'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'TRUCK_LOAD'),
'25000'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_PRECARRIAGE'),
'0.1'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_HANDLING'),
'80'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_MAINCARRIAGE'),
'3.5'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_HANDOVER_FEE'),
'35'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_CUSTOM_FEE'),
'45'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_ONCARRIAGE'),
'0.2'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'AIR_TERMINAL_FEE'),
'0.2'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_HANDLING'),
'0.71'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_HANDLING'),
'3.5'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'BOOKING'),
'3.5'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'BOOKING_KLT'),
'0.35'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_RELEASE'),
'2.23'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_RELEASE'),
'1.12'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_DISPATCH'),
'1.61'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_DISPATCH'),
'0.333'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_REPACK_S'),
'2.08'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'KLT_REPACK_M'),
'3.02'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_REPACK_S'),
'3.02'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_REPACK_M'),
'7.76'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'GLT_REPACK_L'),
'14'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'DISPOSAL'),
'6'
);
INSERT INTO system_property (property_set_id, system_property_type_id, property_value)
VALUES (
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
(SELECT spt.id FROM system_property_type spt WHERE spt.external_mapping_id = 'SPACE_COST'),
'0.2630136986'
);
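-- Optional verification (commented, for testing): lists the imported system properties together
-- with their type names, ordered by property group and sequence number.
-- SELECT spt.property_group, spt.sequence_number, spt.external_mapping_id, spt.name, sp.property_value
-- FROM system_property sp
-- JOIN system_property_type spt ON spt.id = sp.system_property_type_id
-- ORDER BY spt.property_group, spt.sequence_number;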


@@ -0,0 +1,685 @@
-- Country Data Import SQL Script
-- Generated from Lastenheft_Requirements Appendix A_Länder 1.csv
-- ===================================================
-- INSERT a property set if not exists.
-- ===================================================
INSERT INTO property_set (state)
SELECT 'VALID'
WHERE NOT EXISTS (
SELECT 1 FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
);
-- =============================================================================
-- 1. INSERT COUNTRY PROPERTY TYPES
-- =============================================================================
INSERT INTO country_property_type
(name, external_mapping_id, data_type, validation_rule, is_required, description, property_group, sequence_number)
VALUES
('Customs Union', 'UNION', 'ENUMERATION', '{ "ENUM" : ["EU", "NONE"]}', 0, 'Specifies the customs union in which the country is located. When crossing a customs union border, customs costs are added to the calculation result.', 'General', 1),
('Safety Stock [working days]', 'SAFETY_STOCK', 'INT', '{"GTE": 0}', 0, 'Specifies the safety stock in working days that is maintained when sourcing from this country.', 'General', 2),
('Air Freight Share [%]', 'AIR_SHARE', 'PERCENTAGE', '{"GTE": 0}', 0, 'Specifies the maximum air freight proportion that is included in the calculation when sourcing from this country. The actual air freight proportion that is used additionally depends on the overseas share of the part number and lies between 0% and this value.', 'General', 3),
('Wage Factor [%]', 'WAGE', 'PERCENTAGE', '{"GT": 0}', 0, 'Specifies the wage factor level for calculating handling costs in relation to the German wage factor level.', 'General', 4);
-- =============================================================================
-- 2. INSERT COUNTRIES
-- =============================================================================
INSERT INTO country (iso_code, name, region_code, is_deprecated) VALUES
('AD', N'Andorra', 'EMEA', 0),
('AE', N'United Arab Emirates', 'EMEA', 0),
('AF', N'Afghanistan', 'EMEA', 0),
('AG', N'Antigua and Barbuda', 'LATAM', 0),
('AI', N'Anguilla', 'LATAM', 0),
('AL', N'Albania', 'EMEA', 0),
('AM', N'Armenia', 'EMEA', 0),
('AO', N'Angola', 'EMEA', 0),
('AQ', N'Antarctica', 'EMEA', 0),
('AR', N'Argentina', 'LATAM', 0),
('AS', N'American Samoa', 'APAC', 0),
('AT', N'Austria', 'EMEA', 0),
('AU', N'Australia', 'APAC', 0),
('AW', N'Aruba', 'LATAM', 0),
('AX', N'Åland Islands', 'EMEA', 0),
('AZ', N'Azerbaijan', 'EMEA', 0),
('BA', N'Bosnia and Herzegovina', 'EMEA', 0),
('BB', N'Barbados', 'LATAM', 0),
('BD', N'Bangladesh', 'EMEA', 0),
('BE', N'Belgium', 'EMEA', 0),
('BF', N'Burkina Faso', 'EMEA', 0),
('BG', N'Bulgaria', 'EMEA', 0),
('BH', N'Bahrain', 'EMEA', 0),
('BI', N'Burundi', 'EMEA', 0),
('BJ', N'Benin', 'EMEA', 0),
('BL', N'Saint Barthélemy', 'LATAM', 0),
('BM', N'Bermuda', 'NAM', 0),
('BN', N'Brunei Darussalam', 'APAC', 0),
('BO', N'Bolivia', 'LATAM', 0),
('BQ', N'Bonaire, Sint Eustatius and Saba', 'LATAM', 0),
('BR', N'Brazil', 'LATAM', 0),
('BS', N'Bahamas', 'LATAM', 0),
('BT', N'Bhutan', 'APAC', 0),
('BV', N'Bouvet Island', 'EMEA', 0),
('BW', N'Botswana', 'EMEA', 0),
('BY', N'Belarus', 'EMEA', 0),
('BZ', N'Belize', 'LATAM', 0),
('CA', N'Canada', 'NAM', 0),
('CC', N'Cocos (Keeling) Islands', 'APAC', 0),
('CD', N'Congo, Democratic Republic', 'EMEA', 0),
('CF', N'Central African Republic', 'EMEA', 0),
('CG', N'Congo', 'EMEA', 0),
('CH', N'Switzerland', 'EMEA', 0),
('CI', N'Côte d''Ivoire', 'EMEA', 0),
('CK', N'Cook Islands', 'APAC', 0),
('CL', N'Chile', 'LATAM', 0),
('CM', N'Cameroon', 'EMEA', 0),
('CN', N'China', 'APAC', 0),
('CO', N'Colombia', 'LATAM', 0),
('CR', N'Costa Rica', 'LATAM', 0),
('CU', N'Cuba', 'LATAM', 0),
('CV', N'Cabo Verde', 'EMEA', 0),
('CW', N'Curaçao', 'LATAM', 0),
('CX', N'Christmas Island', 'APAC', 0),
('CY', N'Cyprus', 'EMEA', 0),
('CZ', N'Czech Republic', 'EMEA', 0),
('DE', N'Germany', 'EMEA', 0),
('DJ', N'Djibouti', 'EMEA', 0),
('DK', N'Denmark', 'EMEA', 0),
('DM', N'Dominica', 'LATAM', 0),
('DO', N'Dominican Republic', 'LATAM', 0),
('DZ', N'Algeria', 'EMEA', 0),
('EC', N'Ecuador', 'LATAM', 0),
('EE', N'Estonia', 'EMEA', 0),
('EG', N'Egypt', 'EMEA', 0),
('EH', N'Western Sahara', 'EMEA', 0),
('ER', N'Eritrea', 'EMEA', 0),
('ES', N'Spain', 'EMEA', 0),
('ET', N'Ethiopia', 'EMEA', 0),
('FI', N'Finland', 'EMEA', 0),
('FJ', N'Fiji', 'APAC', 0),
('FK', N'Falkland Islands', 'LATAM', 0),
('FM', N'Micronesia', 'APAC', 0),
('FO', N'Faroe Islands', 'EMEA', 0),
('FR', N'France', 'EMEA', 0),
('GA', N'Gabon', 'EMEA', 0),
('GB', N'United Kingdom', 'EMEA', 0),
('GD', N'Grenada', 'LATAM', 0),
('GE', N'Georgia', 'EMEA', 0),
('GF', N'French Guiana', 'LATAM', 0),
('GG', N'Guernsey', 'EMEA', 0),
('GH', N'Ghana', 'EMEA', 0),
('GI', N'Gibraltar', 'EMEA', 0),
('GL', N'Greenland', 'NAM', 0),
('GM', N'Gambia', 'EMEA', 0),
('GN', N'Guinea', 'EMEA', 0),
('GP', N'Guadeloupe', 'LATAM', 0),
('GQ', N'Equatorial Guinea', 'EMEA', 0),
('GR', N'Greece', 'EMEA', 0),
('GS', N'South Georgia and South Sandwich Islands', 'LATAM', 0),
('GT', N'Guatemala', 'LATAM', 0),
('GU', N'Guam', 'APAC', 0),
('GW', N'Guinea-Bissau', 'EMEA', 0),
('GY', N'Guyana', 'LATAM', 0),
('HK', N'Hong Kong', 'APAC', 0),
('HM', N'Heard Island and McDonald Islands', 'APAC', 0),
('HN', N'Honduras', 'LATAM', 0),
('HR', N'Croatia', 'EMEA', 0),
('HT', N'Haiti', 'LATAM', 0),
('HU', N'Hungary', 'EMEA', 0),
('ID', N'Indonesia', 'APAC', 0),
('IE', N'Ireland', 'EMEA', 0),
('IL', N'Israel', 'EMEA', 0),
('IM', N'Isle of Man', 'EMEA', 0),
('IN', N'India', 'APAC', 0),
('IO', N'British Indian Ocean Territory', 'APAC', 0),
('IQ', N'Iraq', 'EMEA', 0),
('IR', N'Iran', 'EMEA', 0),
('IS', N'Iceland', 'EMEA', 0),
('IT', N'Italy', 'EMEA', 0),
('JE', N'Jersey', 'EMEA', 0),
('JM', N'Jamaica', 'LATAM', 0),
('JO', N'Jordan', 'EMEA', 0),
('JP', N'Japan', 'APAC', 0),
('KE', N'Kenya', 'EMEA', 0),
('KG', N'Kyrgyzstan', 'EMEA', 0),
('KH', N'Cambodia', 'APAC', 0),
('KI', N'Kiribati', 'APAC', 0),
('KM', N'Comoros', 'EMEA', 0),
('KN', N'Saint Kitts and Nevis', 'LATAM', 0),
('KP', N'Korea, North', 'APAC', 0),
('KR', N'Korea, South', 'APAC', 0),
('KW', N'Kuwait', 'EMEA', 0),
('KY', N'Cayman Islands', 'LATAM', 0),
('KZ', N'Kazakhstan', 'EMEA', 0),
('LA', N'Laos', 'APAC', 0),
('LB', N'Lebanon', 'EMEA', 0),
('LC', N'Saint Lucia', 'LATAM', 0),
('LI', N'Liechtenstein', 'EMEA', 0),
('LK', N'Sri Lanka', 'APAC', 0),
('LR', N'Liberia', 'EMEA', 0),
('LS', N'Lesotho', 'EMEA', 0),
('LT', N'Lithuania', 'EMEA', 0),
('LU', N'Luxembourg', 'EMEA', 0),
('LV', N'Latvia', 'EMEA', 0),
('LY', N'Libya', 'EMEA', 0),
('MA', N'Morocco', 'EMEA', 0),
('MC', N'Monaco', 'EMEA', 0),
('MD', N'Moldova', 'EMEA', 0),
('ME', N'Montenegro', 'EMEA', 0),
('MF', N'Saint Martin', 'LATAM', 0),
('MG', N'Madagascar', 'EMEA', 0),
('MH', N'Marshall Islands', 'APAC', 0),
('MK', N'North Macedonia', 'EMEA', 0),
('ML', N'Mali', 'EMEA', 0),
('MM', N'Myanmar', 'APAC', 0),
('MN', N'Mongolia', 'APAC', 0),
('MO', N'Macao', 'APAC', 0),
('MP', N'Northern Mariana Islands', 'APAC', 0),
('MQ', N'Martinique', 'LATAM', 0),
('MR', N'Mauritania', 'EMEA', 0),
('MS', N'Montserrat', 'LATAM', 0),
('MT', N'Malta', 'EMEA', 0),
('MU', N'Mauritius', 'EMEA', 0),
('MV', N'Maldives', 'APAC', 0),
('MW', N'Malawi', 'EMEA', 0),
('MX', N'Mexico', 'LATAM', 0),
('MY', N'Malaysia', 'APAC', 0),
('MZ', N'Mozambique', 'EMEA', 0),
('NA', N'Namibia', 'EMEA', 0),
('NC', N'New Caledonia', 'APAC', 0),
('NE', N'Niger', 'EMEA', 0),
('NF', N'Norfolk Island', 'APAC', 0),
('NG', N'Nigeria', 'EMEA', 0),
('NI', N'Nicaragua', 'LATAM', 0),
('NL', N'Netherlands', 'EMEA', 0),
('NO', N'Norway', 'EMEA', 0),
('NP', N'Nepal', 'APAC', 0),
('NR', N'Nauru', 'APAC', 0),
('NU', N'Niue', 'APAC', 0),
('NZ', N'New Zealand', 'APAC', 0),
('OM', N'Oman', 'EMEA', 0),
('PA', N'Panama', 'LATAM', 0),
('PE', N'Peru', 'LATAM', 0),
('PF', N'French Polynesia', 'APAC', 0),
('PG', N'Papua New Guinea', 'APAC', 0),
('PH', N'Philippines', 'APAC', 0),
('PK', N'Pakistan', 'APAC', 0),
('PL', N'Poland', 'EMEA', 0),
('PM', N'Saint Pierre and Miquelon', 'NAM', 0),
('PN', N'Pitcairn', 'APAC', 0),
('PR', N'Puerto Rico', 'LATAM', 0),
('PS', N'Palestine', 'EMEA', 0),
('PT', N'Portugal', 'EMEA', 0),
('PW', N'Palau', 'APAC', 0),
('PY', N'Paraguay', 'LATAM', 0),
('QA', N'Qatar', 'EMEA', 0),
('RE', N'Réunion', 'EMEA', 0),
('RO', N'Romania', 'EMEA', 0),
('RS', N'Serbia', 'EMEA', 0),
('RU', N'Russian Federation', 'EMEA', 0),
('RW', N'Rwanda', 'EMEA', 0),
('SA', N'Saudi Arabia', 'EMEA', 0),
('SB', N'Solomon Islands', 'APAC', 0),
('SC', N'Seychelles', 'EMEA', 0),
('SD', N'Sudan', 'EMEA', 0),
('SE', N'Sweden', 'EMEA', 0),
('SG', N'Singapore', 'APAC', 0),
('SH', N'Saint Helena', 'EMEA', 0),
('SI', N'Slovenia', 'EMEA', 0),
('SJ', N'Svalbard and Jan Mayen', 'EMEA', 0),
('SK', N'Slovakia', 'EMEA', 0),
('SL', N'Sierra Leone', 'EMEA', 0),
('SM', N'San Marino', 'EMEA', 0),
('SN', N'Senegal', 'EMEA', 0),
('SO', N'Somalia', 'EMEA', 0),
('SR', N'Suriname', 'LATAM', 0),
('SS', N'South Sudan', 'EMEA', 0),
('ST', N'Sao Tome and Principe', 'EMEA', 0),
('SV', N'El Salvador', 'LATAM', 0),
('SX', N'Sint Maarten', 'LATAM', 0),
('SY', N'Syrian Arab Republic', 'EMEA', 0),
('SZ', N'Eswatini', 'EMEA', 0),
('TC', N'Turks and Caicos Islands', 'LATAM', 0),
('TD', N'Chad', 'EMEA', 0),
('TF', N'French Southern Territories', 'EMEA', 0),
('TG', N'Togo', 'EMEA', 0),
('TH', N'Thailand', 'APAC', 0),
('TJ', N'Tajikistan', 'EMEA', 0),
('TK', N'Tokelau', 'APAC', 0),
('TL', N'Timor-Leste', 'APAC', 0),
('TM', N'Turkmenistan', 'EMEA', 0),
('TN', N'Tunisia', 'EMEA', 0),
('TO', N'Tonga', 'APAC', 0),
('TR', N'Turkey', 'EMEA', 0),
('TT', N'Trinidad and Tobago', 'LATAM', 0),
('TV', N'Tuvalu', 'APAC', 0),
('TW', N'Taiwan', 'APAC', 0),
('TZ', N'Tanzania', 'EMEA', 0),
('UA', N'Ukraine', 'EMEA', 0),
('UG', N'Uganda', 'EMEA', 0),
('UM', N'United States Minor Outlying Islands', 'APAC', 0),
('US', N'United States', 'NAM', 0),
('UY', N'Uruguay', 'LATAM', 0),
('UZ', N'Uzbekistan', 'EMEA', 0),
('VA', N'Vatican City', 'EMEA', 0),
('VC', N'Saint Vincent and the Grenadines', 'LATAM', 0),
('VE', N'Venezuela', 'LATAM', 0),
('VG', N'Virgin Islands, British', 'LATAM', 0),
('VI', N'Virgin Islands, U.S.', 'LATAM', 0),
('VN', N'Viet Nam', 'APAC', 0),
('VU', N'Vanuatu', 'APAC', 0),
('WF', N'Wallis and Futuna', 'APAC', 0),
('WS', N'Samoa', 'APAC', 0),
('YE', N'Yemen', 'EMEA', 0),
('YT', N'Mayotte', 'EMEA', 0),
('ZA', N'South Africa', 'EMEA', 0),
('ZM', N'Zambia', 'EMEA', 0),
('ZW', N'Zimbabwe', 'EMEA', 0),
('XK', N'Kosovo', 'EMEA', 0);
-- =============================================================================
-- 3. INSERT COUNTRY PROPERTIES
-- =============================================================================
-- Note: Uses the currently valid property set (state = 'VALID' and within date range).
-- If no valid property set exists, these inserts will fail with a NOT NULL constraint violation.
-- To create a new property set if none exists, uncomment the following:
-- INSERT INTO property_set (start_date, state) VALUES (GETDATE(), 'VALID');
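-- Quick pre-check (commented, illustration only): the query below should return exactly one row
-- before running the property inserts; if it returns nothing, create a property set first.
-- SELECT TOP 1 ps.id FROM property_set ps
-- WHERE ps.state = 'VALID'
--   AND ps.start_date <= GETDATE()
--   AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
-- ORDER BY ps.start_date DESC;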
-- Customs Union Properties (only for EU countries)
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
c.id,
cpt.id,
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
CASE
WHEN c.iso_code IN ('AT', 'BE', 'BG', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'MT', 'NL', 'PL', 'PT', 'RO', 'SE', 'SI', 'SK')
THEN 'EU'
ELSE 'NONE'
END
FROM country c, country_property_type cpt
WHERE cpt.external_mapping_id = 'UNION';
-- Safety Stock Properties
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
c.id,
cpt.id,
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
CASE c.iso_code
WHEN 'AD' THEN N'15'
WHEN 'AE' THEN N'20'
WHEN 'AF' THEN N'30'
WHEN 'AG' THEN N'55'
WHEN 'AI' THEN N'55'
WHEN 'AL' THEN N'15'
WHEN 'AM' THEN N'15'
WHEN 'AO' THEN N'15'
WHEN 'AQ' THEN N'55'
WHEN 'AR' THEN N'55'
WHEN 'AS' THEN N'55'
WHEN 'AT' THEN N'10'
WHEN 'AU' THEN N'55'
WHEN 'AW' THEN N'55'
WHEN 'AZ' THEN N'15'
WHEN 'BA' THEN N'15'
WHEN 'BB' THEN N'55'
WHEN 'BD' THEN N'55'
WHEN 'BE' THEN N'10'
WHEN 'BF' THEN N'30'
WHEN 'BG' THEN N'10'
WHEN 'BH' THEN N'20'
WHEN 'BI' THEN N'30'
WHEN 'BJ' THEN N'30'
WHEN 'BL' THEN N'30'
WHEN 'BM' THEN N'55'
WHEN 'BN' THEN N'55'
WHEN 'BO' THEN N'55'
WHEN 'BQ' THEN N'55'
WHEN 'BR' THEN N'55'
WHEN 'BS' THEN N'55'
WHEN 'BT' THEN N'55'
WHEN 'BV' THEN N'30'
WHEN 'BW' THEN N'15'
WHEN 'BY' THEN N'55'
WHEN 'BZ' THEN N'55'
WHEN 'CA' THEN N'55'
WHEN 'CC' THEN N'55'
WHEN 'CD' THEN N'30'
WHEN 'CF' THEN N'30'
WHEN 'CG' THEN N'30'
WHEN 'CH' THEN N'10'
WHEN 'CI' THEN N'30'
WHEN 'CK' THEN N'30'
WHEN 'CL' THEN N'55'
WHEN 'CM' THEN N'30'
WHEN 'CN' THEN N'55'
WHEN 'CO' THEN N'55'
WHEN 'CR' THEN N'55'
WHEN 'CU' THEN N'55'
WHEN 'CV' THEN N'30'
WHEN 'CW' THEN N'30'
WHEN 'CX' THEN N'55'
WHEN 'CY' THEN N'10'
WHEN 'CZ' THEN N'10'
WHEN 'DE' THEN N'10'
WHEN 'DJ' THEN N'30'
WHEN 'DK' THEN N'10'
WHEN 'DM' THEN N'55'
WHEN 'DO' THEN N'55'
WHEN 'DZ' THEN N'10'
WHEN 'EC' THEN N'55'
WHEN 'EE' THEN N'10'
WHEN 'EG' THEN N'30'
WHEN 'EH' THEN N'30'
WHEN 'ER' THEN N'30'
WHEN 'ES' THEN N'10'
WHEN 'ET' THEN N'30'
WHEN 'FI' THEN N'10'
WHEN 'FJ' THEN N'55'
WHEN 'FK' THEN N'55'
WHEN 'FM' THEN N'55'
WHEN 'FO' THEN N'30'
WHEN 'FR' THEN N'10'
WHEN 'GA' THEN N'30'
WHEN 'GB' THEN N'30'
WHEN 'GD' THEN N'55'
WHEN 'GE' THEN N'10'
WHEN 'GF' THEN N'30'
WHEN 'GG' THEN N'30'
WHEN 'GH' THEN N'30'
WHEN 'GI' THEN N'10'
WHEN 'GL' THEN N'30'
WHEN 'GM' THEN N'30'
WHEN 'GN' THEN N'30'
WHEN 'GP' THEN N'30'
WHEN 'GQ' THEN N'30'
WHEN 'GR' THEN N'10'
WHEN 'GS' THEN N'55'
WHEN 'GT' THEN N'55'
WHEN 'GU' THEN N'55'
WHEN 'GW' THEN N'30'
WHEN 'GY' THEN N'55'
WHEN 'HK' THEN N'55'
WHEN 'HM' THEN N'30'
WHEN 'HN' THEN N'55'
WHEN 'HR' THEN N'10'
WHEN 'HT' THEN N'55'
WHEN 'HU' THEN N'10'
WHEN 'ID' THEN N'55'
WHEN 'IE' THEN N'10'
WHEN 'IL' THEN N'30'
WHEN 'IM' THEN N'30'
WHEN 'IN' THEN N'55'
WHEN 'IO' THEN N'55'
WHEN 'IQ' THEN N'30'
WHEN 'IR' THEN N'30'
WHEN 'IS' THEN N'20'
WHEN 'IT' THEN N'10'
WHEN 'JE' THEN N'30'
WHEN 'JM' THEN N'55'
WHEN 'JO' THEN N'30'
WHEN 'JP' THEN N'55'
WHEN 'KE' THEN N'30'
WHEN 'KG' THEN N'30'
WHEN 'KH' THEN N'55'
WHEN 'KI' THEN N'55'
WHEN 'KM' THEN N'30'
WHEN 'KN' THEN N'55'
WHEN 'KP' THEN N'55'
WHEN 'KR' THEN N'55'
WHEN 'KW' THEN N'30'
WHEN 'KY' THEN N'55'
WHEN 'KZ' THEN N'30'
WHEN 'LA' THEN N'55'
WHEN 'LB' THEN N'30'
WHEN 'LC' THEN N'55'
WHEN 'LI' THEN N'10'
WHEN 'LK' THEN N'55'
WHEN 'LR' THEN N'30'
WHEN 'LS' THEN N'30'
WHEN 'LT' THEN N'10'
WHEN 'LU' THEN N'10'
WHEN 'LV' THEN N'10'
WHEN 'LY' THEN N'30'
WHEN 'MA' THEN N'20'
WHEN 'MC' THEN N'30'
WHEN 'MD' THEN N'30'
WHEN 'ME' THEN N'30'
WHEN 'MF' THEN N'30'
WHEN 'MG' THEN N'30'
WHEN 'MH' THEN N'55'
WHEN 'MK' THEN N'30'
WHEN 'ML' THEN N'30'
WHEN 'MM' THEN N'55'
WHEN 'MN' THEN N'55'
WHEN 'MO' THEN N'55'
WHEN 'MP' THEN N'55'
WHEN 'MQ' THEN N'30'
WHEN 'MR' THEN N'30'
WHEN 'MS' THEN N'55'
WHEN 'MT' THEN N'10'
WHEN 'MU' THEN N'30'
WHEN 'MV' THEN N'55'
WHEN 'MW' THEN N'30'
WHEN 'MX' THEN N'55'
WHEN 'MY' THEN N'55'
WHEN 'MZ' THEN N'30'
WHEN 'NA' THEN N'30'
WHEN 'NC' THEN N'30'
WHEN 'NE' THEN N'30'
WHEN 'NF' THEN N'55'
WHEN 'NG' THEN N'30'
WHEN 'NI' THEN N'55'
WHEN 'NL' THEN N'10'
WHEN 'NO' THEN N'10'
WHEN 'NP' THEN N'55'
WHEN 'NR' THEN N'55'
WHEN 'NU' THEN N'55'
WHEN 'NZ' THEN N'55'
WHEN 'OM' THEN N'30'
WHEN 'PA' THEN N'55'
WHEN 'PE' THEN N'55'
WHEN 'PF' THEN N'30'
WHEN 'PG' THEN N'55'
WHEN 'PH' THEN N'55'
WHEN 'PK' THEN N'55'
WHEN 'PL' THEN N'10'
WHEN 'PM' THEN N'30'
WHEN 'PN' THEN N'55'
WHEN 'PR' THEN N'55'
WHEN 'PS' THEN N'30'
WHEN 'PT' THEN N'10'
WHEN 'PW' THEN N'55'
WHEN 'PY' THEN N'55'
WHEN 'QA' THEN N'30'
WHEN 'RE' THEN N'30'
WHEN 'RO' THEN N'10'
WHEN 'RS' THEN N'10'
WHEN 'RU' THEN N'30'
WHEN 'RW' THEN N'30'
WHEN 'SA' THEN N'30'
WHEN 'SB' THEN N'55'
WHEN 'SC' THEN N'30'
WHEN 'SD' THEN N'30'
WHEN 'SE' THEN N'10'
WHEN 'SG' THEN N'55'
WHEN 'SH' THEN N'30'
WHEN 'SI' THEN N'10'
WHEN 'SJ' THEN N'55'
WHEN 'SK' THEN N'10'
WHEN 'SL' THEN N'30'
WHEN 'SM' THEN N'30'
WHEN 'SN' THEN N'30'
WHEN 'SO' THEN N'30'
WHEN 'SR' THEN N'55'
WHEN 'SS' THEN N'30'
WHEN 'ST' THEN N'30'
WHEN 'SV' THEN N'55'
WHEN 'SX' THEN N'30'
WHEN 'SY' THEN N'30'
WHEN 'SZ' THEN N'30'
WHEN 'TC' THEN N'55'
WHEN 'TD' THEN N'30'
WHEN 'TF' THEN N'30'
WHEN 'TG' THEN N'30'
WHEN 'TH' THEN N'55'
WHEN 'TJ' THEN N'30'
WHEN 'TK' THEN N'55'
WHEN 'TL' THEN N'55'
WHEN 'TM' THEN N'30'
WHEN 'TN' THEN N'30'
WHEN 'TO' THEN N'55'
WHEN 'TR' THEN N'15'
WHEN 'TT' THEN N'55'
WHEN 'TV' THEN N'55'
WHEN 'TW' THEN N'55'
WHEN 'TZ' THEN N'30'
WHEN 'UA' THEN N'55'
WHEN 'UG' THEN N'30'
WHEN 'UM' THEN N'55'
WHEN 'US' THEN N'55'
WHEN 'UY' THEN N'55'
WHEN 'UZ' THEN N'30'
WHEN 'VA' THEN N'30'
WHEN 'VC' THEN N'55'
WHEN 'VE' THEN N'55'
WHEN 'VG' THEN N'55'
WHEN 'VI' THEN N'55'
WHEN 'VN' THEN N'55'
WHEN 'VU' THEN N'55'
WHEN 'WF' THEN N'30'
WHEN 'WS' THEN N'55'
WHEN 'YE' THEN N'30'
WHEN 'YT' THEN N'30'
WHEN 'ZA' THEN N'30'
WHEN 'ZM' THEN N'30'
WHEN 'ZW' THEN N'30'
WHEN 'XK' THEN N'55'
END
FROM country c, country_property_type cpt
WHERE cpt.external_mapping_id = 'SAFETY_STOCK';
-- Air Freight Share Properties (0.03 for countries with a safety stock of 55 working days, otherwise 0)
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
c.id,
cpt.id,
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
CASE
WHEN cp_safety.property_value = '55' THEN N'0.03'
ELSE '0'
END
FROM country c
CROSS JOIN country_property_type cpt
LEFT JOIN country_property cp_safety
ON cp_safety.country_id = c.id
AND cp_safety.country_property_type_id = (
SELECT id FROM country_property_type
WHERE external_mapping_id = 'SAFETY_STOCK'
)
AND cp_safety.property_set_id = (
SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY)
WHERE cpt.external_mapping_id = 'AIR_SHARE';
-- Wage Factor Properties (countries without a specifically defined value default to 1)
INSERT INTO country_property
(country_id, country_property_type_id, property_set_id, property_value)
SELECT
c.id,
cpt.id,
(SELECT ps.id FROM property_set ps
WHERE ps.state = 'VALID'
AND ps.start_date <= GETDATE()
AND (ps.end_date IS NULL OR ps.end_date > GETDATE())
ORDER BY ps.start_date DESC
OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY),
CASE c.iso_code
WHEN 'AT' THEN N'0.99'
WHEN 'BE' THEN N'1.14'
WHEN 'BG' THEN N'0.23'
WHEN 'CZ' THEN N'0.44'
WHEN 'DE' THEN N'1.00'
WHEN 'DK' THEN N'1.16'
WHEN 'EE' THEN N'0.60'
WHEN 'ES' THEN N'0.90'
WHEN 'FI' THEN N'1.02'
WHEN 'FR' THEN N'1.05'
WHEN 'GR' THEN N'0.35'
WHEN 'HR' THEN N'0.31'
WHEN 'HU' THEN N'0.35'
WHEN 'IE' THEN N'0.97'
WHEN 'IT' THEN N'0.72'
WHEN 'LT' THEN N'0.36'
WHEN 'LU' THEN N'1.31'
WHEN 'LV' THEN N'0.33'
WHEN 'MT' THEN N'0.41'
WHEN 'NL' THEN N'1.05'
WHEN 'PL' THEN N'0.27'
WHEN 'PT' THEN N'0.41'
WHEN 'RO' THEN N'0.27'
WHEN 'SE' THEN N'0.94'
WHEN 'SI' THEN N'0.62'
WHEN 'SK' THEN N'0.42'
ELSE '1'
END
FROM country c, country_property_type cpt
WHERE cpt.external_mapping_id = 'WAGE';
-- =============================================================================
-- VERIFICATION QUERIES (Optional - for testing)
-- =============================================================================
-- Verify country count
-- SELECT COUNT(*) as total_countries FROM country;
-- Verify property types
-- SELECT * FROM country_property_type;
-- Verify EU countries with all properties
-- SELECT
-- c.iso_code,
-- c.region_code,
-- MAX(CASE WHEN cpt.name = 'Customs Union' THEN cp.property_value END) as customs_union,
-- MAX(CASE WHEN cpt.name = 'Safety Stock [working days]' THEN cp.property_value END) as safety_stock,
-- MAX(CASE WHEN cpt.name = 'Air Freight Share [%]' THEN cp.property_value END) as air_freight,
-- MAX(CASE WHEN cpt.name = 'Wage Factor [%]' THEN cp.property_value END) as wage_factor
-- FROM country c
-- JOIN country_property cp ON c.id = cp.country_id
-- JOIN country_property_type cpt ON cp.country_property_type_id = cpt.id
-- WHERE c.iso_code IN ('DE', 'FR', 'AT', 'BE', 'NL')
-- GROUP BY c.id, c.iso_code, c.region_code
-- ORDER BY c.iso_code;

File diff suppressed because it is too large


@@ -0,0 +1,804 @@
-- Automatically generated SQL statements for node predecessor chains
-- Generated from: node.xlsx
-- Format: multiple chains per node are possible (separated by ;)
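-- Generic pattern used for every chain below (sketch only, with placeholder mapping ids):
-- one row in node_predecessor_chain per chain, then one node_predecessor_entry per predecessor,
-- ordered via sequence_number. A chain with two predecessors would look like this:
-- INSERT INTO node_predecessor_chain (node_id)
-- VALUES ((SELECT id FROM node WHERE external_mapping_id = 'TARGET_NODE'));
-- DECLARE @chain_id INT = SCOPE_IDENTITY();
-- INSERT INTO node_predecessor_entry (node_id, node_predecessor_chain_id, sequence_number)
-- VALUES ((SELECT id FROM node WHERE external_mapping_id = 'FIRST_PREDECESSOR'), @chain_id, 1);
-- INSERT INTO node_predecessor_entry (node_id, node_predecessor_chain_id, sequence_number)
-- VALUES ((SELECT id FROM node WHERE external_mapping_id = 'SECOND_PREDECESSOR'), @chain_id, 2);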
-- Predecessor Chain 1: AB (Chain 1 of 2)
-- Predecessors: WH_ULHA
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'AB')
);
DECLARE @chain_id_1 INT;
SET @chain_id_1 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_ULHA'),
@chain_id_1,
1
);
-- Predecessor Chain 2: AB (Chain 2 of 2)
-- Predecessors: WH_STO
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'AB')
);
DECLARE @chain_id_2 INT;
SET @chain_id_2 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO'),
@chain_id_2,
1
);
-- Predecessor Chain 3: HH (Chain 1 of 1)
-- Predecessors: WH_HH
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'HH')
);
DECLARE @chain_id_3 INT;
SET @chain_id_3 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_HH'),
@chain_id_3,
1
);
-- Predecessor Chain 4: FGG (Chain 1 of 2)
-- Predecessors: WH_STO
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'FGG')
);
DECLARE @chain_id_4 INT;
SET @chain_id_4 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO'),
@chain_id_4,
1
);
-- Predecessor Chain 5: FGG (Chain 2 of 2)
-- Predecessors: BEZEE
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'FGG')
);
DECLARE @chain_id_5 INT;
SET @chain_id_5 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'BEZEE'),
@chain_id_5,
1
);
-- Predecessor Chain 6: KWS (Chain 1 of 2)
-- Predecessors: WH_STO
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'KWS')
);
DECLARE @chain_id_6 INT;
SET @chain_id_6 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO'),
@chain_id_6,
1
);
-- Predecessor Chain 7: KWS (Chain 2 of 2)
-- Predecessors: BEZEE
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'KWS')
);
DECLARE @chain_id_7 INT;
SET @chain_id_7 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'BEZEE'),
@chain_id_7,
1
);
-- Predecessor Chain 8: EGD (Chain 1 of 2)
-- Predecessors: WH_HH
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'EGD')
);
DECLARE @chain_id_8 INT;
SET @chain_id_8 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_HH'),
@chain_id_8,
1
);
-- Predecessor Chain 9: EGD (Chain 2 of 2)
-- Predecessors: DEHAM
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'EGD')
);
DECLARE @chain_id_9 INT;
SET @chain_id_9 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
@chain_id_9,
1
);
-- Predecessor Chain 10: CTT (Chain 1 of 2)
-- Predecessors: WH_BAT3
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CTT')
);
DECLARE @chain_id_10 INT;
SET @chain_id_10 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_BAT3'),
@chain_id_10,
1
);
-- Predecessor Chain 11: CTT (Chain 2 of 2)
-- Predecessors: WH_JEAN
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CTT')
);
DECLARE @chain_id_11 INT;
SET @chain_id_11 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_JEAN'),
@chain_id_11,
1
);
-- Predecessor Chain 12: LZZ (Chain 1 of 1)
-- Predecessors: WH_ROLO
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'LZZ')
);
DECLARE @chain_id_12 INT;
SET @chain_id_12 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_ROLO'),
@chain_id_12,
1
);
-- Predecessor Chain 13: STR (Chain 1 of 1)
-- Predecessors: WH_ZBU
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'STR')
);
DECLARE @chain_id_13 INT;
SET @chain_id_13 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_ZBU'),
@chain_id_13,
1
);
-- Predecessor Chain 14: VOP (Chain 1 of 1)
-- Predecessors: WH_BUD
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'VOP')
);
DECLARE @chain_id_14 INT;
SET @chain_id_14 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_BUD'),
@chain_id_14,
1
);
-- Predecessor Chain 15: KOL (Chain 1 of 1)
-- Predecessors: DEHAM
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'KOL')
);
DECLARE @chain_id_15 INT;
SET @chain_id_15 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
@chain_id_15,
1
);
-- Predecessor Chain 16: LIPO (Chain 1 of 1)
-- Predecessors: WH_BUD
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'LIPO')
);
DECLARE @chain_id_16 INT;
SET @chain_id_16 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_BUD'),
@chain_id_16,
1
);
-- Predecessor Chain 17: WH_ZBU (Chain 1 of 1)
-- Predecessors: DEHAM
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_ZBU')
);
DECLARE @chain_id_17 INT;
SET @chain_id_17 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
@chain_id_17,
1
);
-- Predecessor Chain 18: WH_STO (Chain 1 of 1)
-- Predecessors: BEZEE
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_STO')
);
DECLARE @chain_id_18 INT;
SET @chain_id_18 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'BEZEE'),
@chain_id_18,
1
);
-- Predecessor Chain 19: WH_HH (Chain 1 of 1)
-- Predecessors: DEHAM
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_HH')
);
DECLARE @chain_id_19 INT;
SET @chain_id_19 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
@chain_id_19,
1
);
-- Predecessor Chain 20: CNSHA (Chain 1 of 6)
-- Predecessors: Shanghai
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
);
DECLARE @chain_id_20 INT;
SET @chain_id_20 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Shanghai'),
@chain_id_20,
1
);
-- Predecessor Chain 21: CNSHA (Chain 2 of 6)
-- Predecessors: Hangzhou
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
);
DECLARE @chain_id_21 INT;
SET @chain_id_21 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Hangzhou'),
@chain_id_21,
1
);
-- Predecessor Chain 22: CNSHA (Chain 3 of 6)
-- Predecessors: Yangzhong
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
);
DECLARE @chain_id_22 INT;
SET @chain_id_22 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Yangzhong'),
@chain_id_22,
1
);
-- Predecessor Chain 23: CNSHA (Chain 4 of 6)
-- Predecessors: Taicang
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
);
DECLARE @chain_id_23 INT;
SET @chain_id_23 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Taicang'),
@chain_id_23,
1
);
-- Predecessor Chain 24: CNSHA (Chain 5 of 6)
-- Predecessors: Jingjiang
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
);
DECLARE @chain_id_24 INT;
SET @chain_id_24 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Jingjiang'),
@chain_id_24,
1
);
-- Predecessor Chain 25: CNSHA (Chain 6 of 6)
-- Predecessors: JJ
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNSHA')
);
DECLARE @chain_id_25 INT;
SET @chain_id_25 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'JJ'),
@chain_id_25,
1
);
-- Predecessor Chain 26: CNTAO (Chain 1 of 2)
-- Predecessors: Qingdao
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNTAO')
);
DECLARE @chain_id_26 INT;
SET @chain_id_26 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Qingdao'),
@chain_id_26,
1
);
-- Predecessor Chain 27: CNTAO (Chain 2 of 2)
-- Predecessors: Linfen
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNTAO')
);
DECLARE @chain_id_27 INT;
SET @chain_id_27 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Linfen'),
@chain_id_27,
1
);
-- Predecessor Chain 28: CNXMN (Chain 1 of 2)
-- Predecessors: Fuqing
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNXMN')
);
DECLARE @chain_id_28 INT;
SET @chain_id_28 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Fuqing'),
@chain_id_28,
1
);
-- Predecessor Chain 29: CNXMN (Chain 2 of 2)
-- Predecessors: LX
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNXMN')
);
DECLARE @chain_id_29 INT;
SET @chain_id_29 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'LX'),
@chain_id_29,
1
);
-- Predecessor Chain 30: INNSA (Chain 1 of 2)
-- Predecessors: Pune
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'INNSA')
);
DECLARE @chain_id_30 INT;
SET @chain_id_30 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Pune'),
@chain_id_30,
1
);
-- Predecessor Chain 31: INNSA (Chain 2 of 2)
-- Predecessors: Aurangabad
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'INNSA')
);
DECLARE @chain_id_31 INT;
SET @chain_id_31 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Aurangabad'),
@chain_id_31,
1
);
-- Predecessor Chain 32: INMAA (Chain 1 of 1)
-- Predecessors: Bangalore
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'INMAA')
);
DECLARE @chain_id_32 INT;
SET @chain_id_32 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Bangalore'),
@chain_id_32,
1
);
-- Predecessor Chain 33: CNSZX (Chain 1 of 1)
-- Predecessors: Shenzhen
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'CNSZX')
);
DECLARE @chain_id_33 INT;
SET @chain_id_33 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'Shenzhen'),
@chain_id_33,
1
);
-- Predecessor Chain 34: WH_BAT3 (Chain 1 of 1)
-- Predecessors: FRLEH
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_BAT3')
);
DECLARE @chain_id_34 INT;
SET @chain_id_34 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'FRLEH'),
@chain_id_34,
1
);
-- Predecessor Chain 35: WH_JEAN (Chain 1 of 1)
-- Predecessors: FRLEH
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_JEAN')
);
DECLARE @chain_id_35 INT;
SET @chain_id_35 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'FRLEH'),
@chain_id_35,
1
);
-- Predecessor Chain 36: WH_ROLO (Chain 1 of 1)
-- Predecessors: ITGOA
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_ROLO')
);
DECLARE @chain_id_36 INT;
SET @chain_id_36 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'ITGOA'),
@chain_id_36,
1
);
-- Predecessor Chain 37: WH_BUD (Chain 1 of 1)
-- Predecessors: DEHAM
INSERT INTO node_predecessor_chain (
node_id
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'WH_BUD')
);
DECLARE @chain_id_37 INT;
SET @chain_id_37 = SCOPE_IDENTITY();
INSERT INTO node_predecessor_entry (
node_id,
node_predecessor_chain_id,
sequence_number
) VALUES (
(SELECT id FROM node WHERE external_mapping_id = 'DEHAM'),
@chain_id_37,
1
);
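Note (not part of the changeset): every block above repeats the same pattern — insert one node_predecessor_chain row for the destination node, capture the generated id with SCOPE_IDENTITY(), then insert the ordered node_predecessor_entry rows that reference it. A minimal Java sketch of that pattern, assuming Spring's NamedParameterJdbcTemplate and the table and column names from this seed script; the class and method names are illustrative only:

import java.util.List;

import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.support.GeneratedKeyHolder;

public class PredecessorChainSeeder {

    private final NamedParameterJdbcTemplate jdbc;

    public PredecessorChainSeeder(NamedParameterJdbcTemplate jdbc) {
        this.jdbc = jdbc;
    }

    /** Inserts one predecessor chain for the given node plus its ordered predecessor entries. */
    public void insertChain(String nodeMappingId, List<String> predecessorMappingIds) {
        // Insert the chain row and capture the generated id (the SQL script does this
        // with SCOPE_IDENTITY(); here the JDBC driver returns the key directly).
        GeneratedKeyHolder chainKey = new GeneratedKeyHolder();
        jdbc.update(
                "INSERT INTO node_predecessor_chain (node_id) "
                        + "VALUES ((SELECT id FROM node WHERE external_mapping_id = :node))",
                new MapSqlParameterSource("node", nodeMappingId),
                chainKey);
        Number chainId = chainKey.getKey();

        // Insert the predecessors in order, numbering them from 1.
        int sequence = 1;
        for (String predecessor : predecessorMappingIds) {
            jdbc.update(
                    "INSERT INTO node_predecessor_entry (node_id, node_predecessor_chain_id, sequence_number) "
                            + "VALUES ((SELECT id FROM node WHERE external_mapping_id = :pred), :chain, :seq)",
                    new MapSqlParameterSource()
                            .addValue("pred", predecessor)
                            .addValue("chain", chainId)
                            .addValue("seq", sequence++));
        }
    }
}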

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,20 @@
INSERT INTO sys_group(group_name, group_description)
VALUES (N'none', N'no rights');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'basic', N'can generate reports');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'calculation', N'can generate reports, do calculations');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'freight', N'manage freight rates');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'packaging', N'manage packaging data');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'material', N'manage material data');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'super',
N'can generate reports, do calculations, manage freight rates, manage packaging data, manage material data, manage general system settings');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'service', N'register external applications');
INSERT INTO sys_group(group_name, group_description)
VALUES (N'right-management',
N'add users, manage user groups');

View file

@ -0,0 +1,51 @@
package de.avatic.lcc.config;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Profile;
import org.testcontainers.containers.MSSQLServerContainer;
import org.testcontainers.containers.MySQLContainer;
import org.testcontainers.utility.DockerImageName;
/**
* TestContainers configuration for multi-database integration testing.
* <p>
* Automatically starts the correct database container based on the active Spring profile.
* Uses @ServiceConnection to automatically configure the Spring DataSource.
* <p>
* Usage:
* <pre>
* mvn test -Dspring.profiles.active=test,mysql -Dtest=DatabaseConfigurationSmokeTest
* mvn test -Dspring.profiles.active=test,mssql -Dtest=DatabaseConfigurationSmokeTest
* </pre>
*/
@TestConfiguration
public class DatabaseTestConfiguration {
@Bean
@ServiceConnection
@Profile("mysql")
public MySQLContainer<?> mysqlContainer() {
System.out.println("DatabaseTestConfiguration: Creating MySQL container bean...");
MySQLContainer<?> container = new MySQLContainer<>(DockerImageName.parse("mysql:8.0"))
.withDatabaseName("lcc_test")
.withUsername("test")
.withPassword("test");
System.out.println("DatabaseTestConfiguration: MySQL container bean created");
return container;
}
@Bean
@ServiceConnection
@Profile("mssql")
public MSSQLServerContainer<?> mssqlContainer() {
System.out.println("DatabaseTestConfiguration: Creating MSSQL container bean...");
MSSQLServerContainer<?> container = new MSSQLServerContainer<>(
DockerImageName.parse("mcr.microsoft.com/mssql/server:2022-latest"))
.acceptLicense()
.withPassword("YourStrong!Passw0rd123");
System.out.println("DatabaseTestConfiguration: MSSQL container bean created");
return container;
}
}
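A hedged usage sketch (not a file from this changeset): a test that imports the configuration and picks the database through the active profiles, mirroring the Maven commands in the Javadoc above. The test class name is illustrative.

import de.avatic.lcc.config.DatabaseTestConfiguration;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.ActiveProfiles;

@SpringBootTest
@Import(DatabaseTestConfiguration.class)
@ActiveProfiles({"test", "mysql"}) // swap "mysql" for "mssql" to run against SQL Server
class DatabaseProfileUsageSketch {

    @Test
    void contextStartsAgainstContainerDatabase() {
        // If the context starts, the profile-selected container was created and
        // wired into the DataSource via @ServiceConnection.
    }
}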

View file

@ -0,0 +1,49 @@
package de.avatic.lcc.config;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import javax.sql.DataSource;
/**
* Test configuration that provides only the beans needed for repository tests.
* Does NOT load the full LccApplication context.
*
* Uses @SpringBootConfiguration to prevent Spring Boot from searching for and loading LccApplication.
*
* Excludes repositories with external dependencies (transformers/services) since we're only testing the JDBC layer.
*/
@SpringBootConfiguration
@EnableAutoConfiguration
@ComponentScan(
basePackages = {
"de.avatic.lcc.repositories",
"de.avatic.lcc.database.dialect"
},
excludeFilters = @ComponentScan.Filter(
type = FilterType.ASSIGNABLE_TYPE,
classes = {
de.avatic.lcc.repositories.error.DumpRepository.class
}
)
)
public class RepositoryTestConfig {
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
@Bean
public NamedParameterJdbcTemplate namedParameterJdbcTemplate(DataSource dataSource) {
return new NamedParameterJdbcTemplate(dataSource);
}
// SqlDialectProvider beans are now provided by @Component annotations in
// MySQLDialectProvider and MSSQLDialectProvider classes
}
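A hedged sketch (not part of the diff) of how a repository test might combine this slim configuration with the TestContainers configuration above; the wiring and names below are assumptions, not the project's actual test setup.

import de.avatic.lcc.config.DatabaseTestConfiguration;
import de.avatic.lcc.config.RepositoryTestConfig;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ActiveProfiles;

@SpringBootTest(classes = RepositoryTestConfig.class) // bootstrap the slim config, not LccApplication
@Import(DatabaseTestConfiguration.class)              // the profile-selected container provides the DataSource
@ActiveProfiles({"test", "mysql"})
class RepositoryLayerUsageSketch {

    @Autowired
    JdbcTemplate jdbcTemplate;

    @Test
    void jdbcLayerIsWired() {
        // Only the JDBC beans plus the scanned repository and dialect packages are on the context.
        jdbcTemplate.execute("SELECT 1");
    }
}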

View file

@ -6,6 +6,9 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import de.avatic.lcc.dto.calculation.DestinationDTO;
import de.avatic.lcc.dto.calculation.edit.destination.DestinationCreateDTO;
import de.avatic.lcc.dto.calculation.edit.destination.DestinationUpdateDTO;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
@ -27,6 +30,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@AutoConfigureMockMvc
@Transactional
@Import({PremiseControllerTestData.class, PremiseTestsHelper.class})
@Epic("Controller")
@Feature("Calculation")
public class CalculationIntegrationTests {
@Autowired
@ -49,6 +54,7 @@ public class CalculationIntegrationTests {
class StartCalculationTests {
@Test
@Story("Start calculation happy path")
@DisplayName("POST /api/calculation/start - happy path")
public void startCalculationCase1() throws Exception {
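The remaining test diffs in this changeset apply the same Allure labeling pattern: @Epic and @Feature at class level, plus one @Story per test that mirrors its @DisplayName. A condensed illustration (not an actual file from the changeset):

import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

// Class-level labels group the suite in the Allure report hierarchy (Epic > Feature > Story).
@Epic("Controller")
@Feature("Calculation")
class AllureLabelingPatternSketch {

    @Test
    @Story("Start calculation happy path")
    @DisplayName("POST /api/calculation/start - happy path")
    void startCalculationHappyPath() {
        // Test body omitted; only the annotation pattern matters here.
    }
}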

View file

@ -4,6 +4,9 @@ package de.avatic.lcc.controller.calculation;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.avatic.lcc.dto.calculation.edit.destination.DestinationCreateDTO;
import de.avatic.lcc.dto.calculation.edit.destination.DestinationUpdateDTO;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
@ -30,7 +33,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@AutoConfigureMockMvc
@Transactional
@Import({PremiseControllerTestData.class, PremiseTestsHelper.class})
@Epic("Controller")
@Feature("Calculation")
public class DestinationIntegrationTest {
@Autowired
@ -52,6 +56,7 @@ public class DestinationIntegrationTest {
class CreateDestinationTests {
@Test
@Story("Create destination happy path")
@DisplayName("POST /api/calculation/destination/ - happy path (create destination)")
public void createDestinationCase1() throws Exception {
@ -95,6 +100,7 @@ public class DestinationIntegrationTest {
}
@Test
@Story("Get destination happy path")
@DisplayName("GET /api/calculation/destination - happy path (get destination)")
public void getDestinationCase1() throws Exception {
@ -130,6 +136,7 @@ public class DestinationIntegrationTest {
}
@Test
@Story("Update destination happy path")
@DisplayName("PUT /api/calculation/destination - happy path (update destination)")
public void getDestinationCase1() throws Exception {
@ -165,6 +172,7 @@ public class DestinationIntegrationTest {
}
@Test
@Story("Partial update destination happy path")
@DisplayName("PUT /api/calculation/destination - happy path (partial update destination)")
public void getDestinationCase2() throws Exception {
@ -200,6 +208,7 @@ public class DestinationIntegrationTest {
}
@Test
@Story("Update destination error case with negative value")
@DisplayName("PUT /api/calculation/destination - error case (negative value)")
public void getDestinationCase3() throws Exception {
@ -255,6 +264,7 @@ public class DestinationIntegrationTest {
@Test
@Story("Delete single destination happy path")
@DisplayName("DELETE /api/calculation/destination - happy path (delete single destination)")
public void deleteDestinationCase1() throws Exception {
@ -279,6 +289,7 @@ public class DestinationIntegrationTest {
@Test
@Story("Delete all destinations happy path")
@DisplayName("DELETE /api/calculation/destination - happy path (delete all destination)")
public void deleteDestinationCase2() throws Exception {
@ -314,6 +325,7 @@ public class DestinationIntegrationTest {
@Test
@Story("Delete destination error case with unknown id")
@DisplayName("DELETE /api/calculation/destination - error case (unknown id)")
public void deleteDestinationCase3() throws Exception {

View file

@ -10,6 +10,9 @@ import de.avatic.lcc.dto.generic.DimensionDTO;
import de.avatic.lcc.model.db.premises.Premise;
import de.avatic.lcc.model.db.utils.DimensionUnit;
import de.avatic.lcc.model.db.utils.WeightUnit;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
@ -39,6 +42,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@AutoConfigureMockMvc
@Transactional
@Import({PremiseControllerTestData.class, PremiseTestsHelper.class})
@Epic("Controller")
@Feature("Calculation")
public class PremiseControllerIntegrationTest {
@ -64,6 +69,7 @@ public class PremiseControllerIntegrationTest {
// Test for GET /api/calculation/view
@Test
@Story("GET /api/calculation/view - happy path (no filter)")
@DisplayName("GET /api/calculation/view - happy path (no filter)")
public void viewMaterialTest() throws Exception {
mockMvc.perform(get("/api/calculation/view")).andExpect(status().isOk()).andDo(print()).andExpect(jsonPath("$", isA(List.class)));
@ -73,6 +79,7 @@ public class PremiseControllerIntegrationTest {
@Test
@Story("GET /api/calculation/view - happy path (filtering part number)")
@DisplayName("GET /api/calculation/view - happy path (filtering part number)")
public void viewMaterialFilterTest() throws Exception {
mockMvc.perform(get("/api/calculation/view").param("filter", "28152640129")).andExpect(status().isOk()).andDo(print()).andExpect(jsonPath("$", isA(List.class))).andExpect(jsonPath("$[*].material.part_number", everyItem(equalTo("28152640129"))));
@ -81,6 +88,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("GET /api/calculation/view - happy path (filtering supplier name)")
@DisplayName("GET /api/calculation/view - happy path (filtering supplier name)")
public void viewMaterialFilterSupplierTest() throws Exception {
mockMvc.perform(get("/api/calculation/view").param("filter", "My Supplier 1")).andExpect(status().isOk()).andDo(print()).andExpect(jsonPath("$", isA(List.class))).andExpect(jsonPath("$[*].supplier.name").value(everyItem(equalTo("My Supplier 1"))));
@ -105,6 +113,7 @@ public class PremiseControllerIntegrationTest {
// Test for GET /api/calculation/search
@Test
@Story("GET /api/calculation/search - happy path (single)")
@DisplayName("GET /api/calculation/search - happy path (single)")
public void searchSingleMaterialTest() throws Exception {
mockMvc.perform(get("/api/calculation/search").param("search", "28152640129")).andExpect(status().isOk()).andDo(print()).andExpect(jsonPath("$.materials", isA(List.class))).andExpect(jsonPath("$.materials", hasSize(1))).andExpect(jsonPath("$.supplier", hasSize(1))).andExpect(jsonPath("$.user_supplier", hasSize(1))).andExpect(jsonPath("$.user_supplier[?(@.name == 'My Supplier 1')]").exists()).andExpect(jsonPath("$.user_supplier[?(@.name == 'My Supplier 2')]").isEmpty());
@ -112,6 +121,7 @@ public class PremiseControllerIntegrationTest {
@Test
@Story("GET /api/calculation/search - happy path (muliple)")
@DisplayName("GET /api/calculation/search - happy path (muliple)")
public void searchMultiMaterialTest() throws Exception {
mockMvc.perform(get("/api/calculation/search").param("search", "28152640129 Material 4222640803 bla4222640104bla bla")).andExpect(status().isOk()).andDo(print()).andExpect(jsonPath("$.materials", isA(List.class))).andExpect(jsonPath("$.materials", hasSize(2))).andExpect(jsonPath("$.supplier", hasSize(1))).andExpect(jsonPath("$.user_supplier", hasSize(1))).andExpect(jsonPath("$.user_supplier[?(@.name == 'My Supplier 1')]").exists()).andExpect(jsonPath("$.supplier[?(@.name == 'Linde (China) Forklift Truck (Supplier)')]").exists()).andExpect(jsonPath("$.materials[?(@.part_number == '28152640129')]").exists()).andExpect(jsonPath("$.materials[?(@.part_number == '4222640803')]").exists());
@ -322,6 +332,7 @@ public class PremiseControllerIntegrationTest {
@Test
@Story("POST /api/calculation/create - Copies COMPLETED with lowest age")
@DisplayName("POST /api/calculation/create - Copies COMPLETED with lowest age")
public void createPremiseCase1() throws Exception {
@ -365,6 +376,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Recycles DRAFT from same user")
@DisplayName("POST /api/calculation/create - Recycles DRAFT from same user")
public void createPremiseCase2() throws Exception {
@ -393,6 +405,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Recycles COMPLETED from other user")
@DisplayName("POST /api/calculation/create - Recycles COMPLETED from other user")
public void createPremiseCase3() throws Exception {
var supplierId = testsHelper.getNodeIdByExternalMappingId("LX");
@ -419,6 +432,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Creates new Premise if no old data")
@DisplayName("POST /api/calculation/create - Creates new Premise if no old data")
public void createPremiseCase4() throws Exception {
var supplierId = testsHelper.getNodeIdByExternalMappingId("LX");
@ -446,6 +460,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Uses UserSupplier DRAFT")
@DisplayName("POST /api/calculation/create - Uses UserSupplier DRAFT")
public void createPremiseCase5() throws Exception {
var supplierId = testsHelper.getUserNodeIdByName("My Supplier 1");
@ -472,6 +487,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Access Violation UserSupplier")
@DisplayName("POST /api/calculation/create - Access Violation UserSupplier")
public void createPremiseCase6() throws Exception {
var supplierId = testsHelper.getUserNodeIdByName("My Supplier 2");
@ -493,6 +509,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - From Scratch behavior test")
@DisplayName("POST /api/calculation/create - From Scratch behavior test")
public void createPremiseCase7() throws Exception {
var supplierId = testsHelper.getNodeIdByExternalMappingId("LX");
@ -526,6 +543,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Faulty material id")
@DisplayName("POST /api/calculation/create - Faulty material id")
public void createPremiseCase8() throws Exception {
@ -548,6 +566,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Faulty supplier id")
@DisplayName("POST /api/calculation/create - Faulty supplier id")
public void createPremiseCase9() throws Exception {
@ -569,6 +588,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/create - Faulty user supplier id")
@DisplayName("POST /api/calculation/create - Faulty user supplier id")
public void createPremiseCase10() throws Exception {
var premisesBeforeCreate = testsHelper.getPremisesFromDb();
@ -624,6 +644,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("GET /api/calculation/edit - Get Single Premiss")
@DisplayName("GET /api/calculation/edit - Get Single Premiss")
public void getPremiseCase1() throws Exception {
@ -655,6 +676,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("GET /api/calculation/edit - Get Multiple Premisses")
@DisplayName("GET /api/calculation/edit - Get Multiple Premisses")
public void getPremiseCase2() throws Exception {
@ -699,6 +721,7 @@ public class PremiseControllerIntegrationTest {
@Test
@Story("POST /api/calculation/material - happy path (update tariff_rate)")
@DisplayName("POST /api/calculation/material - happy path (update tariff_rate)")
public void updateMaterialCase1() throws Exception {
@ -731,6 +754,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/material - happy path (update hs_code)")
@DisplayName("POST /api/calculation/material - happy path (update hs_code)")
public void updateMaterialCase2() throws Exception {
@ -763,6 +787,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/material - happy path (update both)")
@DisplayName("POST /api/calculation/material - happy path (update both)")
public void updateMaterialCase3() throws Exception {
@ -797,6 +822,7 @@ public class PremiseControllerIntegrationTest {
@Test
@Story("POST /api/calculation/material - error (invalid hs_code - to long)")
@DisplayName("POST /api/calculation/material - error (invalid hs_code - to long)")
public void updateMaterialCase4() throws Exception {
@ -829,6 +855,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/material - error (invalid hs_code - to short)")
@DisplayName("POST /api/calculation/material - error (invalid hs_code - to short)")
public void updateMaterialCase5() throws Exception {
@ -861,6 +888,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/material - error (invalid hs_code - invalid characters)")
@DisplayName("POST /api/calculation/material - error (invalid hs_code - invalid characters)")
public void updateMaterialCase6() throws Exception {
@ -894,6 +922,7 @@ public class PremiseControllerIntegrationTest {
@Test
@Story("POST /api/calculation/material - error (invalid hs_code - negative number)")
@DisplayName("POST /api/calculation/material - error (invalid hs_code - negative number)")
public void updateMaterialCase7() throws Exception {
@ -926,6 +955,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/material - error (invalid tariff_rate values)")
@DisplayName("POST /api/calculation/material - error (invalid tariff_rate values)")
public void updateMaterialCase8() throws Exception {
@ -987,6 +1017,7 @@ public class PremiseControllerIntegrationTest {
class UpdatePackagingTests {
@Test
@Story("POST /api/calculation/packaging - happy path (update mixable)")
@DisplayName("POST /api/calculation/packaging - happy path (update mixable)")
public void updatePackagingCase1() throws Exception {
@ -1041,6 +1072,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/packaging - happy path (update stackable)")
@DisplayName("POST /api/calculation/packaging - happy path (update stackable)")
public void updatePackagingCase2() throws Exception {
@ -1095,6 +1127,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/packaging - happy path (update dimensions)")
@DisplayName("POST /api/calculation/packaging - happy path (update dimensions)")
public void updatePackagingCase3() throws Exception {
@ -1156,6 +1189,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/packaging - error (partial update dimensions)")
@DisplayName("POST /api/calculation/packaging - error (partial update dimensions)")
public void updatePackagingCase4() throws Exception {
@ -1217,6 +1251,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/packaging - happy path (API read-back)")
@DisplayName("POST /api/calculation/packaging - happy path (API read-back)")
public void updatePackagingCase5() throws Exception {
@ -1255,6 +1290,7 @@ public class PremiseControllerIntegrationTest {
class UpdatePriceTests {
@Test
@Story("POST /api/calculation/price - happy path (update price only)")
@DisplayName("POST /api/calculation/price - happy path (update price only)")
public void updatePriceCase1() throws Exception {
@ -1289,6 +1325,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/price - error case (set invalid prices)")
@DisplayName("POST /api/calculation/price - error case (set invalid prices)")
public void updatePriceCase2() throws Exception {
@ -1336,6 +1373,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/price - happy path (update oversea share only)")
@DisplayName("POST /api/calculation/price - happy path (update oversea share only)")
public void updatePriceCase4() throws Exception {
@ -1370,6 +1408,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/price - error case (set invalid oversea share)")
@DisplayName("POST /api/calculation/price - error case (set invalid oversea share)")
public void updatePriceCase5() throws Exception {
@ -1414,6 +1453,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/price - happy path (update include fca fee)")
@DisplayName("POST /api/calculation/price - happy path (update include fca fee)")
public void updatePriceCase6() throws Exception {
@ -1467,6 +1507,7 @@ public class PremiseControllerIntegrationTest {
}
@Test
@Story("POST /api/calculation/price - happy path (update all values)")
@DisplayName("POST /api/calculation/price - happy path (update all values)")
public void updatePriceCase7() throws Exception {

View file

@ -2,6 +2,9 @@ package de.avatic.lcc.controller.calculation;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.avatic.lcc.dto.calculation.edit.SetDataDTO;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
@ -28,6 +31,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@AutoConfigureMockMvc
@Transactional
@Import({PremiseControllerTestData.class, PremiseTestsHelper.class})
@Epic("Controller")
@Feature("Calculation")
public class PremiseControllerSetIntegrationTest {
@Autowired
@ -59,6 +64,7 @@ public class PremiseControllerSetIntegrationTest {
}
@Test
@Story("Set supplier with master data update and no user node")
@DisplayName("PUT /api/calculation/supplier - happy path (update master data, no user node)")
public void setSupplierTestCase1() throws Exception {
@ -84,6 +90,7 @@ public class PremiseControllerSetIntegrationTest {
@Test
@Story("Set supplier without master data update and no user node")
@DisplayName("PUT /api/calculation/supplier - happy path (no master data update, no user node)")
public void setSupplierTestCase2() throws Exception {
@ -106,6 +113,7 @@ public class PremiseControllerSetIntegrationTest {
}
@Test
@Story("Set supplier with master data update and user node")
@DisplayName("PUT /api/calculation/supplier - happy path (master data update, user node)")
public void setSupplierTestCase3() throws Exception {
@ -150,6 +158,7 @@ public class PremiseControllerSetIntegrationTest {
}
@Test
@Story("Set material without master data update")
@DisplayName("PUT /api/calculation/material - happy path (no master data update)")
public void setMaterialTestCase1() throws Exception {
@ -171,6 +180,7 @@ public class PremiseControllerSetIntegrationTest {
}
@Test
@Story("Set material with master data update")
@DisplayName("PUT /api/calculation/material - happy path (master data update)")
public void setMaterialTestCase2() throws Exception {

View file

@ -2,6 +2,9 @@ package de.avatic.lcc.controller.configuration;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
@ -25,6 +28,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@Epic("Controller")
@Feature("Configuration")
public class CountryControllerIntegrationTest {
protected static final String BASE_URL = "/api/countries";
@ -36,6 +41,7 @@ public class CountryControllerIntegrationTest {
protected ObjectMapper objectMapper;
@Test
@Story("Get countries with default pagination")
@DisplayName("get countries with default pagination")
void countriesDefaultPagination() throws Exception {
mockMvc.perform(get(BASE_URL + "/"))
@ -49,6 +55,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get countries with custom pagination")
@DisplayName("get countries with custom pagination")
void countriesCustomPagination() throws Exception {
mockMvc.perform(get(BASE_URL + "/")
@ -62,6 +69,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get countries with filter region, non empty")
@DisplayName("get countries with filter region, non empty")
void countriesFilteredByNAM() throws Exception {
mockMvc.perform(get(BASE_URL + "/")
@ -72,6 +80,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get countries with filter, empty result")
@DisplayName("get countries with filter, empty result")
void countriesFilteredByXyz() throws Exception {
mockMvc.perform(get(BASE_URL + "/")
@ -82,6 +91,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get countries with filter country, non empty")
@DisplayName("get countries with filter country, non empty")
void countriesFilteredByDe() throws Exception {
mockMvc.perform(get(BASE_URL + "/")
@ -92,6 +102,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get all countries without pagination")
@DisplayName("get all countries without pagination")
void allCountries() throws Exception {
mockMvc.perform(get(BASE_URL + "/all"))
@ -101,6 +112,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get all countries filtered by EMEA region")
@DisplayName("get all countries filtered by EMEA region")
void allCountriesFilteredByEmea() throws Exception {
mockMvc.perform(get(BASE_URL + "/all")
@ -110,6 +122,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get country details by ID")
@DisplayName("get country details by ID")
void getCountryDetailsById() throws Exception {
final String isoCode = "DE";
@ -130,6 +143,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get country details by ISO code")
@DisplayName("get country details by ISO code")
void getCountryDetailsByIsoCode() throws Exception {
final String isoCode = "US";
@ -146,6 +160,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get country with specific property set")
@DisplayName("get country with specific property set")
void getWithPropertySet() throws Exception {
final String isoCode = "US";
@ -159,6 +174,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get country with invalid property set should return bad request")
@DisplayName("get country with invalid property set should return bad request")
void getWithBadPropertySet() throws Exception {
final String isoCode = "US";
@ -176,6 +192,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get country with invalid ID should return bad request")
@DisplayName("get country with invalid ID should return bad request")
void getCountryWithBadId() throws Exception {
mockMvc.perform(get(BASE_URL + "/999"))
@ -183,6 +200,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get country with invalid ISO code should return bad request")
@DisplayName("get country with invalid ISO code should return bad request")
void getCountryWithBadIsoCode() throws Exception {
mockMvc.perform(get(BASE_URL + "/XY"))
@ -190,6 +208,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get countries with invalid pagination parameters should return bad request")
@DisplayName("get countries with invalid pagination parameters should return bad request")
void countriesPaginationWithBadParams() throws Exception {
mockMvc.perform(get(BASE_URL + "/")
@ -199,6 +218,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get countries with large pagination limit should return all countries")
@DisplayName("get countries with large pagination limit should return all countries")
void countriesPaginationWithLargeParams() throws Exception {
mockMvc.perform(get(BASE_URL + "/")
@ -208,6 +228,7 @@ public class CountryControllerIntegrationTest {
}
@Test
@Story("Get countries with SQL injection attempt should be safe")
@DisplayName("get countries with SQL injection attempt should be safe")
void countriesSqlInjection() throws Exception {
String maliciousFilter = "'; DROP TABLE country; --";

View file

@ -2,6 +2,9 @@ package de.avatic.lcc.controller.configuration;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.avatic.lcc.dto.generic.MaterialDTO;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
@ -24,6 +27,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@AutoConfigureMockMvc
@Transactional
@DisplayName("MaterialController Integration Tests")
@Epic("Controller")
@Feature("Configuration")
class MaterialControllerIntegrationTest {
@Autowired
@ -39,6 +44,7 @@ class MaterialControllerIntegrationTest {
class ListMaterialsTests {
@Test
@Story("Return all materials with default pagination")
@DisplayName("Should return all materials with default pagination")
void shouldReturnAllMaterialsWithDefaultPagination() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -57,6 +63,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return materials with custom pagination")
@DisplayName("Should return materials with custom pagination")
void shouldReturnMaterialsWithCustomPagination() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -73,6 +80,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return second page of materials")
@DisplayName("Should return second page of materials")
void shouldReturnSecondPageOfMaterials() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -89,6 +97,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Filter materials by part number")
@DisplayName("Should filter materials by part number")
void shouldFilterMaterialsByPartNumber() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -106,6 +115,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Filter materials by name")
@DisplayName("Should filter materials by name")
void shouldFilterMaterialsByName() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -119,6 +129,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return empty list when filter matches no materials")
@DisplayName("Should return empty list when filter matches no materials")
void shouldReturnEmptyListWhenFilterMatchesNoMaterials() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -134,6 +145,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle empty filter parameter")
@DisplayName("Should handle empty filter parameter")
void shouldHandleEmptyFilterParameter() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -147,6 +159,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle out of bounds page number")
@DisplayName("Should handle out of bounds page number")
void shouldHandleOutOfBoundsPageNumber() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -162,6 +175,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle negative page number")
@DisplayName("Should handle negative page number")
void shouldHandleNegativePageNumber() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -173,6 +187,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle large limit parameter")
@DisplayName("Should handle large limit parameter")
void shouldHandleLargeLimitParameter() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -194,6 +209,7 @@ class MaterialControllerIntegrationTest {
class GetMaterialDetailsTests {
@Test
@Story("Return material details for existing material")
@DisplayName("Should return material details for existing material")
void shouldReturnMaterialDetailsForExistingMaterial() throws Exception {
// First, get the list of materials to find a valid ID
@ -219,6 +235,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return material details with correct data structure")
@DisplayName("Should return material details with correct data structure")
void shouldReturnMaterialDetailsWithCorrectDataStructure() throws Exception {
@ -244,6 +261,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return specific material details by known part number")
@DisplayName("Should return specific material details by known part number")
void shouldReturnSpecificMaterialDetailsByKnownPartNumber() throws Exception {
@ -269,6 +287,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return 400 for non-existent material ID")
@DisplayName("Should return 400 for non-existent material ID")
void shouldReturn404ForNonExistentMaterialId() throws Exception {
mockMvc.perform(get("/api/materials/99999")
@ -278,6 +297,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return 400 for negative material ID")
@DisplayName("Should return 400 for negative material ID")
void shouldReturn404ForNegativeMaterialId() throws Exception {
mockMvc.perform(get("/api/materials/-1")
@ -287,6 +307,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return 400 for invalid material ID format")
@DisplayName("Should return 400 for invalid material ID format")
void shouldReturn400ForInvalidMaterialIdFormat() throws Exception {
mockMvc.perform(get("/api/materials/invalid")
@ -296,6 +317,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Return 400 for zero material ID")
@DisplayName("Should return 400 for zero material ID")
void shouldReturn400ForZeroMaterialId() throws Exception {
mockMvc.perform(get("/api/materials/0")
@ -312,6 +334,7 @@ class MaterialControllerIntegrationTest {
class ErrorHandlingTests {
@Test
@Story("Handle missing required headers")
@DisplayName("Should handle missing required headers")
void shouldHandleMissingRequiredHeaders() throws Exception {
mockMvc.perform(get("/api/materials/"))
@ -320,6 +343,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle invalid parameter types for limit")
@DisplayName("Should handle invalid parameter types for limit")
void shouldHandleInvalidParameterTypesForLimit() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -330,6 +354,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle invalid parameter types for page")
@DisplayName("Should handle invalid parameter types for page")
void shouldHandleInvalidParameterTypesForPage() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -340,6 +365,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle extremely large filter string")
@DisplayName("Should handle extremely large filter string")
void shouldHandleExtremelyLargeFilterString() throws Exception {
String largeFilter = "x".repeat(1000);
@ -352,6 +378,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle special characters in filter")
@DisplayName("Should handle special characters in filter")
void shouldHandleSpecialCharactersInFilter() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -363,6 +390,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Handle SQL injection attempts in filter")
@DisplayName("Should handle SQL injection attempts in filter")
void shouldHandleSqlInjectionAttemptsInFilter() throws Exception {
mockMvc.perform(get("/api/materials/")
@ -379,6 +407,7 @@ class MaterialControllerIntegrationTest {
class PerformanceTests {
@Test
@Story("Handle concurrent requests efficiently")
@DisplayName("Should handle concurrent requests efficiently")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql", "classpath:master_data/material_packaging.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/material_packaging-cleanup.sql", "classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -395,6 +424,7 @@ class MaterialControllerIntegrationTest {
}
@Test
@Story("Respond within reasonable time limits")
@DisplayName("Should respond within reasonable time limits")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql", "classpath:master_data/material_packaging.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/material_packaging-cleanup.sql", "classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)

View file

@ -6,6 +6,9 @@ import de.avatic.lcc.dto.configuration.nodes.userNodes.AddUserNodeDTO;
import de.avatic.lcc.dto.generic.CountryDTO;
import de.avatic.lcc.dto.generic.LocationDTO;
import de.avatic.lcc.dto.generic.NodeType;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
@ -25,6 +28,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@SpringBootTest
@AutoConfigureMockMvc
@Transactional
@Epic("Controller")
@Feature("Configuration")
class NodeControllerIntegrationTest {
@Autowired
@ -38,6 +43,7 @@ class NodeControllerIntegrationTest {
class ListNodesTests {
@Test
@Story("Return list of nodes with default pagination")
@DisplayName("Should return list of nodes with default pagination")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -64,6 +70,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return filtered nodes when filter parameter is provided")
@DisplayName("Should return filtered nodes when filter parameter is provided")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -77,6 +84,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Respect pagination parameters")
@DisplayName("Should respect pagination parameters")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -91,6 +99,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return empty list when no nodes match filter")
@DisplayName("Should return empty list when no nodes match filter")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -108,6 +117,7 @@ class NodeControllerIntegrationTest {
class SearchNodesTests {
@Test
@Story("Search nodes without type filter")
@DisplayName("Should search nodes without type filter")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -122,6 +132,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Search nodes with specific node type")
@DisplayName("Should search nodes with specific node type")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -135,6 +146,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Include user nodes when requested")
@DisplayName("Should include user nodes when requested")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -153,6 +165,7 @@ class NodeControllerIntegrationTest {
class GetNodeDetailsTests {
@Test
@Story("Return node details for existing node")
@DisplayName("Should return node details for existing node")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -178,6 +191,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return bad request for non-existing node")
@DisplayName("Should return bad request for non-existing node")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -186,6 +200,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return node with predecessor chains")
@DisplayName("Should return node with predecessor chains")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -205,6 +220,7 @@ class NodeControllerIntegrationTest {
class UpdateNodeTests {
@Test
@Story("Update existing node successfully")
@DisplayName("Should update existing node successfully")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -227,6 +243,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return 400 when ID in path doesn't match ID in body")
@DisplayName("Should return 400 when ID in path doesn't match ID in body")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"
@ -243,6 +260,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return 400 when trying to update non-existing node")
@DisplayName("Should return 400 when trying to update non-existing node")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -263,6 +281,7 @@ class NodeControllerIntegrationTest {
class DeleteNodeTests {
@Test
@Story("Mark node as deprecated successfully")
@DisplayName("Should mark node as deprecated successfully")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -278,6 +297,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return 404 when trying to delete non-existing node")
@DisplayName("Should return 404 when trying to delete non-existing node")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -292,6 +312,7 @@ class NodeControllerIntegrationTest {
class LocateNodeTests {
@Test
@Story("Locate node by address")
@DisplayName("Should locate node by address")
void shouldLocateNodeByAddress() throws Exception {
mockMvc.perform(get("/api/nodes/locate")
@ -304,6 +325,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return 400 when address parameter is missing")
@DisplayName("Should return 400 when address parameter is missing")
void shouldReturn400WhenAddressParameterIsMissing() throws Exception {
mockMvc.perform(get("/api/nodes/locate"))
@ -311,6 +333,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Handle invalid addresses gracefully")
@DisplayName("Should handle invalid addresses gracefully")
void shouldHandleInvalidAddressesGracefully() throws Exception {
mockMvc.perform(get("/api/nodes/locate")
@ -325,6 +348,7 @@ class NodeControllerIntegrationTest {
class AddUserNodeTests {
@Test
@Story("Add user node successfully")
@DisplayName("Should add user node successfully")
@Sql(scripts = {"classpath:master_data/countries_properties.sql", "classpath:master_data/nodes.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/nodes-cleanup.sql", "classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -349,6 +373,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Return 400 for invalid user node data")
@DisplayName("Should return 400 for invalid user node data")
void shouldReturn400ForInvalidUserNodeData() throws Exception {
AddUserNodeDTO invalidDTO = new AddUserNodeDTO();
@ -366,6 +391,7 @@ class NodeControllerIntegrationTest {
class ErrorHandlingTests {
@Test
@Story("Handle malformed JSON in request body")
@DisplayName("Should handle malformed JSON in request body")
void shouldHandleMalformedJsonInRequestBody() throws Exception {
String malformedJson = "{ \"id\": \"invalid\" malformed }";
@ -377,6 +403,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Handle non-numeric node ID in path")
@DisplayName("Should handle non-numeric node ID in path")
void shouldHandleNonNumericNodeIdInPath() throws Exception {
mockMvc.perform(get("/api/nodes/invalid-id"))
@ -384,6 +411,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Handle negative pagination parameters")
@DisplayName("Should handle negative pagination parameters")
void shouldHandleNegativePaginationParameters() throws Exception {
mockMvc.perform(get("/api/nodes/")
@ -398,6 +426,7 @@ class NodeControllerIntegrationTest {
class SecurityTests {
@Test
@Story("Reject requests with XSS attempts in filter")
@DisplayName("Should reject requests with XSS attempts in filter")
void shouldRejectRequestsWithXssAttemptsInFilter() throws Exception {
String xssAttempt = "<script>alert('xss')</script>";
@ -409,6 +438,7 @@ class NodeControllerIntegrationTest {
}
@Test
@Story("Handle SQL injection attempts in filter")
@DisplayName("Should handle SQL injection attempts in filter")
void shouldHandleSqlInjectionAttemptsInFilter() throws Exception {
String sqlInjection = "'; DROP TABLE node; --";
@ -424,6 +454,7 @@ class NodeControllerIntegrationTest {
class PerformanceTests {
@Test
@Story("Handle large page sizes efficiently")
@DisplayName("Should handle large page sizes efficiently")
@Sql(scripts = {
"classpath:master_data/countries_properties.sql",

View file

@ -1,5 +1,8 @@
package de.avatic.lcc.controller.configuration;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
@ -18,6 +21,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@AutoConfigureMockMvc
@Transactional
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@Epic("Controller")
@Feature("Configuration")
class PropertyControllerIntegrationTest {
@Autowired
@ -32,6 +37,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(1)
@Story("Return properties with default property_set parameter")
@DisplayName("Should return properties with default property_set parameter")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -52,6 +58,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(2)
@Story("Return properties for specific property_set")
@DisplayName("Should return properties for specific property_set")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -78,6 +85,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(3)
@Story("Return empty list for non-existent property_set")
@DisplayName("Should return empty list for non-existent property_set")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -95,6 +103,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(4)
@Story("Return all validity periods")
@DisplayName("Should return all validity periods")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -115,6 +124,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(5)
@Story("Invalidate validity period successfully")
@DisplayName("Should invalidate validity period successfully")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -137,6 +147,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(6)
@Story("Return error for non-existent validity period/ not expired validity period")
@DisplayName("Should return error for non-existent validity period/ not expired validity period")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -157,6 +168,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(7)
@Story("Update existing country property")
@DisplayName("Should update existing country property")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -195,6 +207,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(9)
@Story("Reject invalid ISO code")
@DisplayName("Should reject invalid ISO code")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -225,6 +238,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(10)
@Story("Set system property successfully")
@DisplayName("Should set system property successfully")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -264,6 +278,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(11)
@Story("Validate property data type")
@DisplayName("Should validate property data type")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -286,6 +301,7 @@ class PropertyControllerIntegrationTest {
@Test
@Order(14)
@Story("Approve staged changes successfully")
@DisplayName("Should approve staged changes successfully")
@Sql(scripts = {"classpath:master_data/countries_properties.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/countries_properties-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)

View file

@ -1,5 +1,9 @@
package de.avatic.lcc.controller.configuration;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
@ -25,6 +29,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@SpringBootTest
@AutoConfigureMockMvc
@Transactional
@Epic("Controller")
@Feature("Configuration")
class RateControllerAdvancedIntegrationTest {
@Autowired
@ -35,6 +41,8 @@ class RateControllerAdvancedIntegrationTest {
// Parameterized Tests
@ParameterizedTest
@Story("Rate endpoints with different limits")
@DisplayName("Rate endpoints with different limits")
@ValueSource(strings = {"container", "matrix"})
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
@ -51,6 +59,8 @@ class RateControllerAdvancedIntegrationTest {
}
@ParameterizedTest
@Story("Container rates filter by transport type")
@DisplayName("Container rates filter by transport type")
@CsvSource({
"container,RAIL",
"container,SEA",
@ -69,6 +79,8 @@ class RateControllerAdvancedIntegrationTest {
// Complex Date Range Tests
@Test
@Story("Container rates with future date filter should return only future valid rates")
@DisplayName("Container rates with future date filter should return only future valid rates")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testContainerRates_WithFutureDateFilter_ShouldReturnOnlyFutureValidRates() throws Exception {
@ -83,6 +95,8 @@ class RateControllerAdvancedIntegrationTest {
}
@Test
@Story("Container rates with past date filter should return historical rates")
@DisplayName("Container rates with past date filter should return historical rates")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testContainerRates_WithPastDateFilter_ShouldReturnHistoricalRates() throws Exception {
@ -98,6 +112,8 @@ class RateControllerAdvancedIntegrationTest {
// Concurrent Request Tests
@Test
@Story("Container rates with multiple filters simultaneously should prioritize validAt")
@DisplayName("Container rates with multiple filters simultaneously should prioritize validAt")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testContainerRates_MultipleFiltersSimultaneously_ShouldPrioritizeValidAt() throws Exception {
@ -115,6 +131,8 @@ class RateControllerAdvancedIntegrationTest {
// State Transition Tests
@Test
@Story("Validity period state transitions from draft to valid")
@DisplayName("Validity period state transitions from draft to valid")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testValidityPeriod_StateTransitions_DraftToValid() throws Exception {
@ -139,6 +157,8 @@ class RateControllerAdvancedIntegrationTest {
// Edge Cases for Pagination
@Test
@Story("Pagination requesting page beyond available should return empty list")
@DisplayName("Pagination requesting page beyond available should return empty list")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testPagination_RequestingPageBeyondAvailable_ShouldReturnEmptyList() throws Exception {
@ -152,6 +172,8 @@ class RateControllerAdvancedIntegrationTest {
}
@Test
@Story("Pagination with zero limit should return bad request")
@DisplayName("Pagination with zero limit should return bad request")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testPagination_WithZeroLimit_ShouldReturnBadRequest() throws Exception {
@ -164,6 +186,8 @@ class RateControllerAdvancedIntegrationTest {
// Complex Filtering Scenarios
@Test
@Story("Matrix rates filter by specific country pairs")
@DisplayName("Matrix rates filter by specific country pairs")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testMatrixRates_FilterBySpecificCountryPairs() throws Exception {
@ -176,6 +200,8 @@ class RateControllerAdvancedIntegrationTest {
// Business Logic Validation Tests
@Test
@Story("Container rates validate rate hierarchy")
@DisplayName("Container rates validate rate hierarchy")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testContainerRates_ValidateRateHierarchy() throws Exception {
@ -199,6 +225,8 @@ class RateControllerAdvancedIntegrationTest {
// Null and Empty Value Handling
@Test
@Story("Validity periods with null end dates should handle gracefully")
@DisplayName("Validity periods with null end dates should handle gracefully")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testValidityPeriods_WithNullEndDates_ShouldHandleGracefully() throws Exception {
@ -211,6 +239,8 @@ class RateControllerAdvancedIntegrationTest {
// Special Character Handling
@Test
@Story("Invalid parameters with special characters should return 400")
@DisplayName("Invalid parameters with special characters should return 400")
void testInvalidParameters_WithSpecialCharacters_ShouldReturn400() throws Exception {
mockMvc.perform(get(BASE_URL + "/container")
.param("limit", "20'; DROP TABLE container_rate; --")
@ -221,6 +251,8 @@ class RateControllerAdvancedIntegrationTest {
// Large Dataset Performance Test
@Test
@Story("Performance with large dataset should maintain response time")
@DisplayName("Performance with large dataset should maintain response time")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testPerformance_WithLargeDataset_ShouldMaintainResponseTime() throws Exception {

View file

@ -5,6 +5,9 @@ import de.avatic.lcc.dto.configuration.matrixrates.MatrixRateDTO;
import de.avatic.lcc.dto.configuration.rates.ContainerRateDTO;
import de.avatic.lcc.dto.generic.ValidityPeriodDTO;
import de.avatic.lcc.model.db.rates.ValidityPeriodState;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
@ -33,6 +36,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@SpringBootTest
@AutoConfigureMockMvc
@Transactional
@Epic("Controller")
@Feature("Configuration")
class RateControllerIntegrationTest {
private static final String BASE_URL = "/api/rates";
@ -47,6 +52,8 @@ class RateControllerIntegrationTest {
class ListContainerRatesTest {
@Test
@Story("List container rates without filter should return all rates")
@DisplayName("List container rates without filter should return all rates")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListContainerRates_WithoutFilter_ShouldReturnAllRates() throws Exception {
@ -67,6 +74,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("List container rates with pagination should return paged results")
@DisplayName("List container rates with pagination should return paged results")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListContainerRates_WithPagination_ShouldReturnPagedResults() throws Exception {
@ -81,6 +90,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("List container rates with validity period filter should return filtered rates")
@DisplayName("List container rates with validity period filter should return filtered rates")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListContainerRates_WithValidityPeriodFilter_ShouldReturnFilteredRates() throws Exception {
@ -93,6 +104,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("List container rates with validAt filter should return valid rates")
@DisplayName("List container rates with validAt filter should return valid rates")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListContainerRates_WithValidAtFilter_ShouldReturnValidRates() throws Exception {
@ -113,6 +126,8 @@ class RateControllerIntegrationTest {
class GetContainerRatesTest {
@Test
@Story("Get container rate with valid ID should return rate")
@DisplayName("Get container rate with valid ID should return rate")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testGetContainerRate_WithValidId_ShouldReturnRate() throws Exception {
@ -144,6 +159,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("Get container rate with invalid ID should return 400")
@DisplayName("Get container rate with invalid ID should return 400")
void testGetContainerRate_WithInvalidId_ShouldReturn400() throws Exception {
mockMvc.perform(get(BASE_URL + "/container/{id}", 99999)
.contentType(MediaType.APPLICATION_JSON))
@ -158,6 +175,8 @@ class RateControllerIntegrationTest {
@DisplayName("/api/rates/matrix/ - list matrix rates")
class ListMatrixRatesTest {
@Test
@Story("List matrix rates without filter should return all rates")
@DisplayName("List matrix rates without filter should return all rates")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListMatrixRates_WithoutFilter_ShouldReturnAllRates() throws Exception {
@ -171,6 +190,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("List matrix rates with valid filter should return filtered rates")
@DisplayName("List matrix rates with valid filter should return filtered rates")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListMatrixRates_WithValidFilter_ShouldReturnFilteredRates() throws Exception {
@ -185,6 +206,8 @@ class RateControllerIntegrationTest {
@DisplayName("/api/rates/matrix/id - get matrix rate detail")
class GetMatrixRatesTest {
@Test
@Story("Get matrix rate with valid ID should return rate")
@DisplayName("Get matrix rate with valid ID should return rate")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testGetMatrixRate_WithValidId_ShouldReturnRate() throws Exception {
@ -219,6 +242,8 @@ class RateControllerIntegrationTest {
@DisplayName("/api/rates/periods - List validity periods")
class ListPeriodsTest {
@Test
@Story("List periods should return all periods")
@DisplayName("List periods should return all periods")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListPeriods_ShouldReturnAllPeriods() throws Exception {
@ -234,6 +259,8 @@ class RateControllerIntegrationTest {
@DisplayName("/api/rates/periods/id - Invalidate validity periods")
class GetPeriodsTest {
@Test
@Story("Invalidate period with valid ID should return OK")
@DisplayName("Invalidate period with valid ID should return OK")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testInvalidatePeriod_WithValidId_ShouldReturnOk() throws Exception {
@ -269,6 +296,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("Invalidate period with invalid ID should return 404")
@DisplayName("Invalidate period with invalid ID should return 404")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testInvalidatePeriod_WithInvalidId_ShouldReturn404() throws Exception {
@ -284,6 +313,8 @@ class RateControllerIntegrationTest {
@DisplayName("/api/rates/staged_changes - staged changes")
class StagedChangesTest {
@Test
@Story("Check rate drafts should return boolean")
@DisplayName("Check rate drafts should return boolean")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testCheckRateDrafts_ShouldReturnBoolean() throws Exception {
@ -297,6 +328,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("Approve rate drafts should return OK")
@DisplayName("Approve rate drafts should return OK")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testApproveRateDrafts_ShouldReturnOk() throws Exception {
@ -312,6 +345,8 @@ class RateControllerIntegrationTest {
@DisplayName("/api/rates/ - Edge cases")
class EdgeCasesTest {
@Test
@Story("List container rates with invalid date format should return 400")
@DisplayName("List container rates with invalid date format should return 400")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListContainerRates_WithInvalidDateFormat_ShouldReturn400() throws Exception {
@ -322,6 +357,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("List container rates with negative limit should return 400")
@DisplayName("List container rates with negative limit should return 400")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListContainerRates_WithNegativeLimit_ShouldReturn400() throws Exception {
@ -332,6 +369,8 @@ class RateControllerIntegrationTest {
}
@Test
@Story("List matrix rates with negative page should return 400")
@DisplayName("List matrix rates with negative page should return 400")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListMatrixRates_WithNegativePage_ShouldReturn400() throws Exception {
@ -348,6 +387,8 @@ class RateControllerIntegrationTest {
@DisplayName("/api/rates/ - Performance tests")
class PerformanceTests {
@Test
@Story("List container rates with large limit should complete in reasonable time")
@DisplayName("List container rates with large limit should complete in reasonable time")
@Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
void testListContainerRates_LargeLimit_ShouldCompleteInReasonableTime() throws Exception {
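The hunks above show only the added annotations; the MockMvc bodies of these tests are cut off by the diff. As a hedged illustration of how the pieces fit together on one test method (the endpoint parameters, test name, and expected status here are illustrative, not copied from the hidden bodies):

import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.jdbc.Sql;
import org.springframework.test.web.servlet.MockMvc;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

// Illustrative sketch only: mirrors the annotation pattern added throughout this change set.
@SpringBootTest
@AutoConfigureMockMvc
@Epic("Controller")
@Feature("Configuration")
class RateControllerAnnotationPatternSketch {

    private static final String BASE_URL = "/api/rates";

    @Autowired
    private MockMvc mockMvc;

    @Test
    @Story("List container rates with default paging")
    @DisplayName("List container rates with default paging")
    @Sql(scripts = {"classpath:master_data/reduced_rate_setup.sql"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
    @Sql(scripts = {"classpath:master_data/reduced_rate_setup-cleanup.sql"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
    void listContainerRatesWithDefaultPaging() throws Exception {
        // Seed data comes from the @Sql setup script; cleanup runs after the method.
        mockMvc.perform(get(BASE_URL + "/container")
                .param("limit", "20")
                .contentType(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk());
    }
}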

View file

@ -1,4 +1,9 @@
package de.avatic.lcc.controller.report;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
@Epic("Controller")
@Feature("Report")
public class ReportingControllerIntegrationTest {
}

View file

@ -0,0 +1,333 @@
package de.avatic.lcc.database.dialect;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
/**
* Unit tests for {@link MSSQLDialectProvider}.
*/
@Epic("Database Layer")
@Feature("MSSQL Dialect")
@DisplayName("MSSQLDialectProvider Tests")
class MSSQLDialectProviderTest {
private MSSQLDialectProvider provider;
@BeforeEach
void setUp() {
provider = new MSSQLDialectProvider();
}
@Nested
@DisplayName("Metadata Tests")
class MetadataTests {
@Test
@Story("Return correct dialect name")
@DisplayName("Should return correct dialect name")
void shouldReturnCorrectDialectName() {
assertEquals("Microsoft SQL Server", provider.getDialectName());
}
@Test
@Story("Return correct driver class name")
@DisplayName("Should return correct driver class name")
void shouldReturnCorrectDriverClassName() {
assertEquals("com.microsoft.sqlserver.jdbc.SQLServerDriver", provider.getDriverClassName());
}
}
@Nested
@DisplayName("Pagination Tests")
class PaginationTests {
@Test
@Story("Build correct pagination clause with OFFSET/FETCH")
@DisplayName("Should build correct pagination clause with OFFSET/FETCH")
void shouldBuildCorrectPaginationClause() {
String result = provider.buildPaginationClause(10, 20);
assertEquals("OFFSET ? ROWS FETCH NEXT ? ROWS ONLY", result);
}
@Test
@Story("Return pagination parameters in correct order (offset, limit)")
@DisplayName("Should return pagination parameters in correct order (offset, limit)")
void shouldReturnPaginationParametersInCorrectOrder() {
Object[] params = provider.getPaginationParameters(10, 20);
// MSSQL: offset first, then limit (reversed from MySQL)
assertArrayEquals(new Object[]{20, 10}, params);
}
}
@Nested
@DisplayName("Upsert Operation Tests")
class UpsertOperationTests {
@Test
@Story("Build correct MERGE statement")
@DisplayName("Should build correct MERGE statement")
void shouldBuildCorrectMergeStatement() {
List<String> uniqueCols = Arrays.asList("id", "user_id");
List<String> insertCols = Arrays.asList("id", "user_id", "name", "value");
List<String> updateCols = Arrays.asList("name", "value");
String result = provider.buildUpsertStatement("test_table", uniqueCols, insertCols, updateCols);
assertTrue(result.contains("MERGE INTO test_table AS target"));
assertTrue(result.contains("USING (SELECT"));
assertTrue(result.contains("ON target.id = source.id AND target.user_id = source.user_id"));
assertTrue(result.contains("WHEN MATCHED THEN UPDATE SET"));
assertTrue(result.contains("WHEN NOT MATCHED THEN INSERT"));
assertTrue(result.contains("name = source.name"));
assertTrue(result.contains("value = source.value"));
}
@Test
@Story("Build correct conditional INSERT statement")
@DisplayName("Should build correct conditional INSERT statement")
void shouldBuildCorrectInsertIgnoreStatement() {
List<String> columns = Arrays.asList("user_id", "group_id");
List<String> uniqueCols = Arrays.asList("user_id", "group_id");
String result = provider.buildInsertIgnoreStatement("mapping_table", columns, uniqueCols);
assertTrue(result.contains("IF NOT EXISTS"));
assertTrue(result.contains("SELECT 1 FROM mapping_table"));
assertTrue(result.contains("WHERE user_id = ? AND group_id = ?"));
assertTrue(result.contains("INSERT INTO mapping_table (user_id, group_id) VALUES (?, ?)"));
}
}
@Nested
@DisplayName("Locking Strategy Tests")
class LockingStrategyTests {
@Test
@Story("Build WITH (UPDLOCK, READPAST) for SKIP LOCKED equivalent")
@DisplayName("Should build WITH (UPDLOCK, READPAST) for SKIP LOCKED equivalent")
void shouldBuildSelectForUpdateSkipLocked() {
String baseQuery = "SELECT * FROM calculation_job WHERE state = 'CREATED'";
String result = provider.buildSelectForUpdateSkipLocked(baseQuery);
assertTrue(result.contains("WITH (UPDLOCK, READPAST)"));
assertTrue(result.contains("FROM calculation_job WITH (UPDLOCK, READPAST)"));
}
@Test
@Story("Build WITH (UPDLOCK, ROWLOCK) for standard locking")
@DisplayName("Should build WITH (UPDLOCK, ROWLOCK) for standard locking")
void shouldBuildSelectForUpdate() {
String baseQuery = "SELECT * FROM calculation_job WHERE id = ?";
String result = provider.buildSelectForUpdate(baseQuery);
assertTrue(result.contains("WITH (UPDLOCK, ROWLOCK)"));
assertTrue(result.contains("FROM calculation_job WITH (UPDLOCK, ROWLOCK)"));
assertFalse(result.contains("READPAST"));
}
}
@Nested
@DisplayName("Date/Time Function Tests")
class DateTimeFunctionTests {
@Test
@Story("Return GETDATE() for current timestamp")
@DisplayName("Should return GETDATE() for current timestamp")
void shouldReturnGetDateForCurrentTimestamp() {
assertEquals("GETDATE()", provider.getCurrentTimestamp());
}
@Test
@Story("Build date subtraction with GETDATE() using DATEADD")
@DisplayName("Should build date subtraction with GETDATE() using DATEADD")
void shouldBuildDateSubtractionWithGetDate() {
String result = provider.buildDateSubtraction(null, "3", SqlDialectProvider.DateUnit.DAY);
assertEquals("DATEADD(DAY, -3, GETDATE())", result);
}
@Test
@Story("Build date subtraction with custom base date")
@DisplayName("Should build date subtraction with custom base date")
void shouldBuildDateSubtractionWithCustomBaseDate() {
String result = provider.buildDateSubtraction("calculation_date", "60", SqlDialectProvider.DateUnit.MINUTE);
assertEquals("DATEADD(MINUTE, -60, calculation_date)", result);
}
@Test
@Story("Build date addition with GETDATE() using DATEADD")
@DisplayName("Should build date addition with GETDATE() using DATEADD")
void shouldBuildDateAdditionWithGetDate() {
String result = provider.buildDateAddition(null, "7", SqlDialectProvider.DateUnit.DAY);
assertEquals("DATEADD(DAY, 7, GETDATE())", result);
}
@Test
@Story("Build date addition with custom base date")
@DisplayName("Should build date addition with custom base date")
void shouldBuildDateAdditionWithCustomBaseDate() {
String result = provider.buildDateAddition("start_date", "1", SqlDialectProvider.DateUnit.MONTH);
assertEquals("DATEADD(MONTH, 1, start_date)", result);
}
@Test
@Story("Extract date from column using CAST")
@DisplayName("Should extract date from column using CAST")
void shouldExtractDateFromColumn() {
String result = provider.extractDate("created_at");
assertEquals("CAST(created_at AS DATE)", result);
}
@Test
@Story("Extract date from expression using CAST")
@DisplayName("Should extract date from expression using CAST")
void shouldExtractDateFromExpression() {
String result = provider.extractDate("GETDATE()");
assertEquals("CAST(GETDATE() AS DATE)", result);
}
}
@Nested
@DisplayName("Auto-increment Reset Tests")
class AutoIncrementResetTests {
@Test
@Story("Build DBCC CHECKIDENT reset statement")
@DisplayName("Should build DBCC CHECKIDENT reset statement")
void shouldBuildAutoIncrementResetStatement() {
String result = provider.buildAutoIncrementReset("test_table");
assertEquals("DBCC CHECKIDENT ('test_table', RESEED, 0)", result);
}
}
@Nested
@DisplayName("Geospatial Distance Tests")
class GeospatialDistanceTests {
@Test
@Story("Build Haversine distance calculation in kilometers")
@DisplayName("Should build Haversine distance calculation in kilometers")
void shouldBuildHaversineDistanceCalculation() {
String result = provider.buildHaversineDistance("50.1", "8.6", "node.geo_lat", "node.geo_lng");
// MSSQL uses 6371 km (not 6371000 m like MySQL)
assertTrue(result.contains("6371"));
assertFalse(result.contains("6371000")); // Should NOT be in meters
assertTrue(result.contains("ACOS"));
assertTrue(result.contains("COS"));
assertTrue(result.contains("SIN"));
assertTrue(result.contains("RADIANS"));
assertTrue(result.contains("50.1"));
assertTrue(result.contains("8.6"));
assertTrue(result.contains("node.geo_lat"));
assertTrue(result.contains("node.geo_lng"));
}
}
@Nested
@DisplayName("String/Type Function Tests")
class StringTypeFunctionTests {
@Test
@Story("Build CONCAT with multiple expressions")
@DisplayName("Should build CONCAT with multiple expressions")
void shouldBuildConcatWithMultipleExpressions() {
String result = provider.buildConcat("first_name", "' '", "last_name");
assertEquals("CONCAT(first_name, ' ', last_name)", result);
}
@Test
@Story("Build CONCAT with single expression")
@DisplayName("Should build CONCAT with single expression")
void shouldBuildConcatWithSingleExpression() {
String result = provider.buildConcat("column_name");
assertEquals("CONCAT(column_name)", result);
}
@Test
@Story("Cast to string using VARCHAR")
@DisplayName("Should cast to string using VARCHAR")
void shouldCastToString() {
String result = provider.castToString("user_id");
assertEquals("CAST(user_id AS VARCHAR(MAX))", result);
}
}
@Nested
@DisplayName("Bulk Operation Tests")
class BulkOperationTests {
@Test
@Story("Return INT max value for MSSQL")
@DisplayName("Should return INT max value for MSSQL")
void shouldReturnMSSQLIntMaxValue() {
// MSSQL returns INT max value (not BIGINT)
assertEquals("2147483647", provider.getMaxLimitValue());
}
@Test
@Story("Support RETURNING clause via OUTPUT")
@DisplayName("Should support RETURNING clause via OUTPUT")
void shouldSupportReturningClause() {
assertTrue(provider.supportsReturningClause());
}
@Test
@Story("Build OUTPUT clause for RETURNING")
@DisplayName("Should build OUTPUT clause for RETURNING")
void shouldBuildOutputClause() {
String result = provider.buildReturningClause("id", "name", "created_at");
assertEquals("OUTPUT INSERTED.id, INSERTED.name, INSERTED.created_at", result);
}
}
@Nested
@DisplayName("Schema/DDL Tests")
class SchemaDDLTests {
@Test
@Story("Return IDENTITY definition")
@DisplayName("Should return IDENTITY definition")
void shouldReturnIdentityDefinition() {
String result = provider.getAutoIncrementDefinition();
assertEquals("IDENTITY(1,1)", result);
}
@Test
@Story("Return DATETIME2 with default for timestamp")
@DisplayName("Should return DATETIME2 with default for timestamp")
void shouldReturnDateTimeWithDefaultDefinition() {
String result = provider.getTimestampDefinition();
assertEquals("DATETIME2 DEFAULT GETDATE()", result);
}
}
@Nested
@DisplayName("Boolean Literal Tests")
class BooleanLiteralTests {
@Test
@Story("Return '1' for boolean true")
@DisplayName("Should return '1' for boolean true")
void shouldReturnOneForBooleanTrue() {
assertEquals("1", provider.getBooleanTrue());
}
@Test
@Story("Return '0' for boolean false")
@DisplayName("Should return '0' for boolean false")
void shouldReturnZeroForBooleanFalse() {
assertEquals("0", provider.getBooleanFalse());
}
}
}
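The assertions above pin down the MSSQL-specific fragments the provider must emit. As a hedged sketch of how calling code could consume this contract (the repository class, the SELECT statement, and the JdbcTemplate wiring shown here are assumptions, not part of this change set):

import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.JdbcTemplate;

// Hypothetical caller illustrating the pagination contract verified above.
class PaginatedRateQuerySketch {

    private final JdbcTemplate jdbcTemplate;
    private final SqlDialectProvider provider; // MSSQLDialectProvider or MySQLDialectProvider

    PaginatedRateQuerySketch(JdbcTemplate jdbcTemplate, SqlDialectProvider provider) {
        this.jdbcTemplate = jdbcTemplate;
        this.provider = provider;
    }

    List<Map<String, Object>> listContainerRates(int limit, int offset) {
        // MSSQL: "OFFSET ? ROWS FETCH NEXT ? ROWS ONLY" with parameters {offset, limit};
        // MySQL: "LIMIT ? OFFSET ?" with parameters {limit, offset}. The provider hides the difference.
        String sql = "SELECT id FROM container_rate ORDER BY id " + provider.buildPaginationClause(limit, offset);
        Object[] params = provider.getPaginationParameters(limit, offset);
        return jdbcTemplate.queryForList(sql, params);
    }
}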

View file

@ -0,0 +1,311 @@
package de.avatic.lcc.database.dialect;
import io.qameta.allure.Epic;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
/**
* Unit tests for {@link MySQLDialectProvider}.
*/
@Epic("Database Layer")
@Feature("MySQL Dialect")
@DisplayName("MySQLDialectProvider Tests")
class MySQLDialectProviderTest {
private MySQLDialectProvider provider;
@BeforeEach
void setUp() {
provider = new MySQLDialectProvider();
}
@Nested
@DisplayName("Metadata Tests")
class MetadataTests {
@Test
@Story("Return correct dialect name")
@DisplayName("Should return correct dialect name")
void shouldReturnCorrectDialectName() {
assertEquals("MySQL", provider.getDialectName());
}
@Test
@Story("Return correct driver class name")
@DisplayName("Should return correct driver class name")
void shouldReturnCorrectDriverClassName() {
assertEquals("com.mysql.cj.jdbc.Driver", provider.getDriverClassName());
}
}
@Nested
@DisplayName("Pagination Tests")
class PaginationTests {
@Test
@Story("Build correct pagination clause")
@DisplayName("Should build correct pagination clause")
void shouldBuildCorrectPaginationClause() {
String result = provider.buildPaginationClause(10, 20);
assertEquals("LIMIT ? OFFSET ?", result);
}
@Test
@Story("Return pagination parameters in correct order")
@DisplayName("Should return pagination parameters in correct order")
void shouldReturnPaginationParametersInCorrectOrder() {
Object[] params = provider.getPaginationParameters(10, 20);
assertArrayEquals(new Object[]{10, 20}, params);
}
}
@Nested
@DisplayName("Upsert Operation Tests")
class UpsertOperationTests {
@Test
@Story("Build correct upsert statement")
@DisplayName("Should build correct upsert statement")
void shouldBuildCorrectUpsertStatement() {
List<String> uniqueCols = Arrays.asList("id", "user_id");
List<String> insertCols = Arrays.asList("id", "user_id", "name", "value");
List<String> updateCols = Arrays.asList("name", "value");
String result = provider.buildUpsertStatement("test_table", uniqueCols, insertCols, updateCols);
assertTrue(result.contains("INSERT INTO test_table"));
assertTrue(result.contains("(id, user_id, name, value)"));
assertTrue(result.contains("VALUES (?, ?, ?, ?)"));
assertTrue(result.contains("ON DUPLICATE KEY UPDATE"));
assertTrue(result.contains("name = VALUES(name)"));
assertTrue(result.contains("value = VALUES(value)"));
}
@Test
@Story("Build correct insert ignore statement")
@DisplayName("Should build correct insert ignore statement")
void shouldBuildCorrectInsertIgnoreStatement() {
List<String> columns = Arrays.asList("user_id", "group_id");
List<String> uniqueCols = Arrays.asList("user_id", "group_id");
String result = provider.buildInsertIgnoreStatement("mapping_table", columns, uniqueCols);
assertEquals("INSERT IGNORE INTO mapping_table (user_id, group_id) VALUES (?, ?)", result);
}
}
@Nested
@DisplayName("Locking Strategy Tests")
class LockingStrategyTests {
@Test
@Story("Build SELECT FOR UPDATE SKIP LOCKED")
@DisplayName("Should build SELECT FOR UPDATE SKIP LOCKED")
void shouldBuildSelectForUpdateSkipLocked() {
String baseQuery = "SELECT * FROM calculation_job WHERE state = 'CREATED'";
String result = provider.buildSelectForUpdateSkipLocked(baseQuery);
assertTrue(result.endsWith("FOR UPDATE SKIP LOCKED"));
assertTrue(result.startsWith("SELECT * FROM calculation_job"));
}
@Test
@Story("Build SELECT FOR UPDATE")
@DisplayName("Should build SELECT FOR UPDATE")
void shouldBuildSelectForUpdate() {
String baseQuery = "SELECT * FROM calculation_job WHERE id = ?";
String result = provider.buildSelectForUpdate(baseQuery);
assertTrue(result.endsWith("FOR UPDATE"));
assertFalse(result.contains("SKIP LOCKED"));
}
}
@Nested
@DisplayName("Date/Time Function Tests")
class DateTimeFunctionTests {
@Test
@Story("Return NOW() for current timestamp")
@DisplayName("Should return NOW() for current timestamp")
void shouldReturnNowForCurrentTimestamp() {
assertEquals("NOW()", provider.getCurrentTimestamp());
}
@Test
@Story("Build date subtraction with NOW()")
@DisplayName("Should build date subtraction with NOW()")
void shouldBuildDateSubtractionWithNow() {
String result = provider.buildDateSubtraction(null, "3", SqlDialectProvider.DateUnit.DAY);
assertEquals("DATE_SUB(NOW(), INTERVAL 3 DAY)", result);
}
@Test
@Story("Build date subtraction with custom base date")
@DisplayName("Should build date subtraction with custom base date")
void shouldBuildDateSubtractionWithCustomBaseDate() {
String result = provider.buildDateSubtraction("calculation_date", "60", SqlDialectProvider.DateUnit.MINUTE);
assertEquals("DATE_SUB(calculation_date, INTERVAL 60 MINUTE)", result);
}
@Test
@Story("Build date addition with NOW()")
@DisplayName("Should build date addition with NOW()")
void shouldBuildDateAdditionWithNow() {
String result = provider.buildDateAddition(null, "7", SqlDialectProvider.DateUnit.DAY);
assertEquals("DATE_ADD(NOW(), INTERVAL 7 DAY)", result);
}
@Test
@Story("Build date addition with custom base date")
@DisplayName("Should build date addition with custom base date")
void shouldBuildDateAdditionWithCustomBaseDate() {
String result = provider.buildDateAddition("start_date", "1", SqlDialectProvider.DateUnit.MONTH);
assertEquals("DATE_ADD(start_date, INTERVAL 1 MONTH)", result);
}
@Test
@Story("Extract date from column")
@DisplayName("Should extract date from column")
void shouldExtractDateFromColumn() {
String result = provider.extractDate("created_at");
assertEquals("DATE(created_at)", result);
}
@Test
@Story("Extract date from expression")
@DisplayName("Should extract date from expression")
void shouldExtractDateFromExpression() {
String result = provider.extractDate("NOW()");
assertEquals("DATE(NOW())", result);
}
}
@Nested
@DisplayName("Auto-increment Reset Tests")
class AutoIncrementResetTests {
@Test
@Story("Build auto-increment reset statement")
@DisplayName("Should build auto-increment reset statement")
void shouldBuildAutoIncrementResetStatement() {
String result = provider.buildAutoIncrementReset("test_table");
assertEquals("ALTER TABLE test_table AUTO_INCREMENT = 1", result);
}
}
@Nested
@DisplayName("Geospatial Distance Tests")
class GeospatialDistanceTests {
@Test
@Story("Build Haversine distance calculation in kilometers")
@DisplayName("Should build Haversine distance calculation in kilometers")
void shouldBuildHaversineDistanceCalculation() {
String result = provider.buildHaversineDistance("50.1", "8.6", "node.geo_lat", "node.geo_lng");
// MySQL now uses 6371 km (not 6371000 m) for consistency with MSSQL
assertTrue(result.contains("6371"));
assertFalse(result.contains("6371000")); // Should NOT be in meters
assertTrue(result.contains("ACOS"));
assertTrue(result.contains("COS"));
assertTrue(result.contains("SIN"));
assertTrue(result.contains("RADIANS"));
assertTrue(result.contains("50.1"));
assertTrue(result.contains("8.6"));
assertTrue(result.contains("node.geo_lat"));
assertTrue(result.contains("node.geo_lng"));
}
}
@Nested
@DisplayName("String/Type Function Tests")
class StringTypeFunctionTests {
@Test
@Story("Build CONCAT with multiple expressions")
@DisplayName("Should build CONCAT with multiple expressions")
void shouldBuildConcatWithMultipleExpressions() {
String result = provider.buildConcat("first_name", "' '", "last_name");
assertEquals("CONCAT(first_name, ' ', last_name)", result);
}
@Test
@Story("Build CONCAT with single expression")
@DisplayName("Should build CONCAT with single expression")
void shouldBuildConcatWithSingleExpression() {
String result = provider.buildConcat("column_name");
assertEquals("CONCAT(column_name)", result);
}
@Test
@Story("Cast to string")
@DisplayName("Should cast to string")
void shouldCastToString() {
String result = provider.castToString("user_id");
assertEquals("CAST(user_id AS CHAR)", result);
}
}
@Nested
@DisplayName("Bulk Operation Tests")
class BulkOperationTests {
@Test
@Story("Return MySQL BIGINT UNSIGNED max value")
@DisplayName("Should return MySQL BIGINT UNSIGNED max value")
void shouldReturnMySQLBigIntUnsignedMaxValue() {
assertEquals("18446744073709551615", provider.getMaxLimitValue());
}
@Test
@Story("Not support RETURNING clause")
@DisplayName("Should not support RETURNING clause")
void shouldNotSupportReturningClause() {
assertFalse(provider.supportsReturningClause());
}
@Test
@Story("Throw exception when building RETURNING clause")
@DisplayName("Should throw exception when building RETURNING clause")
void shouldThrowExceptionWhenBuildingReturningClause() {
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> provider.buildReturningClause("id", "name")
);
assertTrue(exception.getMessage().contains("MySQL does not support RETURNING"));
assertTrue(exception.getMessage().contains("LAST_INSERT_ID"));
}
}
@Nested
@DisplayName("Schema/DDL Tests")
class SchemaDDLTests {
@Test
@Story("Return AUTO_INCREMENT definition")
@DisplayName("Should return AUTO_INCREMENT definition")
void shouldReturnAutoIncrementDefinition() {
String result = provider.getAutoIncrementDefinition();
assertEquals("INT NOT NULL AUTO_INCREMENT", result);
}
@Test
@Story("Return TIMESTAMP with ON UPDATE definition")
@DisplayName("Should return TIMESTAMP with ON UPDATE definition")
void shouldReturnTimestampWithOnUpdateDefinition() {
String result = provider.getTimestampDefinition();
assertEquals("TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP", result);
}
}
}
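Read next to the MSSQL test above, these assertions document how the same call produces dialect-specific SQL. A minimal comparison sketch (the main method is purely illustrative and assumes both provider classes implement the SqlDialectProvider interface referenced by the tests):

class DialectComparisonSketch {
    public static void main(String[] args) {
        SqlDialectProvider mysql = new MySQLDialectProvider();
        SqlDialectProvider mssql = new MSSQLDialectProvider();

        // Pagination clause: "LIMIT ? OFFSET ?" vs "OFFSET ? ROWS FETCH NEXT ? ROWS ONLY"
        System.out.println(mysql.buildPaginationClause(10, 20));
        System.out.println(mssql.buildPaginationClause(10, 20));

        // Current timestamp: NOW() vs GETDATE()
        System.out.println(mysql.getCurrentTimestamp());
        System.out.println(mssql.getCurrentTimestamp());

        // Auto-increment reset: ALTER TABLE ... AUTO_INCREMENT = 1 vs DBCC CHECKIDENT (..., RESEED, 0)
        System.out.println(mysql.buildAutoIncrementReset("test_table"));
        System.out.println(mssql.buildAutoIncrementReset("test_table"));
    }
}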

View file

@ -0,0 +1,150 @@
package de.avatic.lcc.e2e.config;
import com.microsoft.playwright.Browser;
import com.microsoft.playwright.BrowserContext;
import com.microsoft.playwright.BrowserType;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.Playwright;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.logging.Logger;
/**
* Configuration and factory class for Playwright browser instances.
* Provides centralized configuration for E2E tests.
*/
public class PlaywrightTestConfiguration {
private static final Logger logger = Logger.getLogger(PlaywrightTestConfiguration.class.getName());
// Default configuration values
public static final String DEFAULT_BASE_URL = "http://localhost:5173";
public static final boolean DEFAULT_HEADLESS = true;
public static final int DEFAULT_VIEWPORT_WIDTH = 1920;
public static final int DEFAULT_VIEWPORT_HEIGHT = 1080;
public static final double DEFAULT_TOLERANCE = 0.01; // 1%
public static final Path SCREENSHOTS_DIR = Paths.get("target/screenshots");
public static final Path TRACES_DIR = Paths.get("target/traces");
private Playwright playwright;
private Browser browser;
private final boolean headless;
private final String baseUrl;
private final int viewportWidth;
private final int viewportHeight;
public PlaywrightTestConfiguration() {
this(
System.getProperty("e2e.baseUrl", DEFAULT_BASE_URL),
Boolean.parseBoolean(System.getProperty("playwright.headless", String.valueOf(DEFAULT_HEADLESS))),
Integer.parseInt(System.getProperty("playwright.viewport.width", String.valueOf(DEFAULT_VIEWPORT_WIDTH))),
Integer.parseInt(System.getProperty("playwright.viewport.height", String.valueOf(DEFAULT_VIEWPORT_HEIGHT)))
);
}
public PlaywrightTestConfiguration(String baseUrl, boolean headless, int viewportWidth, int viewportHeight) {
this.baseUrl = baseUrl;
this.headless = headless;
this.viewportWidth = viewportWidth;
this.viewportHeight = viewportHeight;
}
/**
* Initializes Playwright and launches the browser.
* Must be called before creating pages.
*/
public void initialize() {
logger.info("Initializing Playwright");
playwright = Playwright.create();
browser = playwright.chromium().launch(
new BrowserType.LaunchOptions()
.setHeadless(headless)
.setSlowMo(headless ? 0 : 100)
);
logger.info(() -> String.format(
"Playwright initialized. Headless: %s, Base URL: %s, Viewport: %dx%d",
headless, baseUrl, viewportWidth, viewportHeight
));
}
/**
* Creates a new browser context with default settings.
*/
public BrowserContext createContext() {
return browser.newContext(new Browser.NewContextOptions()
.setViewportSize(viewportWidth, viewportHeight)
);
}
/**
* Creates a new browser context with tracing enabled.
*/
public BrowserContext createContextWithTracing(String traceName) {
BrowserContext context = createContext();
context.tracing().start(new com.microsoft.playwright.Tracing.StartOptions()
.setScreenshots(true)
.setSnapshots(true)
.setSources(true)
);
return context;
}
/**
* Stops tracing and saves it to a file.
*/
public void stopTracing(BrowserContext context, String traceName) {
context.tracing().stop(new com.microsoft.playwright.Tracing.StopOptions()
.setPath(TRACES_DIR.resolve(traceName + ".zip"))
);
}
/**
* Creates a new page in a new context.
*/
public Page createPage() {
BrowserContext context = createContext();
return context.newPage();
}
/**
* Closes the browser and Playwright instance.
*/
public void close() {
if (browser != null) {
browser.close();
}
if (playwright != null) {
playwright.close();
}
logger.info("Playwright closed");
}
// Getters
public String getBaseUrl() {
return baseUrl;
}
public boolean isHeadless() {
return headless;
}
public int getViewportWidth() {
return viewportWidth;
}
public int getViewportHeight() {
return viewportHeight;
}
public Browser getBrowser() {
return browser;
}
public Playwright getPlaywright() {
return playwright;
}
}
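A hedged usage sketch for this configuration class (the test class name and the assertion-free body are illustrative, not part of this change): a JUnit 5 E2E test would typically drive the lifecycle like this.

import com.microsoft.playwright.Page;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;

@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class PlaywrightLifecycleSketchTest {

    private final PlaywrightTestConfiguration config = new PlaywrightTestConfiguration();

    @BeforeAll
    void startBrowser() {
        config.initialize();                 // create Playwright and launch Chromium with the configured options
    }

    @Test
    void opensTheApplication() {
        Page page = config.createPage();     // fresh context and page
        page.navigate(config.getBaseUrl());  // defaults to http://localhost:5173 unless e2e.baseUrl is set
        page.waitForLoadState();             // wait until the SPA has finished loading
    }

    @AfterAll
    void stopBrowser() {
        config.close();                      // close the browser and the Playwright instance
    }
}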

View file

@ -0,0 +1,123 @@
package de.avatic.lcc.e2e.config;
import de.avatic.lcc.config.LccOidcUser;
import de.avatic.lcc.config.filter.DevUserEmulationFilter;
import de.avatic.lcc.model.db.users.User;
import de.avatic.lcc.repositories.users.UserRepository;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.oauth2.core.oidc.OidcIdToken;
import org.springframework.security.oauth2.core.oidc.OidcUserInfo;
import org.springframework.security.web.authentication.preauth.PreAuthenticatedAuthenticationToken;
import org.springframework.web.filter.OncePerRequestFilter;
import java.io.IOException;
import java.time.Instant;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Filter that automatically logs in a test user when running E2E tests.
* This bypasses the need to manually select a user on the /dev page.
*/
public class TestAutoLoginFilter extends OncePerRequestFilter {
private static final Logger log = LoggerFactory.getLogger(TestAutoLoginFilter.class);
private static final String TEST_USER_EMAIL = "john.doe@test.com";
private static final String DEV_USER_ID_SESSION_KEY = "dev.emulated.user.id";
private final UserRepository userRepository;
public TestAutoLoginFilter(UserRepository userRepository) {
this.userRepository = userRepository;
}
@Override
protected void doFilterInternal(@NotNull HttpServletRequest request,
@NotNull HttpServletResponse response,
@NotNull FilterChain filterChain) throws ServletException, IOException {
HttpSession session = request.getSession(true);
Integer emulatedUserId = (Integer) session.getAttribute(DEV_USER_ID_SESSION_KEY);
// If no user is selected, auto-login the test user
if (emulatedUserId == null) {
try {
User testUser = userRepository.getByEmail(TEST_USER_EMAIL);
if (testUser != null) {
log.debug("TestAutoLoginFilter - Auto-logging in test user: {}", TEST_USER_EMAIL);
session.setAttribute(DEV_USER_ID_SESSION_KEY, testUser.getId());
setEmulatedUser(testUser);
} else {
log.warn("TestAutoLoginFilter - Test user {} not found", TEST_USER_EMAIL);
}
} catch (Exception e) {
log.debug("TestAutoLoginFilter - Could not auto-login: {}", e.getMessage());
}
} else {
// User is already selected, set authentication
User user = userRepository.getById(emulatedUserId);
if (user != null) {
setEmulatedUser(user);
}
}
filterChain.doFilter(request, response);
}
private void setEmulatedUser(User user) {
Set<GrantedAuthority> authorities = new HashSet<>();
user.getGroups().forEach(group ->
authorities.add(new SimpleGrantedAuthority("ROLE_" + group.getName().toUpperCase()))
);
// Create a mock OIDC user
Map<String, Object> claims = new HashMap<>();
claims.put("sub", user.getId().toString());
claims.put("email", user.getEmail());
claims.put("preferred_username", user.getEmail());
claims.put("name", user.getFirstName() + " " + user.getLastName());
if (user.getWorkdayId() != null) {
claims.put("workday_id", user.getWorkdayId());
}
OidcIdToken idToken = new OidcIdToken(
"mock-token",
Instant.now(),
Instant.now().plusSeconds(3600),
claims
);
OidcUserInfo userInfo = new OidcUserInfo(claims);
LccOidcUser oidcUser = new LccOidcUser(
authorities,
idToken,
userInfo,
"preferred_username",
user.getId()
);
var authentication = new PreAuthenticatedAuthenticationToken(
oidcUser,
null,
authorities
);
SecurityContext context = SecurityContextHolder.createEmptyContext();
context.setAuthentication(authentication);
SecurityContextHolder.setContext(context);
}
}
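How this filter is wired into the test profile is not visible in this diff; one common approach, shown here purely as an assumption rather than the project's confirmed setup, is a FilterRegistrationBean in a test-profile configuration:

import de.avatic.lcc.repositories.users.UserRepository;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;

@Configuration
@Profile("test")
class TestAutoLoginFilterRegistrationSketch {

    @Bean
    FilterRegistrationBean<TestAutoLoginFilter> testAutoLoginFilter(UserRepository userRepository) {
        FilterRegistrationBean<TestAutoLoginFilter> registration =
                new FilterRegistrationBean<>(new TestAutoLoginFilter(userRepository));
        registration.addUrlPatterns("/*"); // run for every request, matching the session check inside the filter
        registration.setOrder(1);          // run early so the emulated user is in place for downstream filters
        return registration;
    }
}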

View file

@ -0,0 +1,44 @@
package de.avatic.lcc.e2e.config;
import org.jetbrains.annotations.NotNull;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.resource.PathResourceResolver;
import java.io.IOException;
/**
* Frontend configuration for E2E tests.
* Serves index.html for Vue Router to handle SPA routes.
*/
@Configuration
@Profile("test")
public class TestFrontendConfig implements WebMvcConfigurer {
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
// Handle all requests by serving index.html for non-existent resources
// This allows Vue Router to handle SPA routes like /dev
registry.addResourceHandler("/**")
.addResourceLocations("classpath:/static/")
.resourceChain(true)
.addResolver(new PathResourceResolver() {
@Override
protected Resource getResource(@NotNull String resourcePath, @NotNull Resource location) throws IOException {
Resource requestedResource = location.createRelative(resourcePath);
// If the resource exists, serve it
if (requestedResource.exists() && requestedResource.isReadable()) {
return requestedResource;
}
// Otherwise, serve index.html for Vue Router to handle
return new ClassPathResource("static/index.html");
}
});
}
}
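As a hedged illustration of the fallback behaviour (assuming a built frontend with static/index.html on the test classpath and no security rule blocking the route), a MockMvc check could look like this:

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.web.servlet.MockMvc;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@SpringBootTest
@AutoConfigureMockMvc
@ActiveProfiles("test")
class SpaFallbackSketchTest {

    @Autowired
    private MockMvc mockMvc;

    @Test
    void unknownRouteFallsBackToIndexHtml() throws Exception {
        // "/dev" is not a file under classpath:/static, so the resolver above serves static/index.html instead
        mockMvc.perform(get("/dev"))
                .andExpect(status().isOk());
    }
}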

View file

@ -0,0 +1,221 @@
package de.avatic.lcc.e2e.pages;
import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.AriaRole;
import com.microsoft.playwright.options.WaitForSelectorState;
import de.avatic.lcc.e2e.testdata.TestCaseInput;
import java.util.logging.Logger;
/**
* Page Object for the calculation assistant page.
* Handles part number entry, supplier selection, and calculation creation.
*/
public class AssistantPage extends BasePage {
private static final Logger logger = Logger.getLogger(AssistantPage.class.getName());
// Selectors - using more robust selectors
private static final String PART_NUMBER_INPUT = "textarea"; // simplified - typically only one textarea on the page
private static final String ANALYZE_BUTTON_TEXT = "Analyze input";
private static final String SUPPLIER_SEARCH_INPUT = "input[type='text']"; // fallback, may need refinement
private static final String LOAD_FROM_PREVIOUS_CHECKBOX = ".checkbox-item";
private static final String CREATE_CALCULATION_BUTTON_TEXT = "Create";
private static final String DELETE_SUPPLIER_BUTTON = ".icon-btn";
public AssistantPage(Page page) {
super(page);
}
/**
* Navigates to the assistant page.
* The part number modal opens automatically by design.
*/
public void navigate(String baseUrl) {
page.navigate(baseUrl + "/assistant");
waitForSpaNavigation("/assistant");
// Wait for the part number modal to appear (it opens automatically)
Locator modal = page.locator(".part-number-modal-container");
try {
modal.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(5000));
logger.info("Part number modal opened automatically");
} catch (Exception e) {
logger.info("Modal did not open automatically, will be opened manually when needed");
}
// Debug screenshot after navigation
page.screenshot(new com.microsoft.playwright.Page.ScreenshotOptions()
.setPath(java.nio.file.Paths.get("target/screenshots/debug_after_navigate.png")));
logger.info("Navigated to assistant page");
}
/**
* Enters part numbers and clicks analyze.
* Works with modal whether it's already open or needs to be opened.
*/
public void searchPartNumbers(String partNumber) {
// Check if modal is already visible
Locator modal = page.locator(".part-number-modal-container");
boolean modalVisible = false;
try {
modalVisible = modal.isVisible();
} catch (Exception e) {
modalVisible = false;
}
if (!modalVisible) {
// Modal not open, click "Drop part numbers" button to open it
logger.info("Modal not visible, clicking 'Drop part numbers' button");
Locator dropButton = page.locator("button:has-text('Drop part numbers')");
dropButton.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
dropButton.click();
// Wait for modal to appear
modal.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
} else {
logger.info("Modal already visible, proceeding with part number entry");
}
// Find and fill textarea inside modal - click first to focus, then type
Locator textarea = modal.locator("textarea");
textarea.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
textarea.click();
page.waitForTimeout(200);
textarea.fill(partNumber);
// Debug screenshot after filling
page.screenshot(new com.microsoft.playwright.Page.ScreenshotOptions()
.setPath(java.nio.file.Paths.get("target/screenshots/debug_after_fill.png")));
logger.info(() -> "Filled textarea with: " + partNumber);
// Click Analyze input button inside modal
Locator analyzeButton = modal.locator("button:has-text('Analyze input')");
analyzeButton.click();
logger.info("Clicked Analyze input button");
// Wait for modal to close after API response
page.waitForTimeout(2000); // Wait for API response
// Check if modal is still visible and wait for it to close
try {
Locator modalOverlay = page.locator(".modal-overlay");
if (modalOverlay.isVisible()) {
modalOverlay.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.HIDDEN)
.setTimeout(10000));
}
} catch (Exception e) {
logger.warning("Modal overlay check failed: " + e.getMessage());
}
// Wait for the part number to appear in the material list (not anywhere on page)
// The part number appears in: .item-list-element .supplier-item-address
try {
Locator partNumberInList = page.locator(".item-list-element .supplier-item-address:has-text('" + partNumber + "')");
partNumberInList.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(10000));
logger.info(() -> "Part number " + partNumber + " appeared in the material list");
} catch (Exception e) {
logger.warning(() -> "Part number " + partNumber + " not found in material list: " + e.getMessage());
// Take a screenshot to debug
page.screenshot(new com.microsoft.playwright.Page.ScreenshotOptions()
.setPath(java.nio.file.Paths.get("target/screenshots/debug_no_materials.png")));
// Log what materials are visible
int itemCount = page.locator(".item-list-element").count();
logger.info(() -> "Found " + itemCount + " item-list-elements on page");
}
logger.info(() -> "Searched for part number: " + partNumber);
}
/**
* Deletes all pre-selected suppliers.
* Uses specific selector to target only supplier items, not material items.
* SupplierItem has .supplier-content class with flag, MaterialItem has .material-item-text.
*/
public void deletePreselectedSuppliers() {
while (true) {
try {
// Target only delete buttons within supplier items (which have .supplier-content)
// This avoids deleting material items by mistake
Locator deleteButton = page.locator(".item-list-element:has(.supplier-content) .icon-btn").first();
deleteButton.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(1000));
deleteButton.click();
page.waitForTimeout(200);
} catch (Exception e) {
// No more supplier delete buttons
break;
}
}
logger.info("Deleted all pre-selected suppliers");
}
/**
* Selects a supplier by name using autosuggest.
*/
public void selectSupplier(String supplierName) {
// Find the search input - look for placeholder text or input near supplier section
Locator searchInput = page.locator("input[placeholder*='Search'], input[placeholder*='search'], .search-input").first();
searchInput.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
searchInput.clear();
searchInput.fill(supplierName);
page.waitForTimeout(1000);
// Click the first suggestion
Locator suggestion = page.locator(".suggestion-item, .autocomplete-item, [role='option']").first();
try {
suggestion.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(3000));
suggestion.click();
} catch (Exception e) {
// Try clicking text that matches the supplier name
page.getByText(supplierName).first().click();
}
page.waitForTimeout(500);
logger.info(() -> "Selected supplier: " + supplierName);
}
/**
* Sets the "load from previous" checkbox and creates the calculation.
*/
public void createCalculation(boolean loadFromPrevious) {
// Try to set checkbox if visible
try {
setCheckbox(LOAD_FROM_PREVIOUS_CHECKBOX, loadFromPrevious);
} catch (Exception e) {
logger.warning("Could not find load from previous checkbox, continuing...");
}
// Use specific role-based selector to avoid matching "Create Calculation" heading
// and "Create a new supplier" button
Locator createButton = page.getByRole(AriaRole.BUTTON,
new Page.GetByRoleOptions().setName("Create").setExact(true));
createButton.click();
page.waitForTimeout(500);
logger.info(() -> "Created calculation with loadFromPrevious: " + loadFromPrevious);
}
/**
* Performs the complete assistant workflow for a test case.
*/
public void completeAssistantWorkflow(String baseUrl, TestCaseInput input) {
navigate(baseUrl);
searchPartNumbers(input.partNumber());
deletePreselectedSuppliers();
selectSupplier(input.supplierName());
createCalculation(input.loadFromPrevious());
}
}
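A hedged sketch of driving the page object step by step (the Page and baseUrl come from PlaywrightTestConfiguration in a real test; the part number and supplier shown here are illustrative, not real test data):

import com.microsoft.playwright.Page;

class AssistantFlowSketch {

    // Hypothetical helper; completeAssistantWorkflow bundles the same steps behind a TestCaseInput.
    void runAssistantFlow(Page page, String baseUrl) {
        AssistantPage assistant = new AssistantPage(page);
        assistant.navigate(baseUrl);              // opens /assistant; the part-number modal opens automatically
        assistant.searchPartNumbers("PN-12345");  // illustrative part number
        assistant.deletePreselectedSuppliers();   // clear any suppliers pre-selected by the analysis
        assistant.selectSupplier("ACME GmbH");    // illustrative supplier name, picked via autosuggest
        assistant.createCalculation(true);        // tick "load from previous" and submit
    }
}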

View file

@ -0,0 +1,209 @@
package de.avatic.lcc.e2e.pages;
import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.LoadState;
import com.microsoft.playwright.options.WaitForSelectorState;
import java.util.logging.Logger;
/**
* Base class for all Playwright Page Objects.
* Provides common interaction methods for UI elements.
*/
public abstract class BasePage {
private static final Logger logger = Logger.getLogger(BasePage.class.getName());
protected final Page page;
protected BasePage(Page page) {
this.page = page;
}
/**
* Waits until the SPA navigates to a route containing the expected part.
*/
protected void waitForSpaNavigation(String expectedRoutePart) {
page.waitForURL("**" + expectedRoutePart + "**");
page.waitForLoadState(LoadState.NETWORKIDLE);
}
/**
* Waits for an element to be visible.
*/
protected Locator waitForElement(String selector) {
Locator locator = page.locator(selector);
locator.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
return locator;
}
/**
* Waits for an element to be visible with a custom timeout.
*/
protected Locator waitForElement(String selector, double timeoutMs) {
Locator locator = page.locator(selector);
locator.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(timeoutMs));
return locator;
}
/**
* Clears and fills an input field.
*/
protected void fillInput(Locator locator, String text) {
locator.clear();
locator.fill(text);
logger.info(() -> "Filled input with: " + text);
}
/**
* Clears and fills an input field by selector.
*/
protected void fillInput(String selector, String text) {
Locator locator = waitForElement(selector);
fillInput(locator, text);
}
/**
* Fills an input field if it exists, returns false if element not found.
*/
protected boolean fillInputIfExists(String selector, String text, double timeoutMs) {
try {
Locator locator = page.locator(selector);
locator.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(timeoutMs));
fillInput(locator, text);
return true;
} catch (Exception e) {
logger.warning(() -> "Element not found, skipping: " + selector);
return false;
}
}
/**
* Clicks a button by selector.
*/
protected void clickButton(String selector) {
Locator button = waitForElement(selector);
button.click();
logger.info(() -> "Clicked button: " + selector);
}
/**
* Clicks a button by its visible text.
*/
protected void clickButtonByText(String buttonText) {
Locator button = page.getByText(buttonText);
button.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
button.click();
logger.info(() -> "Clicked button with text: " + buttonText);
}
/**
* Clicks a button by its visible text with custom timeout.
*/
protected void clickButtonByText(String buttonText, double timeoutMs) {
Locator button = page.getByText(buttonText);
button.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(timeoutMs));
button.click();
logger.info(() -> "Clicked button with text: " + buttonText);
}
/**
* Sets a checkbox to the desired state.
*/
protected void setCheckbox(String labelSelector, boolean checked) {
Locator label = page.locator(labelSelector);
label.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
Locator checkbox = label.locator("input[type='checkbox']");
boolean isChecked = checkbox.isChecked();
if (isChecked != checked) {
label.click();
page.waitForTimeout(300);
logger.info(() -> "Toggled checkbox to: " + checked);
}
}
/**
* Selects an option from a dropdown menu.
*/
protected void selectDropdownOption(String triggerSelector, String optionText) {
Locator dropdownTrigger = waitForElement(triggerSelector);
// Check if already has the correct value
try {
String currentValue = dropdownTrigger.locator("span.dropdown-trigger-text").textContent();
if (optionText.equals(currentValue)) {
logger.info(() -> "Dropdown already has value: " + optionText);
return;
}
} catch (Exception ignored) {
// Continue to open dropdown
}
dropdownTrigger.click();
logger.info("Opened dropdown");
Locator menu = page.locator("ul.dropdown-menu");
menu.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
String optionXPath = String.format(
"//li[contains(@class, 'dropdown-option')][normalize-space(text())='%s']",
optionText
);
Locator option = page.locator(optionXPath);
option.click();
logger.info(() -> "Selected dropdown option: " + optionText);
page.waitForTimeout(200);
}
/**
* Searches in an autosuggest input and selects the first suggestion.
*/
protected void searchAndSelectAutosuggest(String inputSelector, String searchText) {
searchAndSelectAutosuggest(inputSelector, searchText, ".suggestion-item");
}
/**
* Searches in an autosuggest input and selects from suggestions.
*/
protected void searchAndSelectAutosuggest(String inputSelector, String searchText, String suggestionSelector) {
Locator input = waitForElement(inputSelector);
input.clear();
input.fill(searchText);
page.waitForTimeout(1000);
Locator suggestion = page.locator(suggestionSelector).first();
suggestion.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
suggestion.click();
page.waitForTimeout(500);
logger.info(() -> "Selected autosuggest for: " + searchText);
}
/**
* Waits for a modal to close.
*/
protected void waitForModalToClose() {
page.locator("div.modal-container").waitFor(
new Locator.WaitForOptions().setState(WaitForSelectorState.HIDDEN)
);
}
/**
* Takes a screenshot for debugging purposes.
*/
protected void takeScreenshot(String name) {
page.screenshot(new Page.ScreenshotOptions()
.setPath(java.nio.file.Paths.get("target/screenshots/" + name + ".png")));
}
}
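A minimal sketch of a page object built on these helpers; the SettingsPage class, the /settings route, and all selectors below are invented for illustration and are not part of the application:

    // Hypothetical example page object; route and selectors are assumptions.
    package de.avatic.lcc.e2e.pages;

    import com.microsoft.playwright.Page;

    public class SettingsPage extends BasePage {
        public SettingsPage(Page page) {
            super(page);
        }

        /** Saves a display name on an assumed settings screen. */
        public void saveDisplayName(String name) {
            waitForSpaNavigation("/settings");              // assumed SPA route
            fillInput("input[name='displayName']", name);   // assumed selector
            clickButton("button.settings-save");            // assumed selector
            waitForModalToClose();
        }
    }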


@@ -0,0 +1,679 @@
package de.avatic.lcc.e2e.pages;
import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.WaitForSelectorState;
import de.avatic.lcc.e2e.testdata.DestinationInput;
import de.avatic.lcc.e2e.testdata.TestCaseInput;
import java.util.logging.Logger;
/**
* Page Object for the calculation edit page.
* Handles form filling for materials, packaging, pricing, and destinations.
*/
public class CalculationEditPage extends BasePage {
private static final Logger logger = Logger.getLogger(CalculationEditPage.class.getName());
// Screenshot settings
private String screenshotPrefix = null;
private int destinationCounter = 0;
// Material section selectors (first master-data-item box)
// Note: Use [1] after following-sibling::div to get only the first following sibling
private static final String HS_CODE_INPUT = "//div[contains(@class, 'master-data-item')][1]//div[contains(@class, 'caption-column')][text()='HS code']/following-sibling::div[1]//input[@class='input-field']";
private static final String TARIFF_RATE_INPUT = "//div[contains(@class, 'master-data-item')][1]//div[contains(@class, 'caption-column')][contains(., 'Tariff rate')]/following-sibling::div[1]//input[@class='input-field']";
// Price section selectors (second master-data-item box)
// Note: Labels are "MEK_A [EUR]" and "Overseas share [%]" (the UI spells it "Overseas", even though the constant below is named OVERSEA_SHARE_INPUT)
private static final String PRICE_INPUT = "//div[contains(@class, 'master-data-item')][2]//div[contains(@class, 'caption-column')][contains(., 'MEK_A')]/following-sibling::div[1]//input[@class='input-field']";
private static final String OVERSEA_SHARE_INPUT = "//div[contains(@class, 'master-data-item')][2]//div[contains(@class, 'caption-column')][contains(., 'Overseas share')]/following-sibling::div[1]//input[@class='input-field']";
private static final String FCA_FEE_CHECKBOX = "//div[contains(@class, 'master-data-item')][2]//div[contains(@class, 'caption-column')][contains(., 'FCA')]/following-sibling::div[1]//label[contains(@class, 'checkbox-item')]";
// Handling Unit section selectors (third master-data-item box)
private static final String LENGTH_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU length']/following-sibling::div[1]//input[@class='input-field']";
private static final String WIDTH_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU width']/following-sibling::div[1]//input[@class='input-field']";
private static final String HEIGHT_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU height']/following-sibling::div[1]//input[@class='input-field']";
private static final String WEIGHT_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='HU weight']/following-sibling::div[1]//input[@class='input-field']";
private static final String PIECES_PER_UNIT_INPUT = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='Pieces per HU']/following-sibling::div[1]//input[@class='input-field']";
// Dropdown selectors
private static final String DIMENSION_UNIT_DROPDOWN = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='Dimension unit']/following-sibling::div[1]//button[contains(@class, 'dropdown-trigger')]";
private static final String WEIGHT_UNIT_DROPDOWN = "//div[contains(@class, 'master-data-item')][3]//div[contains(@class, 'caption-column')][text()='Weight unit']/following-sibling::div[1]//button[contains(@class, 'dropdown-trigger')]";
// Checkbox selectors
private static final String MIXED_CHECKBOX = "//label[contains(@class, 'checkbox-item')][.//span[contains(@class, 'checkbox-label')][text()='Mixable']]";
private static final String STACKED_CHECKBOX = "//label[contains(@class, 'checkbox-item')][.//span[contains(@class, 'checkbox-label')][text()='Stackable']]";
// Destination selectors
// Note: Use contains(., 'text') instead of contains(text(), 'text') when text is inside nested elements like tooltips
private static final String DESTINATION_NAME_INPUT = "//input[@placeholder='Add new Destination ...']";
private static final String DESTINATION_QUANTITY_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Annual quantity')]/following-sibling::div[1]//input[@class='input-field']";
// Radio buttons are hidden and styled via label - click the label text instead
private static final String ROUTING_RADIO = "//label[contains(@class, 'radio-item')]//span[contains(@class, 'radio-label')][contains(., 'standard routing')]";
private static final String D2D_RADIO = "//label[contains(@class, 'radio-item')]//span[contains(@class, 'radio-label')][contains(., 'individual rate')]";
// Note: D2D fields use "D2D Rate [EUR]" and "Lead time [days]" as labels in the UI
private static final String D2D_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'D2D Rate')]/following-sibling::div[1]//input[@class='input-field']";
private static final String D2D_DURATION_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Lead time')]/following-sibling::div[1]//input[@class='input-field']";
private static final String HANDLING_TAB = "//button[contains(@class, 'tab-header')][contains(., 'Handling')]";
private static final String CUSTOM_HANDLING_CHECKBOX = "//div[contains(@class, 'destination-edit-handling-cost')]//label[contains(@class, 'checkbox-item')]";
private static final String HANDLING_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Handling cost')]/following-sibling::div[1]//input[@class='input-field']";
private static final String REPACKING_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Repackaging cost')]/following-sibling::div[1]//input[@class='input-field']";
private static final String DISPOSAL_COST_INPUT = "//div[contains(@class, 'destination-edit-column-caption')][contains(., 'Disposal cost')]/following-sibling::div[1]//input[@class='input-field']";
// Buttons
private static final String CALCULATE_AND_CLOSE_BUTTON = "//button[contains(., 'Calculate & close')]";
private static final String CLOSE_BUTTON = "//button[contains(., 'Close') and not(contains(., 'Calculate'))]";
public CalculationEditPage(Page page) {
super(page);
}
/**
* Enables screenshot mode with a test case prefix.
* Screenshots will be saved at key points during form filling.
*/
public void enableScreenshots(String testCaseId) {
this.screenshotPrefix = testCaseId;
this.destinationCounter = 0;
}
/**
* Takes a screenshot if screenshot mode is enabled.
*/
private void captureScreenshot(String suffix) {
if (screenshotPrefix != null) {
String filename = screenshotPrefix + "_" + suffix;
java.nio.file.Path screenshotPath = java.nio.file.Paths.get("target/screenshots/" + filename + ".png");
page.screenshot(new Page.ScreenshotOptions().setPath(screenshotPath).setFullPage(true));
logger.info(() -> "Screenshot saved: " + screenshotPath);
}
}
/**
* Takes a screenshot of the current page state before calculation.
*/
public void screenshotBeforeCalculate() {
captureScreenshot("before_calculate");
}
/**
* Fills the main calculation form with input data.
*/
public void fillForm(TestCaseInput input) {
logger.info("Filling calculation form");
// Material section (if HS code input exists)
fillInputByXPath(HS_CODE_INPUT, String.valueOf(input.hsCode()), true);
fillInputByXPath(TARIFF_RATE_INPUT, String.valueOf(input.tariffRate()), true);
// Price section
fillInputByXPath(PRICE_INPUT, String.valueOf(input.price()), false);
fillInputByXPath(OVERSEA_SHARE_INPUT, String.valueOf(input.overseaShare()), false);
setCheckboxByXPath(FCA_FEE_CHECKBOX, input.fcaFee());
// Handling Unit section
fillInputByXPath(LENGTH_INPUT, String.valueOf(input.length()), false);
fillInputByXPath(WIDTH_INPUT, String.valueOf(input.width()), false);
fillInputByXPath(HEIGHT_INPUT, String.valueOf(input.height()), false);
fillInputByXPath(WEIGHT_INPUT, String.valueOf(input.weight()), false);
fillInputByXPath(PIECES_PER_UNIT_INPUT, String.valueOf(input.piecesPerUnit()), false);
// Dropdowns
selectDropdownByXPath(DIMENSION_UNIT_DROPDOWN, input.dimensionUnit());
selectDropdownByXPath(WEIGHT_UNIT_DROPDOWN, input.weightUnit());
// Checkboxes
setCheckboxByXPath(STACKED_CHECKBOX, input.stacked());
setCheckboxByXPath(MIXED_CHECKBOX, input.mixed());
logger.info("Calculation form filled successfully");
}
/**
* Adds a new destination by name.
*/
public void addDestination(DestinationInput destination) {
searchAndSelectAutosuggestByXPath(DESTINATION_NAME_INPUT, destination.name());
page.waitForTimeout(500);
logger.info(() -> "Added destination: " + destination.name());
}
/**
* Fills destination-specific fields.
*/
public void fillDestination(DestinationInput destination) {
destinationCounter++;
String destNum = String.valueOf(destinationCounter);
// First, ensure no modal is currently open
try {
Locator existingModal = page.locator(".modal-overlay");
if (existingModal.count() > 0 && existingModal.isVisible()) {
logger.info("Closing existing modal before opening destination edit");
// Press Escape to close any open modal
page.keyboard().press("Escape");
page.waitForTimeout(500);
}
} catch (Exception e) {
// No modal open, continue
}
// Click on the destination item's edit button to open the modal
// The destination item shows the name, so we find it and click the pencil icon
String destinationName = destination.name();
Locator destinationRow = page.locator(".destination-item-row:has-text('" + destinationName + "')");
if (destinationRow.count() > 0) {
logger.info(() -> "Found destination row for: " + destinationName);
Locator editButton = destinationRow.locator("button:has([class*='pencil'])");
if (editButton.count() == 0) {
// Try alternative selector for icon button
editButton = destinationRow.locator(".destination-item-action button").first();
}
if (editButton.count() > 0) {
logger.info("Clicking edit button to open destination modal");
editButton.click();
page.waitForTimeout(1000); // Wait for modal to open
}
}
// Wait for destination edit modal to be visible
Locator quantityInput = page.locator("xpath=" + DESTINATION_QUANTITY_INPUT);
quantityInput.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(10000));
// Wait extra time for Vue component to fully initialize
// This is critical for subsequent destinations
page.waitForTimeout(1000);
// Fill quantity
fillInputByXPath(DESTINATION_QUANTITY_INPUT, String.valueOf(destination.quantity()), false);
// Select transport mode
if (destination.d2d()) {
page.locator("xpath=" + D2D_RADIO).click();
page.waitForTimeout(300);
// Fill D2D specific fields if individual rate (custom cost/duration)
if (destination.d2dCost() != null) {
fillInputByXPath(D2D_COST_INPUT, String.valueOf(destination.d2dCost()), true);
}
if (destination.d2dDuration() != null) {
fillInputByXPath(D2D_DURATION_INPUT, String.valueOf(destination.d2dDuration()), true);
}
// Note: D2D mode does NOT show route selection UI - routes are determined by the D2D provider
// If using standard routing (no cost specified), the system uses database D2D rates
if (destination.d2dCost() == null) {
logger.info("D2D with standard routing - D2D rates will be loaded from database");
}
} else {
page.locator("xpath=" + ROUTING_RADIO).click();
page.waitForTimeout(300);
// Select route - if not specified, select first available route
selectRoute(destination.route());
}
// Take screenshot of Routes tab (with route selection or D2D fields)
captureScreenshot("dest" + destNum + "_routes_tab");
// Handle custom handling costs
if (destination.customHandling()) {
// Click handling tab
try {
Locator handlingTab = page.locator("xpath=" + HANDLING_TAB);
if (handlingTab.isVisible()) {
handlingTab.click();
page.waitForTimeout(300);
}
} catch (Exception e) {
// Tab might not exist or already selected
}
setCheckboxByXPath(CUSTOM_HANDLING_CHECKBOX, true);
page.waitForTimeout(300);
if (destination.handlingCost() != null) {
fillInputByXPath(HANDLING_COST_INPUT, String.valueOf(destination.handlingCost()), true);
}
if (destination.repackingCost() != null) {
fillInputByXPath(REPACKING_COST_INPUT, String.valueOf(destination.repackingCost()), true);
}
if (destination.disposalCost() != null) {
fillInputByXPath(DISPOSAL_COST_INPUT, String.valueOf(destination.disposalCost()), true);
}
// Take screenshot of Handling tab
captureScreenshot("dest" + destNum + "_handling_tab");
} else {
// For destinations without custom handling, also take a screenshot of the handling tab for verification
try {
Locator handlingTab = page.locator("xpath=" + HANDLING_TAB);
if (handlingTab.isVisible()) {
handlingTab.click();
page.waitForTimeout(300);
captureScreenshot("dest" + destNum + "_handling_tab");
// Go back to routes tab
Locator routesTab = page.locator("//button[contains(@class, 'tab-header')][contains(., 'Routes')]");
if (routesTab.count() > 0 && routesTab.isVisible()) {
routesTab.click();
page.waitForTimeout(200);
}
}
} catch (Exception e) {
// Tab might not exist
}
}
// Close the destination edit modal by clicking OK
Locator okButton = page.locator("button:has-text('OK')");
okButton.click();
page.waitForTimeout(500);
// Wait for modal and overlay to fully close
try {
page.locator(".destination-edit-modal-container").waitFor(
new Locator.WaitForOptions()
.setState(WaitForSelectorState.HIDDEN)
.setTimeout(5000));
} catch (Exception e) {
logger.warning("Destination edit modal might not have closed: " + e.getMessage());
}
// Also wait for any modal overlay to disappear
try {
page.locator(".modal-overlay").waitFor(
new Locator.WaitForOptions()
.setState(WaitForSelectorState.HIDDEN)
.setTimeout(3000));
} catch (Exception e) {
// Overlay might not exist or already hidden
}
// Extra wait to ensure DOM is stable
page.waitForTimeout(500);
logger.info(() -> "Filled destination: " + destination.name());
}
/**
* Selects a route from the available routes.
* Routes are displayed as clickable elements in the destination edit modal.
* Each route shows external_mapping_id values like "HH", "WH HH", etc.
*
* The Vue component (DestinationEditRoutes) uses a Pinia store for route selection.
* When a route is clicked, selectRoute(id) sets route.is_selected = true.
*
* IMPORTANT: Standard DOM clicks don't reliably trigger Vue's event system.
* We need to find the Vue component and call its methods directly.
*/
private void selectRoute(String route) {
// Wait for routes to fully load
page.waitForTimeout(500);
// Wait for routes to be visible
try {
page.locator(".destination-route-container").first().waitFor(
new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE).setTimeout(5000));
} catch (Exception e) {
logger.info("No routes visible yet, continuing anyway");
}
// Check for "no routes available" warning
Locator routeWarning = page.locator(".destination-edit-route-warning");
if (routeWarning.count() > 0 && routeWarning.isVisible()) {
String warningText = routeWarning.textContent();
logger.warning(() -> "Route warning displayed: " + warningText);
logger.info("No routes available - route selection skipped.");
return;
}
// Get routes from DOM and find the Vue component
Locator allRoutes = page.locator(".destination-route-container");
int routeCount = allRoutes.count();
logger.info(() -> "Found " + routeCount + " routes in DOM");
if (routeCount == 0) {
logger.warning("No routes found");
return;
}
// Log available routes
for (int i = 0; i < routeCount; i++) {
final int idx = i;
String routeText = allRoutes.nth(i).textContent();
logger.info(() -> " Route " + idx + ": " + routeText.trim());
}
// Find best matching route index
int routeIndexToSelect = findBestMatchingRouteIndexFromDom(allRoutes, route);
logger.info(() -> "Will select route at index " + routeIndexToSelect);
// Try to find and call the Vue component's selectRoute method
// The component is mounted on the modal's routes container
Object result = page.evaluate("(routeIndex) => { " +
"try { " +
// Find the route element
" const routeElements = document.querySelectorAll('.destination-route-container'); " +
" if (!routeElements || routeElements.length === 0) return 'no_routes_in_dom'; " +
" if (routeIndex >= routeElements.length) return 'index_out_of_bounds'; " +
// Find the Vue component that handles routes - it's the parent of the routes container
" const routesCell = document.querySelector('.destination-edit-cell-routes'); " +
" if (!routesCell) return 'no_routes_cell'; " +
// Walk up to find the component with selectRoute method
" let vueComponent = null; " +
" let el = routesCell; " +
" for (let i = 0; i < 10 && el; i++) { " +
" if (el.__vueParentComponent) { " +
" let comp = el.__vueParentComponent; " +
" while (comp) { " +
" if (comp.ctx && typeof comp.ctx.selectRoute === 'function') { " +
" vueComponent = comp; " +
" break; " +
" } " +
" comp = comp.parent; " +
" } " +
" if (vueComponent) break; " +
" } " +
" el = el.parentElement; " +
" } " +
" if (!vueComponent) { " +
// Alternative: try to access pinia via window or through component
" const routeEl = routeElements[routeIndex]; " +
" let compEl = routeEl; " +
" for (let i = 0; i < 5 && compEl; i++) { " +
" if (compEl.__vueParentComponent?.ctx?.destination?.routes) { " +
" const routes = compEl.__vueParentComponent.ctx.destination.routes; " +
" if (Array.isArray(routes) && routes.length > routeIndex) { " +
" routes.forEach((r, idx) => { r.is_selected = (idx === routeIndex); }); " +
" return 'set_via_ctx_destination'; " +
" } " +
" } " +
" compEl = compEl.parentElement; " +
" } " +
" return 'no_vue_component'; " +
" } " +
// Get the route id from the component's destination.routes
" const routes = vueComponent.ctx.destination?.routes; " +
" if (!routes || routes.length === 0) return 'no_routes_in_ctx'; " +
" if (routeIndex >= routes.length) return 'route_index_exceeds_ctx'; " +
" const routeId = routes[routeIndex].id; " +
// Call the selectRoute method
" vueComponent.ctx.selectRoute(routeId); " +
" return 'called_selectRoute:' + routeId; " +
"} catch (e) { return 'error:' + e.message; } " +
"}", routeIndexToSelect);
final Object vueResult = result;
logger.info(() -> "Vue component route selection result: " + vueResult);
// Regardless of the Vue evaluate result, also run click simulation - in practice it is the most reliable trigger
logger.info("Using click simulation to select route");
Locator routeToClick = allRoutes.nth(routeIndexToSelect);
simulateRobustClick(routeToClick);
// Wait for UI update
page.waitForTimeout(500);
// Verify selection worked
boolean selected = verifyRouteSelectionVisual(allRoutes.nth(routeIndexToSelect));
// If click didn't work, try Pinia as fallback
if (!selected) {
logger.info("Click simulation didn't select route, trying Pinia direct access");
Object piniaResult = tryPiniaDirectAccess(routeIndexToSelect);
final Object piniaResultFinal = piniaResult;
logger.info(() -> "Pinia direct access result: " + piniaResultFinal);
page.waitForTimeout(300);
selected = verifyRouteSelectionVisual(allRoutes.nth(routeIndexToSelect));
}
if (!selected) {
logger.warning(() -> "Route selection may have failed for index " + routeIndexToSelect);
}
}
/**
* Try direct Pinia store access through various paths.
*/
private Object tryPiniaDirectAccess(int routeIndex) {
return page.evaluate("(routeIndex) => { " +
"try { " +
// Try different ways to find Pinia
" let pinia = null; " +
// Method 1: Through app provides
" const app = document.querySelector('#app')?.__vue_app__; " +
" if (app?._context?.provides?.pinia) { " +
" pinia = app._context.provides.pinia; " +
" } " +
// Method 2: Through window (if exposed)
" if (!pinia && window.__pinia) { " +
" pinia = window.__pinia; " +
" } " +
// Method 3: Walk through app's config
" if (!pinia && app?.config?.globalProperties?.$pinia) { " +
" pinia = app.config.globalProperties.$pinia; " +
" } " +
" if (!pinia) return 'pinia_not_found'; " +
// Access the store
" const storeState = pinia.state?.value?.['destinationSingleEdit']; " +
" if (!storeState?.destination?.routes) return 'store_not_found'; " +
" const routes = storeState.destination.routes; " +
" if (routeIndex >= routes.length) return 'index_out_of_range'; " +
// Set selection
" routes.forEach((r, idx) => { r.is_selected = (idx === routeIndex); }); " +
" return 'pinia_success'; " +
"} catch (e) { return 'pinia_error:' + e.message; } " +
"}", routeIndex);
}
/**
* Simulate a robust click that Vue should recognize.
*/
private void simulateRobustClick(Locator element) {
try {
// First, scroll into view
element.scrollIntoViewIfNeeded();
page.waitForTimeout(100);
// Try to trigger via native Playwright click
element.click(new Locator.ClickOptions().setForce(true));
page.waitForTimeout(100);
// Also dispatch events manually
element.evaluate("el => { " +
"const mousedown = new MouseEvent('mousedown', { bubbles: true, cancelable: true, view: window }); " +
"const mouseup = new MouseEvent('mouseup', { bubbles: true, cancelable: true, view: window }); " +
"const click = new MouseEvent('click', { bubbles: true, cancelable: true, view: window }); " +
"el.dispatchEvent(mousedown); " +
"el.dispatchEvent(mouseup); " +
"el.dispatchEvent(click); " +
"}");
logger.info("Simulated robust click on route element");
} catch (Exception e) {
logger.warning(() -> "Robust click simulation failed: " + e.getMessage());
}
}
/**
* Verify route selection is visible in the DOM.
* @return true if the route appears selected, false otherwise
*/
private boolean verifyRouteSelectionVisual(Locator routeElement) {
try {
Locator innerContainer = routeElement.locator(".destination-route-inner-container");
if (innerContainer.count() > 0) {
String classes = innerContainer.getAttribute("class");
boolean selected = classes != null && classes.contains("selected");
logger.info(() -> "Route visual verification - classes: " + classes + ", selected: " + selected);
return selected;
}
} catch (Exception e) {
logger.warning(() -> "Could not verify route selection: " + e.getMessage());
}
return false;
}
/**
* Finds the exactly matching route among the DOM elements.
* The route text (when normalized) must equal the concatenation of all spec
* segments, in order.
*
* @throws IllegalStateException if no exact match is found
*/
private int findBestMatchingRouteIndexFromDom(Locator allRoutes, String routeSpec) {
int routeCount = allRoutes.count();
if (routeSpec == null || routeSpec.isEmpty()) {
return 0; // No route specified, use first available
}
if (routeCount == 0) {
throw new IllegalStateException("No routes available, but route spec was: " + routeSpec);
}
String[] specSegments = routeSpec.split(",");
// Build expected route text by concatenating segments (routes display without separators)
StringBuilder expectedBuilder = new StringBuilder();
for (String segment : specSegments) {
expectedBuilder.append(segment.trim().toLowerCase().replace("_", " "));
}
String expectedRouteText = expectedBuilder.toString();
// Find exact match
for (int i = 0; i < routeCount; i++) {
String routeText = allRoutes.nth(i).textContent().toLowerCase().trim();
// Remove common whitespace/separator variations
String normalizedRouteText = routeText.replaceAll("\\s+", "").replace(">", "");
String normalizedExpected = expectedRouteText.replaceAll("\\s+", "");
if (normalizedRouteText.equals(normalizedExpected)) {
final int matchedIndex = i;
final String matchedRoute = routeText;
logger.info(() -> "Exact route match found at index " + matchedIndex + ": " + matchedRoute);
return i;
}
}
// No exact match found - log available routes and fail
StringBuilder availableRoutes = new StringBuilder("Available routes:\n");
for (int i = 0; i < routeCount; i++) {
availableRoutes.append(" ").append(i).append(": ").append(allRoutes.nth(i).textContent().trim()).append("\n");
}
throw new IllegalStateException(
"No exact route match found for spec: '" + routeSpec + "' (expected: '" + expectedRouteText + "')\n" +
availableRoutes.toString()
);
}
/**
* Clicks the "Calculate & close" button.
*/
public void calculateAndClose() {
page.locator("xpath=" + CALCULATE_AND_CLOSE_BUTTON).click();
page.waitForTimeout(2000);
logger.info("Clicked Calculate & close");
}
/**
* Clicks the "Close" button.
*/
public void close() {
page.locator("xpath=" + CLOSE_BUTTON).click();
logger.info("Clicked Close");
}
// Helper methods for XPath-based operations
private void fillInputByXPath(String xpath, String value, boolean optional) {
try {
Locator locator = page.locator("xpath=" + xpath);
if (optional) {
locator.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(2000));
} else {
locator.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE));
}
locator.clear();
locator.fill(value);
logger.fine(() -> "Filled XPath input: " + xpath + " with value: " + value);
} catch (Exception e) {
if (!optional) {
throw e;
}
logger.warning(() -> "Optional field not found: " + xpath);
}
}
private void setCheckboxByXPath(String xpath, boolean checked) {
try {
Locator label = page.locator("xpath=" + xpath);
label.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(2000));
Locator checkbox = label.locator("input[type='checkbox']");
boolean isChecked = checkbox.isChecked();
if (isChecked != checked) {
label.click();
page.waitForTimeout(300);
}
} catch (Exception e) {
logger.warning(() -> "Could not set checkbox: " + xpath);
}
}
private void selectDropdownByXPath(String xpath, String optionText) {
try {
Locator dropdown = page.locator("xpath=" + xpath);
dropdown.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(2000));
// Check current value
try {
String currentValue = dropdown.locator("span.dropdown-trigger-text").textContent();
if (optionText.equals(currentValue)) {
return;
}
} catch (Exception ignored) {
}
dropdown.click();
Locator menu = page.locator("ul.dropdown-menu");
menu.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
String optionXPath = String.format(
"//li[contains(@class, 'dropdown-option')][normalize-space(text())='%s']",
optionText
);
page.locator("xpath=" + optionXPath).click();
page.waitForTimeout(200);
} catch (Exception e) {
logger.warning(() -> "Could not select dropdown option: " + optionText);
}
}
private void searchAndSelectAutosuggestByXPath(String xpath, String searchText) {
Locator input = page.locator("xpath=" + xpath);
input.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
input.clear();
input.fill(searchText);
page.waitForTimeout(1000);
Locator suggestion = page.locator(".suggestion-item").first();
suggestion.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
suggestion.click();
page.waitForTimeout(500);
}
}
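A hedged usage sketch of the edit flow; the 'destinations' list is assumed to come from the test case (its accessor is not part of this excerpt), and screenshot usage is optional:

    CalculationEditPage editPage = new CalculationEditPage(page);
    editPage.enableScreenshots(testCase.id());      // optional: screenshots land in target/screenshots/
    editPage.fillForm(testCase.input());            // material, price, and handling-unit sections
    for (DestinationInput dest : destinations) {    // 'destinations' assumed to be provided by the test case
        editPage.addDestination(dest);
        editPage.fillDestination(dest);
    }
    editPage.screenshotBeforeCalculate();
    editPage.calculateAndClose();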


@@ -0,0 +1,86 @@
package de.avatic.lcc.e2e.pages;
import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.WaitForSelectorState;
import java.util.logging.Logger;
/**
* Page Object for the dev login page (/dev).
* Allows selecting a user from the dev user table for testing.
*/
public class DevLoginPage extends BasePage {
private static final Logger logger = Logger.getLogger(DevLoginPage.class.getName());
private static final String MODAL_YES_BUTTON = "div.modal-dialog-actions button.btn--primary";
private static final String MODAL_CONTAINER = "div.modal-container";
public DevLoginPage(Page page) {
super(page);
}
/**
* Navigates to the dev login page and logs in as the specified user.
*
* @param baseUrl The base URL of the application
* @param userName The first name of the user to log in as (e.g., "John")
*/
public void login(String baseUrl, String userName) {
page.navigate(baseUrl + "/dev");
// Wait for the page to load
page.waitForLoadState();
// The /dev page has two tables. We need the first one (User control tab).
// Use .first() to get the first table
Locator userTable = page.locator("table.data-table").first();
userTable.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
// Wait for table rows to appear (API might take time to load data)
Locator rows = userTable.locator("tbody tr.table-row");
try {
rows.first().waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(10000));
} catch (Exception e) {
logger.warning("No table rows found after waiting. Page content: " +
page.content().substring(0, Math.min(1000, page.content().length())));
throw new RuntimeException("No users found in dev user table. Is the API working?", e);
}
int rowCount = rows.count();
logger.info(() -> "Found " + rowCount + " user rows");
boolean userFound = false;
for (int i = 0; i < rowCount; i++) {
Locator row = rows.nth(i);
Locator firstCell = row.locator("td").first();
String firstName = firstCell.textContent();
if (firstName != null && firstName.contains(userName)) {
row.click();
userFound = true;
logger.info(() -> "Selected user: " + userName);
break;
}
}
if (!userFound) {
throw new RuntimeException("User '" + userName + "' not found in dev user table");
}
// Confirm the login in the modal
Locator yesButton = page.locator(MODAL_YES_BUTTON);
yesButton.waitFor(new Locator.WaitForOptions().setState(WaitForSelectorState.VISIBLE));
yesButton.click();
// Wait for modal to close
page.locator(MODAL_CONTAINER).waitFor(
new Locator.WaitForOptions().setState(WaitForSelectorState.HIDDEN)
);
logger.info(() -> "Successfully logged in as: " + userName);
}
}
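Typical usage in a test, before any other page-object interaction (URL and user name are environment-specific assumptions):

    DevLoginPage devLogin = new DevLoginPage(page);
    // Assumes the dev user table contains a user whose first name contains "John"
    devLogin.login("http://localhost:8080", "John");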


@@ -0,0 +1,620 @@
package de.avatic.lcc.e2e.pages;
import com.microsoft.playwright.Locator;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.options.AriaRole;
import com.microsoft.playwright.options.WaitForSelectorState;
import de.avatic.lcc.e2e.testdata.DestinationExpected;
import de.avatic.lcc.e2e.testdata.TestCaseExpected;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
/**
* Page Object for the calculation results/report page.
* Handles navigating to reports and reading calculation results.
*/
public class ResultsPage extends BasePage {
private static final Logger logger = Logger.getLogger(ResultsPage.class.getName());
// Report page selectors based on Report.vue structure
private static final String REPORT_CONTAINER = ".report-container";
private static final String CREATE_REPORT_BUTTON = "button:has-text('Create report')";
private static final String REPORT_BOX = ".box"; // Reports are shown inside Box components
public ResultsPage(Page page) {
super(page);
}
/**
* Navigates to the reports page and creates a report for the given material/supplier.
*/
public void navigateToReports(String baseUrl, String partNumber, String supplierName) {
// Navigate to reports page
page.navigate(baseUrl + "/reports");
page.waitForLoadState();
logger.info("Navigated to reports page");
// Click "Create report" button
Locator createReportBtn = page.locator(CREATE_REPORT_BUTTON);
createReportBtn.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(10000));
createReportBtn.click();
logger.info("Clicked Create report button");
// Wait for the modal to fully open
page.waitForTimeout(1000);
// The modal has an autosuggest search bar with a specific placeholder
// Use the placeholder text to find the correct input inside the modal
Locator searchInput = page.locator("input[placeholder='Select material for reporting']");
searchInput.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(5000));
searchInput.click();
searchInput.fill(partNumber);
logger.info("Entered part number in search: " + partNumber);
page.waitForTimeout(1500);
// Wait for and select the material from suggestions
Locator suggestion = page.locator(".suggestion-item").first();
try {
suggestion.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(5000));
suggestion.click();
logger.info("Selected material from suggestions");
} catch (Exception e) {
logger.warning("Could not select material from suggestions: " + e.getMessage());
}
// Wait for suppliers list to load
page.waitForTimeout(1500);
// Select the supplier by clicking on its item-list-element
// The supplier name is inside a supplier-item component
try {
Locator supplierElement = page.locator(".item-list-element")
.filter(new Locator.FilterOptions().setHasText(supplierName))
.first();
if (supplierElement.count() > 0) {
supplierElement.click();
logger.info("Selected supplier: " + supplierName);
page.waitForTimeout(500);
} else {
logger.warning("Supplier not found: " + supplierName);
}
} catch (Exception e) {
logger.warning("Could not select supplier: " + e.getMessage());
}
// Click OK button inside the modal footer
Locator okButton = page.locator(".footer button:has-text('OK')");
try {
okButton.waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(5000));
okButton.click();
logger.info("Clicked OK button");
} catch (Exception e) {
// Fallback: try to find any OK button
page.locator("button:has-text('OK')").first().click();
}
// Wait for the report to load
waitForResults();
}
/**
* Waits for the results to be loaded.
*/
public void waitForResults() {
// Wait for any "Prepare report" modal to disappear
try {
Locator prepareReportModal = page.locator(".modal-overlay, .modal-container, .modal-dialog");
if (prepareReportModal.count() > 0 && prepareReportModal.first().isVisible()) {
logger.info("Waiting for modal to close...");
prepareReportModal.first().waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.HIDDEN)
.setTimeout(30000));
logger.info("Modal closed");
}
} catch (Exception e) {
// Modal might not be present or already closed
}
try {
// Wait for report container or spinner to disappear
page.locator(".report-spinner, .spinner").waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.HIDDEN)
.setTimeout(30000));
} catch (Exception e) {
// Spinner might not be present
}
try {
page.locator(REPORT_CONTAINER).waitFor(new Locator.WaitForOptions()
.setState(WaitForSelectorState.VISIBLE)
.setTimeout(30000));
page.waitForLoadState();
logger.info("Results loaded");
} catch (Exception e) {
logger.warning("Results container not found, continuing...");
}
}
/**
* Expands all collapsible boxes on the report page.
* The Vue CollapsibleBox component uses:
* - .box-content.collapsed for hidden content
* - .collapse-button in the header to toggle
* - The outer box element gets the class "collapsible" while collapsed and is then clickable
*/
public void expandAllCollapsibleBoxes() {
try {
// Strategy: Keep clicking on collapsed boxes until none remain
// After each click, re-query the DOM since it changes
int maxIterations = 20; // Safety limit
int totalExpanded = 0;
for (int iteration = 0; iteration < maxIterations; iteration++) {
// Find collapsed content sections
Locator collapsedContent = page.locator(".box-content.collapsed");
int collapsedCount = collapsedContent.count();
if (collapsedCount == 0) {
break; // All expanded
}
final int iterNum = iteration + 1;
final int remaining = collapsedCount;
logger.info(() -> "Iteration " + iterNum + ": Found " + remaining + " collapsed boxes");
// Try to expand the first collapsed box
try {
Locator firstCollapsed = collapsedContent.first();
// Navigate up to find the clickable header span (the title)
// Structure: box > div > div.box-header > span (clickable)
Locator headerSpan = firstCollapsed.locator("xpath=preceding-sibling::div[contains(@class, 'box-header')]//span").first();
if (headerSpan.count() > 0 && headerSpan.isVisible()) {
headerSpan.click();
page.waitForTimeout(400); // Wait for animation
totalExpanded++;
logger.info(() -> "Expanded box via header span");
continue;
}
// Alternative: Try clicking the collapse button
Locator collapseButton = firstCollapsed.locator("xpath=preceding-sibling::div[contains(@class, 'box-header')]//button[contains(@class, 'collapse-button')]").first();
if (collapseButton.count() > 0 && collapseButton.isVisible()) {
collapseButton.click();
page.waitForTimeout(400);
totalExpanded++;
logger.info(() -> "Expanded box via collapse button");
continue;
}
// Alternative: Click on the parent box element which also has a click handler
Locator parentBox = firstCollapsed.locator("xpath=ancestor::*[contains(@class, 'collapsible')]").first();
if (parentBox.count() > 0 && parentBox.isVisible()) {
parentBox.click();
page.waitForTimeout(400);
totalExpanded++;
logger.info(() -> "Expanded box via parent collapsible element");
continue;
}
// If nothing worked, log and try next
logger.warning("Could not find clickable element for collapsed box");
} catch (Exception e) {
final String errorMsg = e.getMessage();
logger.warning(() -> "Error expanding box: " + errorMsg);
}
}
// Final check
int finalCollapsed = page.locator(".box-content.collapsed").count();
final int expanded = totalExpanded;
final int stillCollapsedFinal = finalCollapsed;
logger.info(() -> "Expanded " + expanded + " boxes, " + stillCollapsedFinal + " still collapsed");
page.waitForTimeout(500); // Wait for all animations to complete
} catch (Exception e) {
logger.warning("Could not expand all boxes: " + e.getMessage());
}
}
/**
* Takes a full page screenshot with all content visible.
* @param filename The filename without path (will be saved to target/screenshots/)
*/
public void takeFullPageScreenshot(String filename) {
try {
// First expand all collapsible sections
expandAllCollapsibleBoxes();
// Wait a moment for any animations to complete
page.waitForTimeout(500);
// Take full page screenshot
String path = "target/screenshots/" + filename + ".png";
page.screenshot(new Page.ScreenshotOptions()
.setPath(Paths.get(path))
.setFullPage(true));
logger.info(() -> "Full page screenshot saved: " + path);
} catch (Exception e) {
logger.warning("Could not take full page screenshot: " + e.getMessage());
}
}
/**
* Reads all result values from the page.
* Based on Report.vue structure with .report-content-row elements.
*/
public Map<String, Object> readResults() {
waitForResults();
// Expand all collapsible boxes to ensure all content is visible
expandAllCollapsibleBoxes();
Map<String, Object> results = new HashMap<>();
// Read values from the "Summary" section (first 3-col grid)
// Structure: <div class="report-content-row"><div>Label</div><div class="report-content-data-cell">Value €</div>...</div>
results.put("mekA", readValueByLabel("MEK A"));
results.put("logisticCost", readValueByLabel("Logistics cost"));
results.put("mekB", readValueByLabel("MEK B"));
// Read values from the "Weighted cost breakdown" section
results.put("fcaFee", readValueByLabel("FCA fee"));
results.put("transportation", readValueByLabel("Transportation costs"));
results.put("d2d", readValueByLabel("Door 2 door costs"));
results.put("airFreight", readValueByLabel("Air freight costs"));
results.put("custom", readValueByLabel("Custom costs"));
results.put("repackaging", readValueByLabel("Repackaging"));
results.put("handling", readValueByLabel("Handling"));
results.put("disposal", readValueByLabel("Disposal costs"));
results.put("space", readValueByLabel("Space costs"));
results.put("capital", readValueByLabel("Capital costs"));
// Read safety stock from material section
results.put("safetyStock", readIntValueByLabel("Safety stock"));
// Read destination results
results.put("destinations", readDestinationResults());
return results;
}
/**
* Reads a numeric value by finding the label in a report-content-row.
* The structure is: label | value | percentage
*/
private Double readValueByLabel(String label) {
try {
// Find the row containing the label, then get the first data cell
String xpath = String.format(
"//div[contains(@class, 'report-content-row')]/div[contains(text(), '%s')]/following-sibling::div[contains(@class, 'report-content-data-cell')][1]",
label
);
Locator locator = page.locator("xpath=" + xpath).first();
if (locator.count() == 0) {
// Try alternative: text might be in a child element
xpath = String.format(
"//div[contains(@class, 'report-content-row')]/div[contains(., '%s')]/following-sibling::div[contains(@class, 'report-content-data-cell')][1]",
label
);
locator = page.locator("xpath=" + xpath).first();
}
if (locator.count() == 0) {
logger.warning(() -> "Field not found by label: " + label);
return null;
}
String text = locator.textContent();
if (text == null || text.isEmpty()) {
return null;
}
// Remove currency symbols, percentage signs, and formatting
// Handle German number format (1.234,56) vs English (1,234.56)
text = text.replaceAll("[€$%\\s]", "").trim();
// If contains comma as decimal separator (German format)
if (text.contains(",") && !text.contains(".")) {
text = text.replace(",", ".");
} else if (text.contains(",") && text.contains(".")) {
// 1.234,56 format - remove thousands separator, replace decimal
text = text.replace(".", "").replace(",", ".");
}
return Double.parseDouble(text);
} catch (Exception e) {
logger.warning(() -> "Could not read numeric value for label: " + label + " - " + e.getMessage());
return null;
}
}
/**
* Reads integer value by label.
*/
private Integer readIntValueByLabel(String label) {
Double value = readValueByLabel(label);
return value != null ? value.intValue() : null;
}
/**
* Reads results for all destinations from the report.
* Destinations are in collapsible boxes with class containing destination info.
*/
private List<Map<String, Object>> readDestinationResults() {
List<Map<String, Object>> destinations = new ArrayList<>();
try {
// Each destination is in a collapsible-box with the destination name as title
// Look for boxes that have destination-related content
Locator destinationBoxes = page.locator(".box-gap:has(.report-content-container--2-col)");
int count = destinationBoxes.count();
logger.info(() -> "Found " + count + " potential destination boxes");
// The first boxes are the summary, cost breakdown, and material sections;
// they are filtered out below because only destination boxes contain "Transit time"
for (int i = 0; i < count; i++) {
Locator box = destinationBoxes.nth(i);
// Check if this box has destination-specific content (Transit time [days], Container type)
if (box.locator("div:has-text('Transit time')").count() > 0) {
Map<String, Object> destResult = new HashMap<>();
destResult.put("transitTime", readValueInBox(box, "Transit time [days]"));
destResult.put("stackedLayers", readValueInBox(box, "Stacked layers"));
destResult.put("containerUnitCount", readValueInBox(box, "Container unit count"));
destResult.put("containerType", readStringInBox(box, "Container type"));
destResult.put("limitingFactor", readStringInBox(box, "Limiting factor"));
destinations.add(destResult);
final int destCount = destinations.size();
logger.info(() -> "Read destination " + destCount + " results: " + destResult);
}
}
} catch (Exception e) {
logger.warning("Could not read destination results: " + e.getMessage());
}
return destinations;
}
private Double readValueInBox(Locator box, String label) {
try {
// Try exact text match first, then contains match
Locator cell = box.locator(".report-content-row")
.filter(new Locator.FilterOptions().setHasText(label))
.locator(".report-content-data-cell")
.first();
if (cell.count() == 0) {
logger.warning(() -> "Could not find cell for label: " + label);
return null;
}
String text = cell.textContent().replaceAll("[^0-9.,\\-]", "").trim();
final String logText = text;
logger.info(() -> "Read value for '" + label + "': " + logText);
if (text.isEmpty() || text.equals("-")) {
return null;
}
// Handle German decimal format
if (text.contains(",") && !text.contains(".")) {
text = text.replace(",", ".");
}
return Double.parseDouble(text);
} catch (Exception e) {
logger.warning(() -> "Error reading value for label '" + label + "': " + e.getMessage());
return null;
}
}
private String readStringInBox(Locator box, String label) {
try {
Locator cell = box.locator(".report-content-row")
.filter(new Locator.FilterOptions().setHasText(label))
.locator(".report-content-data-cell")
.first();
if (cell.count() == 0) {
logger.warning(() -> "Could not find string cell for label: " + label);
return null;
}
String text = cell.textContent().trim();
logger.info(() -> "Read string for '" + label + "': " + text);
return text;
} catch (Exception e) {
logger.warning(() -> "Error reading string for label '" + label + "': " + e.getMessage());
return null;
}
}
/**
* Verifies that results match expected values.
*/
public void verifyResults(TestCaseExpected expected, double tolerance) {
Map<String, Object> actual = readResults();
// Log all actual values for debugging
logger.info("======== ACTUAL VALUES FROM REPORT ========");
logger.info(() -> "MEK A: " + actual.get("mekA"));
logger.info(() -> "Logistics cost: " + actual.get("logisticCost"));
logger.info(() -> "MEK B: " + actual.get("mekB"));
logger.info(() -> "FCA fee: " + actual.get("fcaFee"));
logger.info(() -> "Transportation: " + actual.get("transportation"));
logger.info(() -> "D2D: " + actual.get("d2d"));
logger.info(() -> "Air freight: " + actual.get("airFreight"));
logger.info(() -> "Custom: " + actual.get("custom"));
logger.info(() -> "Repackaging: " + actual.get("repackaging"));
logger.info(() -> "Handling: " + actual.get("handling"));
logger.info(() -> "Disposal: " + actual.get("disposal"));
logger.info(() -> "Space: " + actual.get("space"));
logger.info(() -> "Capital: " + actual.get("capital"));
logger.info(() -> "Safety stock: " + actual.get("safetyStock"));
logger.info("======== EXPECTED VALUES ========");
logger.info(() -> "MEK A: " + expected.mekA());
logger.info(() -> "Logistics cost: " + expected.logisticCost());
logger.info(() -> "MEK B: " + expected.mekB());
logger.info(() -> "FCA fee: " + expected.fcaFee());
logger.info(() -> "Transportation: " + expected.transportation());
logger.info(() -> "D2D: " + expected.d2d());
logger.info(() -> "Air freight: " + expected.airFreight());
logger.info(() -> "Custom: " + expected.custom());
logger.info(() -> "Repackaging: " + expected.repackaging());
logger.info(() -> "Handling: " + expected.handling());
logger.info(() -> "Disposal: " + expected.disposal());
logger.info(() -> "Space: " + expected.space());
logger.info(() -> "Capital: " + expected.capital());
logger.info(() -> "Safety stock: " + expected.safetyStock());
logger.info("============================================");
verifyNumericResult("MEK_A", expected.mekA(), (Double) actual.get("mekA"), tolerance);
verifyNumericResult("LOGISTIC_COST", expected.logisticCost(), (Double) actual.get("logisticCost"), tolerance);
verifyNumericResult("MEK_B", expected.mekB(), (Double) actual.get("mekB"), tolerance);
verifyNumericResult("FCA_FEE", expected.fcaFee(), (Double) actual.get("fcaFee"), tolerance);
verifyNumericResult("TRANSPORTATION", expected.transportation(), (Double) actual.get("transportation"), tolerance);
verifyNumericResult("D2D", expected.d2d(), (Double) actual.get("d2d"), tolerance);
verifyNumericResult("AIR_FREIGHT", expected.airFreight(), (Double) actual.get("airFreight"), tolerance);
verifyNumericResult("CUSTOM", expected.custom(), (Double) actual.get("custom"), tolerance);
verifyNumericResult("REPACKAGING", expected.repackaging(), (Double) actual.get("repackaging"), tolerance);
verifyNumericResult("HANDLING", expected.handling(), (Double) actual.get("handling"), tolerance);
verifyNumericResult("DISPOSAL", expected.disposal(), (Double) actual.get("disposal"), tolerance);
verifyNumericResult("SPACE", expected.space(), (Double) actual.get("space"), tolerance);
verifyNumericResult("CAPITAL", expected.capital(), (Double) actual.get("capital"), tolerance);
// Verify destinations
@SuppressWarnings("unchecked")
List<Map<String, Object>> actualDestinations = (List<Map<String, Object>>) actual.get("destinations");
List<DestinationExpected> expectedDestinations = expected.destinations();
if (expectedDestinations.size() != actualDestinations.size()) {
throw new AssertionError(String.format(
"Destination count mismatch: expected %d, got %d",
expectedDestinations.size(), actualDestinations.size()
));
}
for (int i = 0; i < expectedDestinations.size(); i++) {
DestinationExpected expDest = expectedDestinations.get(i);
Map<String, Object> actDest = actualDestinations.get(i);
String prefix = "Destination " + (i + 1) + " ";
// Verify transit time (always expected to have a value)
if (expDest.transitTime() != null) {
verifyNumericResult(prefix + "TRANSIT_TIME",
expDest.transitTime().doubleValue(),
(Double) actDest.get("transitTime"), tolerance);
}
// Verify stacked layers (null expected = "-" in UI)
verifyNullableNumericResult(prefix + "STACKED_LAYERS",
expDest.stackedLayers(),
(Double) actDest.get("stackedLayers"), tolerance);
// Verify container unit count (null expected = "-" in UI)
verifyNullableNumericResult(prefix + "CONTAINER_UNIT_COUNT",
expDest.containerUnitCount(),
(Double) actDest.get("containerUnitCount"), tolerance);
// Verify container type (null or "-" expected = "-" in UI)
String expContainerType = expDest.containerType();
String actContainerType = (String) actDest.get("containerType");
verifyStringResult(prefix + "CONTAINER_TYPE", expContainerType, actContainerType);
// Verify limiting factor (null or "-" expected = "-" in UI)
String expLimitingFactor = expDest.limitingFactor();
String actLimitingFactor = (String) actDest.get("limitingFactor");
verifyStringResult(prefix + "LIMITING_FACTOR", expLimitingFactor, actLimitingFactor);
}
logger.info("All results verified successfully");
}
private void verifyNumericResult(String fieldName, double expected, Double actual, double tolerance) {
// If actual is null and expected is effectively zero, treat as pass
// (some fields may not be displayed in the UI when their value is 0)
if (actual == null) {
if (Math.abs(expected) < 0.001) {
logger.info(() -> "Field '" + fieldName + "': actual is null, expected ~0 - treating as pass");
return;
}
throw new AssertionError(String.format(
"Field '%s': actual value is null, expected %f",
fieldName, expected
));
}
double diff = Math.abs(expected - actual);
double relativeDiff = expected != 0 ? diff / Math.abs(expected) : diff;
if (relativeDiff > tolerance) {
throw new AssertionError(String.format(
"Field '%s': expected %f, got %f (diff: %.4f, tolerance: %.2f%%)",
fieldName, expected, actual, relativeDiff * 100, tolerance * 100
));
}
}
/**
* Verifies a nullable numeric result. If expected is null, actual should also be null.
*/
private void verifyNullableNumericResult(String fieldName, Integer expected, Double actual, double tolerance) {
if (expected == null) {
// Expected null means UI shows "-"
if (actual != null) {
throw new AssertionError(String.format(
"Field '%s': expected null (UI shows '-'), got %f",
fieldName, actual
));
}
return;
}
// Expected has a value, verify it
verifyNumericResult(fieldName, expected.doubleValue(), actual, tolerance);
}
/**
* Verifies a string result. Handles null/"-" as equivalent.
*/
private void verifyStringResult(String fieldName, String expected, String actual) {
// Normalize "-" to null for comparison
String normExpected = (expected == null || "-".equals(expected)) ? null : expected;
String normActual = (actual == null || "-".equals(actual)) ? null : actual;
if (normExpected == null && normActual == null) {
return; // Both null/"-" = match
}
if (normExpected == null || normActual == null || !normExpected.equals(normActual)) {
throw new AssertionError(String.format(
"Field '%s': expected '%s', got '%s'",
fieldName, expected, actual
));
}
}
}
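As a worked example of the tolerance check above: with the 3% tolerance used by the E2E tests, an expected value of 33.76 and an actual value of 34.50 give a relative difference of |33.76 - 34.50| / 33.76 ≈ 2.2%, which passes. The "-" handling is the other subtlety; a minimal standalone sketch of the same null/"-" normalization rule (hypothetical class, not part of this diff) shows which combinations count as a match:

// Hypothetical sketch mirroring the null/"-" equivalence used by verifyStringResult; not part of the diff.
final class DashEquivalenceSketch {

    static boolean matches(String expected, String actual) {
        // Normalize the UI placeholder "-" to null, then compare.
        String normExpected = (expected == null || "-".equals(expected)) ? null : expected;
        String normActual = (actual == null || "-".equals(actual)) ? null : actual;
        if (normExpected == null && normActual == null) {
            return true; // both mean "no value"
        }
        return normExpected != null && normExpected.equals(normActual);
    }

    public static void main(String[] args) {
        System.out.println(matches(null, "-"));          // true
        System.out.println(matches("Weight", "Weight")); // true
        System.out.println(matches("-", "Volume"));      // false
    }
}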


@@ -0,0 +1,56 @@
package de.avatic.lcc.e2e.testdata;
/**
* Expected output values for a single destination in a test case.
* Nullable fields (Integer, String) indicate the UI shows "-" when no main run/D2D is configured.
*/
public record DestinationExpected(
Integer transitTime,
Integer stackedLayers,
Integer containerUnitCount,
String containerType,
String limitingFactor
) {
public static Builder builder() {
return new Builder();
}
public static class Builder {
private Integer transitTime;
private Integer stackedLayers;
private Integer containerUnitCount;
private String containerType;
private String limitingFactor;
public Builder transitTime(Integer transitTime) {
this.transitTime = transitTime;
return this;
}
public Builder stackedLayers(Integer stackedLayers) {
this.stackedLayers = stackedLayers;
return this;
}
public Builder containerUnitCount(Integer containerUnitCount) {
this.containerUnitCount = containerUnitCount;
return this;
}
public Builder containerType(String containerType) {
this.containerType = containerType;
return this;
}
public Builder limitingFactor(String limitingFactor) {
this.limitingFactor = limitingFactor;
return this;
}
public DestinationExpected build() {
return new DestinationExpected(
transitTime, stackedLayers, containerUnitCount, containerType, limitingFactor
);
}
}
}
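For reference, a destination for which only the transit time is asserted (all other columns show "-" in the UI) is built by leaving the remaining builder fields unset; a brief usage sketch (hypothetical class, not part of this diff):

// Hypothetical usage sketch for DestinationExpected; not part of the diff.
import de.avatic.lcc.e2e.testdata.DestinationExpected;

class DestinationExpectedUsageSketch {
    static DestinationExpected matrixDestination() {
        // Only the transit time is set; the nullable fields stay null,
        // which corresponds to "-" in the UI (no main run / D2D configured).
        return DestinationExpected.builder()
                .transitTime(3)
                .build();
    }
}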


@@ -0,0 +1,91 @@
package de.avatic.lcc.e2e.testdata;
/**
* Input data for a single destination in a test case.
*/
public record DestinationInput(
String name,
int quantity,
boolean d2d,
String route,
Double d2dCost,
Integer d2dDuration,
Double handlingCost,
Double repackingCost,
Double disposalCost,
boolean customHandling
) {
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String name;
private int quantity;
private boolean d2d;
private String route;
private Double d2dCost;
private Integer d2dDuration;
private Double handlingCost;
private Double repackingCost;
private Double disposalCost;
private boolean customHandling;
public Builder name(String name) {
this.name = name;
return this;
}
public Builder quantity(int quantity) {
this.quantity = quantity;
return this;
}
public Builder d2d(boolean d2d) {
this.d2d = d2d;
return this;
}
public Builder route(String route) {
this.route = route;
return this;
}
public Builder d2dCost(Double d2dCost) {
this.d2dCost = d2dCost;
return this;
}
public Builder d2dDuration(Integer d2dDuration) {
this.d2dDuration = d2dDuration;
return this;
}
public Builder handlingCost(Double handlingCost) {
this.handlingCost = handlingCost;
return this;
}
public Builder repackingCost(Double repackingCost) {
this.repackingCost = repackingCost;
return this;
}
public Builder disposalCost(Double disposalCost) {
this.disposalCost = disposalCost;
return this;
}
public Builder customHandling(boolean customHandling) {
this.customHandling = customHandling;
return this;
}
public DestinationInput build() {
return new DestinationInput(
name, quantity, d2d, route, d2dCost, d2dDuration,
handlingCost, repackingCost, disposalCost, customHandling
);
}
}
}


@@ -0,0 +1,12 @@
package de.avatic.lcc.e2e.testdata;
/**
* Represents a complete E2E test case with input data and expected output.
*/
public record TestCase(
String id,
String name,
TestCaseInput input,
TestCaseExpected expected
) {
}


@@ -0,0 +1,128 @@
package de.avatic.lcc.e2e.testdata;
import java.util.List;
/**
* Expected output values for a test case containing all calculated results.
*/
public record TestCaseExpected(
double mekA,
double logisticCost,
double mekB,
double fcaFee,
double transportation,
double d2d,
double airFreight,
double custom,
double repackaging,
double handling,
double disposal,
double space,
double capital,
int safetyStock,
List<DestinationExpected> destinations
) {
public static Builder builder() {
return new Builder();
}
public static class Builder {
private double mekA;
private double logisticCost;
private double mekB;
private double fcaFee;
private double transportation;
private double d2d;
private double airFreight;
private double custom;
private double repackaging;
private double handling;
private double disposal;
private double space;
private double capital;
private int safetyStock;
private List<DestinationExpected> destinations = List.of();
public Builder mekA(double mekA) {
this.mekA = mekA;
return this;
}
public Builder logisticCost(double logisticCost) {
this.logisticCost = logisticCost;
return this;
}
public Builder mekB(double mekB) {
this.mekB = mekB;
return this;
}
public Builder fcaFee(double fcaFee) {
this.fcaFee = fcaFee;
return this;
}
public Builder transportation(double transportation) {
this.transportation = transportation;
return this;
}
public Builder d2d(double d2d) {
this.d2d = d2d;
return this;
}
public Builder airFreight(double airFreight) {
this.airFreight = airFreight;
return this;
}
public Builder custom(double custom) {
this.custom = custom;
return this;
}
public Builder repackaging(double repackaging) {
this.repackaging = repackaging;
return this;
}
public Builder handling(double handling) {
this.handling = handling;
return this;
}
public Builder disposal(double disposal) {
this.disposal = disposal;
return this;
}
public Builder space(double space) {
this.space = space;
return this;
}
public Builder capital(double capital) {
this.capital = capital;
return this;
}
public Builder safetyStock(int safetyStock) {
this.safetyStock = safetyStock;
return this;
}
public Builder destinations(List<DestinationExpected> destinations) {
this.destinations = destinations;
return this;
}
public TestCaseExpected build() {
return new TestCaseExpected(
mekA, logisticCost, mekB, fcaFee, transportation, d2d, airFreight,
custom, repackaging, handling, disposal, space, capital, safetyStock, destinations
);
}
}
}


@@ -0,0 +1,150 @@
package de.avatic.lcc.e2e.testdata;
import java.util.List;
/**
* Input data for a test case containing all form values to be entered.
*/
public record TestCaseInput(
String partNumber,
String supplierName,
boolean loadFromPrevious,
Integer hsCode,
double tariffRate,
double price,
double overseaShare,
boolean fcaFee,
int length,
int width,
int height,
String dimensionUnit,
int weight,
String weightUnit,
int piecesPerUnit,
boolean stacked,
boolean mixed,
List<DestinationInput> destinations
) {
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String partNumber;
private String supplierName;
private boolean loadFromPrevious;
private Integer hsCode;
private double tariffRate;
private double price;
private double overseaShare;
private boolean fcaFee;
private int length;
private int width;
private int height;
private String dimensionUnit = "cm";
private int weight;
private String weightUnit = "kg";
private int piecesPerUnit;
private boolean stacked;
private boolean mixed;
private List<DestinationInput> destinations = List.of();
public Builder partNumber(String partNumber) {
this.partNumber = partNumber;
return this;
}
public Builder supplierName(String supplierName) {
this.supplierName = supplierName;
return this;
}
public Builder loadFromPrevious(boolean loadFromPrevious) {
this.loadFromPrevious = loadFromPrevious;
return this;
}
public Builder hsCode(Integer hsCode) {
this.hsCode = hsCode;
return this;
}
public Builder tariffRate(double tariffRate) {
this.tariffRate = tariffRate;
return this;
}
public Builder price(double price) {
this.price = price;
return this;
}
public Builder overseaShare(double overseaShare) {
this.overseaShare = overseaShare;
return this;
}
public Builder fcaFee(boolean fcaFee) {
this.fcaFee = fcaFee;
return this;
}
public Builder length(int length) {
this.length = length;
return this;
}
public Builder width(int width) {
this.width = width;
return this;
}
public Builder height(int height) {
this.height = height;
return this;
}
public Builder dimensionUnit(String dimensionUnit) {
this.dimensionUnit = dimensionUnit;
return this;
}
public Builder weight(int weight) {
this.weight = weight;
return this;
}
public Builder weightUnit(String weightUnit) {
this.weightUnit = weightUnit;
return this;
}
public Builder piecesPerUnit(int piecesPerUnit) {
this.piecesPerUnit = piecesPerUnit;
return this;
}
public Builder stacked(boolean stacked) {
this.stacked = stacked;
return this;
}
public Builder mixed(boolean mixed) {
this.mixed = mixed;
return this;
}
public Builder destinations(List<DestinationInput> destinations) {
this.destinations = destinations;
return this;
}
public TestCaseInput build() {
return new TestCaseInput(
partNumber, supplierName, loadFromPrevious, hsCode, tariffRate, price,
overseaShare, fcaFee, length, width, height, dimensionUnit, weight,
weightUnit, piecesPerUnit, stacked, mixed, destinations
);
}
}
}


@@ -0,0 +1,901 @@
package de.avatic.lcc.e2e.testdata;
import java.util.List;
/**
* Static test case definitions extracted from Testfälle.xlsx.
* These test cases cover various logistics calculation scenarios including:
* - EU and Non-EU suppliers
* - Matrix, D2D, and Container transport modes
* - Different packaging configurations
* - Single and multiple destinations
*/
public final class TestCases {
private TestCases() {
// Utility class
}
/**
* Test Case 1: EU Supplier, user - Matrix - Direkt
* Single destination, no FCA fee, standard packaging
*/
public static final TestCase CASE_1 = new TestCase(
"1",
"EU Supplier, user - Matrix - Direkt",
TestCaseInput.builder()
.partNumber("3064540201")
.supplierName("Ireland supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(0.0)
.price(8.0)
.overseaShare(0.0)
.fcaFee(false)
.length(114)
.width(114)
.height(95)
.dimensionUnit("cm")
.weight(850)
.weightUnit("kg")
.piecesPerUnit(20)
.stacked(true)
.mixed(true)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(5)
.d2d(false)
.route("IE SUP,HH")
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(8.0)
.logisticCost(33.76)
.mekB(41.76)
.fcaFee(0.0)
.transportation(4.18)
.d2d(0.0)
.airFreight(0.0)
.custom(0.0)
.repackaging(0.0)
.handling(4.392)
.disposal(0.0)
.space(24.95)
.capital(0.13)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(3)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build()
))
.build()
);
/**
* Test Case 2: EU-supplier, standard - Matrix - Über Hop
* Two destinations, with FCA fee, individual packaging
*/
public static final TestCase CASE_2 = new TestCase(
"2",
"EU-supplier, standard - Matrix - Über Hop",
TestCaseInput.builder()
.partNumber("4222640104")
.supplierName("Hamburg (KION plant)")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(0.0)
.price(230.0)
.overseaShare(0.0)
.fcaFee(true)
.length(120)
.width(80)
.height(95)
.dimensionUnit("cm")
.weight(1200)
.weightUnit("kg")
.piecesPerUnit(2000)
.stacked(true)
.mixed(true)
.destinations(List.of(
DestinationInput.builder()
.name("Geisa (KION plant)")
.quantity(3500)
.d2d(false)
.route("HH,WH STO,FGG")
.handlingCost(3.5)
.repackingCost(2.7)
.disposalCost(6.5)
.customHandling(true)
.build(),
DestinationInput.builder()
.name("Aschaffenburg (KION plant)")
.quantity(25000)
.d2d(false)
.route("HH,WH ULHA,AB")
.handlingCost(3.0)
.repackingCost(3.3)
.disposalCost(8.0)
.customHandling(true)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(230.0)
.logisticCost(1.50)
.mekB(231.50)
.fcaFee(0.46)
.transportation(0.02)
.d2d(0.0)
.airFreight(0.0)
.custom(0.0)
.repackaging(0.00)
.handling(0.00)
.disposal(0.00)
.space(0.01)
.capital(1.00)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(6)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build(),
DestinationExpected.builder()
.transitTime(6)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build()
))
.build()
);
/**
* Test Case 3: Non-EU supplier, user - Matrix - Direkt
* Three destinations, with customs
*/
public static final TestCase CASE_3 = new TestCase(
"3",
"Non-EU supplier, user - Matrix - Direkt",
TestCaseInput.builder()
.partNumber("4222640803")
.supplierName("Turkey supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(1.7)
.price(11.0)
.overseaShare(0.0)
.fcaFee(true)
.length(120)
.width(100)
.height(80)
.dimensionUnit("cm")
.weight(570)
.weightUnit("kg")
.piecesPerUnit(2000)
.stacked(true)
.mixed(true)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(60000)
.d2d(false)
.route("Turkey sup ...,WH HH,HH")
.customHandling(false)
.build(),
DestinationInput.builder()
.name("Aschaffenburg (KION plant)")
.quantity(80000)
.d2d(false)
.route("Turkey sup ...,WH ULHA,AB")
.handlingCost(6.0)
.repackingCost(6.0)
.disposalCost(6.0)
.customHandling(true)
.build(),
DestinationInput.builder()
.name("Luzzara (KION plant)")
.quantity(30000)
.d2d(false)
.route("Turkey sup ...,LZZ")
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(11.0)
.logisticCost(0.33)
.mekB(11.33)
.fcaFee(0.02)
.transportation(0.06)
.d2d(0.0)
.airFreight(0.0)
.custom(0.21)
.repackaging(0.00)
.handling(0.00)
.disposal(0.00)
.space(0.00)
.capital(0.03)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(6)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build(),
DestinationExpected.builder()
.transitTime(6)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build(),
DestinationExpected.builder()
.transitTime(3)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build()
))
.build()
);
/**
* Test Case 3b: Non-EU supplier, standard - Matrix - Direkt
* Variation of case 3 with standard packaging
*/
public static final TestCase CASE_3B = new TestCase(
"3b",
"Non-EU supplier, standard - Matrix - Direkt",
TestCaseInput.builder()
.partNumber("4222640805")
.supplierName("Turkey supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(1.7)
.price(11.0)
.overseaShare(0.0)
.fcaFee(true)
.length(120)
.width(100)
.height(80)
.dimensionUnit("cm")
.weight(570)
.weightUnit("kg")
.piecesPerUnit(2000)
.stacked(true)
.mixed(true)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(60000)
.d2d(false)
.route("Turkey sup ...,WH HH,HH")
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(11.0)
.logisticCost(0.33)
.mekB(11.33)
.fcaFee(0.02)
.transportation(0.06)
.d2d(0.0)
.airFreight(0.0)
.custom(0.21)
.repackaging(0.0)
.handling(0.01)
.disposal(0.0)
.space(0.01)
.capital(0.03)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(6)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build()
))
.build()
);
/**
* Test Case 4: Non-EU supplier, standard - D2D - Über Hop
* D2D transport with customs, large volume
*/
public static final TestCase CASE_4 = new TestCase(
"4",
"Non-EU supplier, standard - D2D - Über Hop",
TestCaseInput.builder()
.partNumber("5512640106")
.supplierName("Turkey supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(3.0)
.price(56.87)
.overseaShare(100.0)
.fcaFee(false)
.length(114)
.width(114)
.height(95)
.dimensionUnit("cm")
.weight(850)
.weightUnit("kg")
.piecesPerUnit(10000)
.stacked(true)
.mixed(true)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(1200000)
.d2d(true)
.route("Turkey sup ...,WH HH,HH")
.d2dCost(6500.0)
.d2dDuration(47)
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(56.87)
.logisticCost(2.61)
.mekB(59.48)
.fcaFee(0.0)
.transportation(0.0)
.d2d(0.03)
.airFreight(0.0)
.custom(1.71)
.repackaging(0.0)
.handling(0.00)
.disposal(0.00)
.space(0.00)
.capital(0.87)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(47)
.stackedLayers(2)
.containerUnitCount(240000)
.containerType("40 ft. GP")
.limitingFactor("Weight")
.build()
))
.build()
);
/**
* Test Case 5: EU Supplier, user - D2D - Über Hop
* D2D transport with custom handling costs
*/
public static final TestCase CASE_5 = new TestCase(
"5",
"EU Supplier, user - D2D - Über Hop",
TestCaseInput.builder()
.partNumber("8212640113")
.supplierName("Ireland supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(0.0)
.price(8.0)
.overseaShare(75.0)
.fcaFee(true)
.length(114)
.width(114)
.height(95)
.dimensionUnit("cm")
.weight(850)
.weightUnit("kg")
.piecesPerUnit(2000)
.stacked(true)
.mixed(false)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(500)
.d2d(true)
.route("IE SUP,WH HH,HH")
.d2dCost(2500.0)
.d2dDuration(12)
.handlingCost(120.0)
.repackingCost(230.0)
.disposalCost(5.0)
.customHandling(true)
.build(),
DestinationInput.builder()
.name("Aschaffenburg (KION plant)")
.quantity(1000)
.d2d(true)
.route("IE SUP,WH ULHA,AB")
.d2dCost(1500.0)
.d2dDuration(10)
.handlingCost(2.5)
.repackingCost(5.0)
.disposalCost(6.0)
.customHandling(true)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(8.0)
.logisticCost(8.61)
.mekB(16.61)
.fcaFee(0.02)
.transportation(0.0)
.d2d(8.0)
.airFreight(0.0)
.custom(0.0)
.repackaging(0.04)
.handling(0.24)
.disposal(0.00)
.space(0.17)
.capital(0.16)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(12)
.stackedLayers(2)
.containerUnitCount(48000)
.containerType("40 ft. GP")
.limitingFactor("Weight")
.build(),
DestinationExpected.builder()
.transitTime(10)
.stackedLayers(2)
.containerUnitCount(48000)
.containerType("40 ft. GP")
.limitingFactor("Weight")
.build()
))
.build()
);
/**
* Test Case 6: EU-supplier, standard - D2D - Über Hop
* D2D transport with custom handling, three destinations
*/
public static final TestCase CASE_6 = new TestCase(
"6",
"EU-supplier, standard - D2D - Über Hop",
TestCaseInput.builder()
.partNumber("8212640827")
.supplierName("Hamburg (KION plant)")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(100.0)
.price(18.2)
.overseaShare(0.0)
.fcaFee(false)
.length(1140)
.width(1140)
.height(950)
.dimensionUnit("mm")
.weight(99000)
.weightUnit("g")
.piecesPerUnit(2000)
.stacked(true)
.mixed(false)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(4000)
.d2d(true)
.d2dCost(0.01)
.d2dDuration(1)
.handlingCost(0.0)
.repackingCost(0.0)
.disposalCost(0.0)
.customHandling(true)
.build(),
DestinationInput.builder()
.name("Aschaffenburg (KION plant)")
.quantity(6000)
.d2d(true)
.d2dCost(100.0)
.d2dDuration(2)
.customHandling(false)
.build(),
DestinationInput.builder()
.name("Luzzara (KION plant)")
.quantity(3000)
.d2d(true)
.d2dCost(200.0)
.d2dDuration(3)
.handlingCost(20.0)
.repackingCost(7.0)
.disposalCost(11.0)
.customHandling(true)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(18.2)
.logisticCost(0.41)
.mekB(18.61)
.fcaFee(0.0)
.transportation(0.0)
.d2d(0.07)
.airFreight(0.0)
.custom(0.0)
.repackaging(0.00)
.handling(0.01)
.disposal(0.00)
.space(0.03)
.capital(0.30)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(1)
.stackedLayers(2)
.containerUnitCount(80000)
.containerType("40 ft. GP")
.limitingFactor("Volume")
.build(),
DestinationExpected.builder()
.transitTime(2)
.stackedLayers(2)
.containerUnitCount(80000)
.containerType("40 ft. GP")
.limitingFactor("Volume")
.build(),
DestinationExpected.builder()
.transitTime(3)
.stackedLayers(2)
.containerUnitCount(80000)
.containerType("40 ft. GP")
.limitingFactor("Volume")
.build()
))
.build()
);
/**
* Test Case 7: Non-EU supplier, user - D2D - Über Hop
* D2D transport from China with customs and air freight
*/
public static final TestCase CASE_7 = new TestCase(
"7",
"Non-EU supplier, user - D2D - Über Hop",
TestCaseInput.builder()
.partNumber("8222640822")
.supplierName("Yantian supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(3.0)
.price(56.87)
.overseaShare(100.0)
.fcaFee(true)
.length(114)
.width(114)
.height(95)
.dimensionUnit("cm")
.weight(850)
.weightUnit("kg")
.piecesPerUnit(10000)
.stacked(true)
.mixed(false)
.destinations(List.of(
DestinationInput.builder()
.name("Stříbro (KION plant)")
.quantity(50000)
.d2d(true)
.route("Yantian s ...,CNSZX,DEHAM,WH ZBU,STR")
.d2dCost(6500.0)
.d2dDuration(47)
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(56.87)
.logisticCost(5.48)
.mekB(62.35)
.fcaFee(0.11)
.transportation(0.0)
.d2d(0.39)
.airFreight(0.0)
.custom(1.72)
.repackaging(0.00)
.handling(0.00)
.disposal(0.00)
.space(0.01)
.capital(3.25)
.safetyStock(100)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(47)
.stackedLayers(2)
.containerUnitCount(240000)
.containerType("40 ft. GP")
.limitingFactor("Weight")
.build()
))
.build()
);
/**
* Test Case 8: Non-EU supplier, standard - D2D - Über Hop
* D2D from China (Baoli) with container transport
*/
public static final TestCase CASE_8 = new TestCase(
"8",
"Non-EU supplier, standard - D2D - Über Hop",
TestCaseInput.builder()
.partNumber("8212640827")
.supplierName("KION Baoli (Jiangsu) Forklift Co., Ltd. (KION plant)")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(3.0)
.price(18.2)
.overseaShare(0.0)
.fcaFee(false)
.length(120)
.width(100)
.height(87)
.dimensionUnit("cm")
.weight(99000)
.weightUnit("g")
.piecesPerUnit(150)
.stacked(true)
.mixed(false)
.destinations(List.of(
DestinationInput.builder()
.name("Aschaffenburg (KION plant)")
.quantity(15000)
.d2d(true)
.route("JJ,CNSHA,DEHAM,WH STO,WH ULHA,AB")
.d2dCost(4500.0)
.d2dDuration(47)
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(18.2)
.logisticCost(2.99)
.mekB(21.19)
.fcaFee(0.0)
.transportation(0.0)
.d2d(0.9)
.airFreight(0.0)
.custom(0.58)
.repackaging(0.05)
.handling(0.05)
.disposal(0.04)
.space(0.33)
.capital(1.04)
.safetyStock(55)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(47)
.stackedLayers(2)
.containerUnitCount(6300)
.containerType("40 ft. GP")
.limitingFactor("Volume")
.build()
))
.build()
);
/**
* Test Case 9: EU Supplier, user - Container - Über Hop
* Container transport with very low quantity
*/
public static final TestCase CASE_9 = new TestCase(
"9",
"EU Supplier, user - Container - Über Hop",
TestCaseInput.builder()
.partNumber("8263500575")
.supplierName("Ireland supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(0.0)
.price(8.0)
.overseaShare(0.0)
.fcaFee(false)
.length(114)
.width(114)
.height(95)
.dimensionUnit("cm")
.weight(850)
.weightUnit("kg")
.piecesPerUnit(20)
.stacked(false)
.mixed(false)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(5)
.d2d(false)
.route("IE SUP,HH")
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(8.0)
.logisticCost(1505.46)
.mekB(1513.46)
.fcaFee(0.0)
.transportation(1475.98)
.d2d(0.0)
.airFreight(0.0)
.custom(0.0)
.repackaging(0.0)
.handling(4.39)
.disposal(0.0)
.space(24.95)
.capital(0.13)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(3)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build()
))
.build()
);
/**
* Test Case 10: EU-supplier, standard - Container - Über Hop
* Container transport with custom handling costs
*/
public static final TestCase CASE_10 = new TestCase(
"10",
"EU-supplier, standard - Container - Über Hop",
TestCaseInput.builder()
.partNumber("8263500575")
.supplierName("Ireland supplier")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(0.0)
.price(8.0)
.overseaShare(0.0)
.fcaFee(true)
.length(114)
.width(114)
.height(95)
.dimensionUnit("cm")
.weight(850)
.weightUnit("kg")
.piecesPerUnit(20)
.stacked(false)
.mixed(false)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(40)
.d2d(false)
.route("IE SUP,HH")
.handlingCost(6.0)
.repackingCost(6.0)
.disposalCost(6.0)
.customHandling(true)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(8.0)
.logisticCost(188.82)
.mekB(196.82)
.fcaFee(0.02)
.transportation(184.50)
.d2d(0.0)
.airFreight(0.0)
.custom(0.0)
.repackaging(0.3)
.handling(0.45)
.disposal(0.3)
.space(3.12)
.capital(0.14)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(3)
.stackedLayers(null)
.containerUnitCount(null)
.containerType(null)
.limitingFactor(null)
.build()
))
.build()
);
/**
* Test Case 11: Non-EU supplier, user - Container - Über Hop
* Container transport from China with air freight
*/
public static final TestCase CASE_11 = new TestCase(
"11",
"Non-EU supplier, user - Container - Über Hop",
TestCaseInput.builder()
.partNumber("8263500575")
.supplierName("Linde (China) Forklift Truck (Supplier) (KION plant)")
.loadFromPrevious(false)
.hsCode(84312002)
.tariffRate(1.7)
.price(8.0)
.overseaShare(75.0)
.fcaFee(true)
.length(114)
.width(114)
.height(95)
.dimensionUnit("cm")
.weight(850)
.weightUnit("kg")
.piecesPerUnit(20)
.stacked(false)
.mixed(false)
.destinations(List.of(
DestinationInput.builder()
.name("Hamburg (KION plant)")
.quantity(900)
.d2d(false)
.route("LX,CNXMN,DEHAM,WH HH,HH")
.customHandling(false)
.build()
))
.build(),
TestCaseExpected.builder()
.mekA(8.0)
.logisticCost(9.50)
.mekB(17.50)
.fcaFee(0.02)
.transportation(4.87)
.d2d(0.0)
.airFreight(0.0)
.custom(0.32)
.repackaging(0.39)
.handling(0.38)
.disposal(0.30)
.space(2.77)
.capital(0.46)
.safetyStock(10)
.destinations(List.of(
DestinationExpected.builder()
.transitTime(47)
.stackedLayers(2)
.containerUnitCount(400)
.containerType("20 ft. GP")
.limitingFactor("Volume")
.build()
))
.build()
);
/**
* All test cases as a list for parametrized tests.
*/
public static final List<TestCase> ALL = List.of(
CASE_1,
CASE_2,
CASE_3,
CASE_3B,
CASE_4,
CASE_5,
CASE_6,
CASE_7,
CASE_8,
CASE_9,
CASE_10,
CASE_11
);
}
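Since ALL is intended for parametrized tests, a minimal JUnit 5 consumer (hypothetical test class, not part of this diff) could look like this:

// Hypothetical sketch of a parametrized consumer of TestCases.ALL; not part of the diff.
package de.avatic.lcc.e2e.testdata;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.List;

class TestCasesConsistencyTest {

    static List<TestCase> cases() {
        return TestCases.ALL;
    }

    @ParameterizedTest(name = "case {index}")
    @MethodSource("cases")
    void inputAndExpectedDestinationsLineUp(TestCase testCase) {
        // The verification code pairs destinations by index, so the counts must match.
        Assertions.assertEquals(
                testCase.input().destinations().size(),
                testCase.expected().destinations().size());
    }
}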


@@ -0,0 +1,443 @@
package de.avatic.lcc.e2e.tests;
import com.microsoft.playwright.Browser;
import com.microsoft.playwright.BrowserContext;
import com.microsoft.playwright.BrowserType;
import com.microsoft.playwright.Page;
import com.microsoft.playwright.Playwright;
import de.avatic.lcc.LccApplication;
import de.avatic.lcc.config.DatabaseTestConfiguration;
import de.avatic.lcc.e2e.config.TestFrontendConfig;
import de.avatic.lcc.e2e.pages.DevLoginPage;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.TestInstance;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.server.LocalServerPort;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ActiveProfiles;
import org.testcontainers.junit.jupiter.Testcontainers;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
/**
* Abstract base class for E2E tests.
* Starts Spring Boot backend with integrated frontend and provides Playwright setup.
*
* <p>Prerequisites:
* <ul>
* <li>Frontend must be built to src/main/resources/static before running tests</li>
* <li>Run: {@code cd src/frontend && BUILD_FOR_SPRING=true npm run build}</li>
* </ul>
*
* <p>Or use Maven profile (if configured):
* {@code mvn test -Dtest="*E2ETest" -Pe2e}
*/
@SpringBootTest(
classes = LccApplication.class,
webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT
)
@Import({DatabaseTestConfiguration.class, TestFrontendConfig.class})
@Testcontainers
@ActiveProfiles({"test", "dev", "mysql", "e2e"})
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@Tag("e2e")
public abstract class AbstractE2ETest {
@Autowired
protected JdbcTemplate jdbcTemplate;
private static final Logger logger = Logger.getLogger(AbstractE2ETest.class.getName());
protected static final boolean HEADLESS = Boolean.parseBoolean(
System.getProperty("playwright.headless", "true")
);
protected static final double TOLERANCE = 0.03; // 3% tolerance for numeric comparisons
@LocalServerPort
protected int port;
protected Playwright playwright;
protected Browser browser;
protected BrowserContext context;
protected Page page;
protected String getBaseUrl() {
return "http://localhost:" + port;
}
@BeforeAll
void setupPlaywright() {
// Load E2E test data
loadTestData();
checkFrontendBuilt();
logger.info("Setting up Playwright");
playwright = Playwright.create();
browser = playwright.chromium().launch(
new BrowserType.LaunchOptions()
.setHeadless(HEADLESS)
.setSlowMo(HEADLESS ? 0 : 100)
);
// Ensure screenshot directory exists
try {
Files.createDirectories(Paths.get("target/screenshots"));
} catch (Exception e) {
logger.warning("Could not create screenshots directory");
}
logger.info(() -> String.format(
"Playwright setup complete. Headless: %s, Base URL: %s",
HEADLESS, getBaseUrl()
));
}
@BeforeEach
void setupPage() {
context = browser.newContext(new Browser.NewContextOptions()
.setViewportSize(1920, 1080)
);
page = context.newPage();
// Login via DevLoginPage
DevLoginPage loginPage = new DevLoginPage(page);
loginPage.login(getBaseUrl(), "John");
// Navigate to home page after login
page.navigate(getBaseUrl());
page.waitForLoadState();
// Take screenshot after login
takeScreenshot("after_login");
logger.info(() -> "Page setup complete, logged in as John. Current URL: " + page.url());
}
@AfterEach
void teardownPage() {
if (context != null) {
context.close();
}
}
@AfterAll
void teardownPlaywright() {
if (browser != null) {
browser.close();
}
if (playwright != null) {
playwright.close();
}
logger.info("Playwright teardown complete");
}
/**
* Takes a screenshot for debugging purposes.
*/
protected void takeScreenshot(String name) {
Path screenshotPath = Paths.get("target/screenshots/" + name + ".png");
page.screenshot(new Page.ScreenshotOptions().setPath(screenshotPath));
logger.info(() -> "Screenshot saved to: " + screenshotPath);
}
/**
* Checks if the frontend has been built to static resources.
* Throws an exception with instructions if not.
*/
private void checkFrontendBuilt() {
Path staticIndex = Paths.get("src/main/resources/static/index.html");
if (!Files.exists(staticIndex)) {
// Try to build frontend automatically
if (tryBuildFrontend()) {
logger.info("Frontend built successfully");
} else {
throw new IllegalStateException(
"Frontend not built. Please run:\n" +
" cd src/frontend && BUILD_FOR_SPRING=true npm run build\n" +
"Or set -Dskip.frontend.check=true to skip this check."
);
}
} else {
logger.info("Frontend already built at: " + staticIndex);
}
}
/**
* Attempts to build the frontend automatically.
* Returns true if successful, false otherwise.
*/
private boolean tryBuildFrontend() {
if (Boolean.getBoolean("skip.frontend.build")) {
return false;
}
logger.info("Attempting to build frontend...");
try {
File frontendDir = new File("src/frontend");
if (!frontendDir.exists()) {
logger.warning("Frontend directory not found");
return false;
}
// Check if node_modules exists
File nodeModules = new File(frontendDir, "node_modules");
if (!nodeModules.exists()) {
logger.info("Installing npm dependencies...");
ProcessBuilder npmInstall = new ProcessBuilder("npm", "install")
.directory(frontendDir)
.inheritIO();
Process installProcess = npmInstall.start();
if (!installProcess.waitFor(5, TimeUnit.MINUTES)) {
installProcess.destroyForcibly();
return false;
}
}
// Build frontend (to dist/)
ProcessBuilder npmBuild = new ProcessBuilder("npm", "run", "build")
.directory(frontendDir)
.inheritIO();
Process buildProcess = npmBuild.start();
boolean completed = buildProcess.waitFor(3, TimeUnit.MINUTES);
if (!completed) {
buildProcess.destroyForcibly();
return false;
}
if (buildProcess.exitValue() != 0) {
return false;
}
// Copy dist/ to src/main/resources/static/
return copyFrontendToStatic(frontendDir);
} catch (IOException | InterruptedException e) {
logger.warning("Failed to build frontend: " + e.getMessage());
return false;
}
}
/**
* Loads E2E test data into the database.
* This is called once before all tests run.
*/
private void loadTestData() {
logger.info("Loading E2E test data...");
// Check if test users already exist
Integer existingUsers = jdbcTemplate.queryForObject(
"SELECT COUNT(*) FROM sys_user WHERE email = 'john.doe@test.com'",
Integer.class
);
if (existingUsers != null && existingUsers > 0) {
logger.info("Test users already exist, checking nodes...");
addMissingNodes();
return;
}
// Create test users
jdbcTemplate.update(
"INSERT INTO sys_user (workday_id, email, firstname, lastname, is_active) VALUES (?, ?, ?, ?, ?)",
"WD001TEST", "john.doe@test.com", "John", "Doe", true
);
jdbcTemplate.update(
"INSERT INTO sys_user (workday_id, email, firstname, lastname, is_active) VALUES (?, ?, ?, ?, ?)",
"WD002TEST", "jane.smith@test.com", "Jane", "Smith", true
);
jdbcTemplate.update(
"INSERT INTO sys_user (workday_id, email, firstname, lastname, is_active) VALUES (?, ?, ?, ?, ?)",
"WD003TEST", "admin.test@test.com", "Admin", "User", true
);
// Assign groups to users
// John gets 'super' role for full E2E testing capabilities
jdbcTemplate.update(
"INSERT INTO sys_user_group_mapping (user_id, group_id) " +
"SELECT u.id, g.id FROM sys_user u, sys_group g " +
"WHERE u.email = 'john.doe@test.com' AND g.group_name = 'super'"
);
jdbcTemplate.update(
"INSERT INTO sys_user_group_mapping (user_id, group_id) " +
"SELECT u.id, g.id FROM sys_user u, sys_group g " +
"WHERE u.email = 'jane.smith@test.com' AND g.group_name = 'super'"
);
jdbcTemplate.update(
"INSERT INTO sys_user_group_mapping (user_id, group_id) " +
"SELECT u.id, g.id FROM sys_user u, sys_group g " +
"WHERE u.email = 'admin.test@test.com' AND g.group_name = 'super'"
);
// Add missing nodes for E2E tests
addMissingNodes();
logger.info("E2E test data loaded successfully");
}
/**
* Adds missing nodes needed for E2E tests.
*/
private void addMissingNodes() {
logger.info("Adding missing nodes for E2E tests...");
// Add Ireland supplier to node table (if not exists)
Integer irelandCount = jdbcTemplate.queryForObject(
"SELECT COUNT(*) FROM node WHERE name = 'Ireland supplier'", Integer.class);
if (irelandCount == null || irelandCount == 0) {
Integer ieCountryId = jdbcTemplate.queryForObject(
"SELECT id FROM country WHERE iso_code = 'IE'", Integer.class);
jdbcTemplate.update(
"INSERT INTO node (country_id, name, address, external_mapping_id, predecessor_required, " +
"is_destination, is_source, is_intermediate, geo_lat, geo_lng, is_deprecated) " +
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
ieCountryId, "Ireland supplier", "Dublin Ireland", "IE_SUP", false,
false, true, false, 53.3494, -6.2606, false
);
logger.info("Added Ireland supplier to node table");
}
// Get test user ID for sys_user_node entries
Integer testUserId = jdbcTemplate.queryForObject(
"SELECT id FROM sys_user WHERE email = 'john.doe@test.com'", Integer.class);
// Add Turkey supplier to sys_user_node (if not exists)
Integer turkeyCount = jdbcTemplate.queryForObject(
"SELECT COUNT(*) FROM sys_user_node WHERE name = 'Turkey supplier'", Integer.class);
if (turkeyCount == null || turkeyCount == 0) {
Integer trCountryId = jdbcTemplate.queryForObject(
"SELECT id FROM country WHERE iso_code = 'TR'", Integer.class);
jdbcTemplate.update(
"INSERT INTO sys_user_node (user_id, country_id, name, address, geo_lat, geo_lng, is_deprecated) " +
"VALUES (?, ?, ?, ?, ?, ?, ?)",
testUserId, trCountryId, "Turkey supplier", "Antalya Türkiye",
36.8864, 30.7105, false
);
logger.info("Added Turkey supplier to sys_user_node table");
}
// Add Yantian supplier to sys_user_node (if not exists)
Integer yantianCount = jdbcTemplate.queryForObject(
"SELECT COUNT(*) FROM sys_user_node WHERE name = 'Yantian supplier'", Integer.class);
if (yantianCount == null || yantianCount == 0) {
Integer cnCountryId = jdbcTemplate.queryForObject(
"SELECT id FROM country WHERE iso_code = 'CN'", Integer.class);
jdbcTemplate.update(
"INSERT INTO sys_user_node (user_id, country_id, name, address, geo_lat, geo_lng, is_deprecated) " +
"VALUES (?, ?, ?, ?, ?, ?, ?)",
testUserId, cnCountryId, "Yantian supplier", "Yantian, China",
22.5925, 114.2460, false
);
logger.info("Added Yantian supplier to sys_user_node table");
}
logger.info("Missing nodes added");
// Add test materials
addTestMaterials();
}
/**
* Adds test materials needed for E2E tests.
*/
private void addTestMaterials() {
logger.info("Adding test materials...");
String[] materials = {
"3064540201", "003064540201", "84312000", "wheel hub",
"4222640104", "004222640104", "84139100", "gearbox housing blank",
"4222640803", "004222640803", "84139100", "planet gear carrier blank stage 1",
"4222640805", "004222640805", "84139100", "planet gear carrier blank stage 2",
"5512640106", "005512640106", "84312000", "transmission housing blank",
"8212640113", "008212640113", "84312000", "transmission housing blank GR2E-04",
"8212640827", "008212640827", "84312000", "planet gear carrier blank Stufe 1",
"8222640822", "008222640822", "84839089", "planet gear carrier blank stage 1",
"8263500575", "008263500575", "85015220", "traction motor assy"
};
for (int i = 0; i < materials.length; i += 4) {
String partNumber = materials[i];
String normalizedPartNumber = materials[i + 1];
String hsCode = materials[i + 2];
String name = materials[i + 3];
// Check by normalized_part_number since that has the UNIQUE constraint
Integer count = jdbcTemplate.queryForObject(
"SELECT COUNT(*) FROM material WHERE normalized_part_number = ?",
Integer.class, normalizedPartNumber);
if (count == null || count == 0) {
try {
jdbcTemplate.update(
"INSERT INTO material (part_number, normalized_part_number, hs_code, name, is_deprecated) " +
"VALUES (?, ?, ?, ?, ?)",
partNumber, normalizedPartNumber, hsCode, name, false
);
logger.info(() -> "Added material: " + partNumber + " (normalized: " + normalizedPartNumber + ")");
} catch (Exception e) {
logger.warning(() -> "Failed to insert material " + partNumber + ": " + e.getMessage());
}
} else {
logger.info(() -> "Material already exists: " + normalizedPartNumber);
}
}
logger.info("Test materials added");
}
/**
* Copies the built frontend from dist/ to src/main/resources/static/.
*/
private boolean copyFrontendToStatic(File frontendDir) {
Path source = frontendDir.toPath().resolve("dist");
Path target = Paths.get("src/main/resources/static");
if (!Files.exists(source)) {
logger.warning("Frontend dist directory not found: " + source);
return false;
}
try {
// Create target directory if needed
Files.createDirectories(target);
// Copy all files recursively
try (var walk = Files.walk(source)) {
walk.forEach(sourcePath -> {
try {
Path targetPath = target.resolve(source.relativize(sourcePath));
if (Files.isDirectory(sourcePath)) {
Files.createDirectories(targetPath);
} else {
Files.copy(sourcePath, targetPath,
java.nio.file.StandardCopyOption.REPLACE_EXISTING);
}
} catch (IOException e) {
throw new RuntimeException("Failed to copy: " + sourcePath, e);
}
});
}
logger.info("Frontend copied to: " + target);
return true;
} catch (IOException | RuntimeException e) {
logger.warning("Failed to copy frontend: " + e.getMessage());
return false;
}
}
}
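Concrete E2E tests extend this base class and only add @Test methods; a minimal sketch (hypothetical class name, not part of this diff):

// Hypothetical sketch of a concrete E2E test built on AbstractE2ETest; not part of the diff.
package de.avatic.lcc.e2e.tests;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertTrue;

class HomePageE2ETest extends AbstractE2ETest {

    @Test
    void homePageLoadsAfterLogin() {
        // setupPage() has already logged in as "John" and navigated to the base URL.
        page.waitForLoadState();
        takeScreenshot("home_page_loaded");
        assertTrue(page.url().startsWith(getBaseUrl()), "should stay on the application host");
    }
}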

Some files were not shown because too many files have changed in this diff.