Compare commits

..

2 Commits

Author            SHA1         Message                        Date
Michael Hoennig   1e2986b091   upgrade to Gradle 8.12.1       2025-02-01 10:48:19 +01:00
Michael Hoennig   313b373ce2   fix some deprecated warnings   2025-01-31 14:40:36 +01:00
40 changed files with 2522 additions and 21120 deletions


@@ -1,4 +1,4 @@
-# For using the alias gw-importHostingAssets,
+# For using the alias gw-importOfficeData or gw-importHostingAssets,
 # copy the file .tc-environment to .environment (ignored by git)
 # and amend them according to your external DB.
@@ -71,6 +71,7 @@ function importLegacyData() {
 ./gradlew $target --rerun
 fi
 }
+alias gw-importOfficeData='importLegacyData importOfficeData'
 alias gw-importHostingAssets='importLegacyData importHostingAssets'
 alias podman-start='systemctl --user enable --now podman.socket && systemctl --user status podman.socket && ls -la /run/user/$UID/podman/podman.sock'
@@ -91,8 +92,8 @@ alias fp='grep -r '@Accepts' src | sed -e 's/^.*@/@/g' | sort -u | wc -l'
 alias gw-spotless='./gradlew spotlessApply -x pitest -x test -x :processResources'
 alias gw-check='. .aliases; . .tc-environment; gw test check -x pitest'
-# HOWTO: run all 'normal' tests (by default without scenario+import-tests): `gw-test`
-# You can also mention specific targets: `gw-test importHostingAssets`, in that case only these tests are executed.
+# HOWTO: run all 'normal' tests (no scenario+import-tests): `gw-test`
+# You can also mention specific targets: `gw-test importOfficeData`.
 # This will always use the environment from `.tc-environment`.
 #
 # HOWTO: re-run tests even if no changed can be detected: `gw-test --rerun`
@@ -113,16 +114,16 @@ function _gwTest1() {
 echo "DONE gw $@"
 }
 function _gwTest() {
-. .aliases
-. .tc-environment
-rm -f /tmp/gwTest.tmp
+. .aliases;
+. .tc-environment;
+rm /tmp/gwTest.tmp
 if [ "$1" == "--all" ]; then
 shift # to remove the --all from $@
 # delierately in separate gradlew-calls to avoid Testcontains-PostgreSQL problem spillover
 time (_gwTest1 unitTest "$@" &&
 _gwTest1 officeIntegrationTest bookingIntegrationTest hostingIntegrationTest "$@" &&
 _gwTest1 scenarioTest "$@" &&
-_gwTest1 importHostingAssets "$@");
+_gwTest1 importOfficeData importHostingAssets "$@");
 elif [ $# -eq 0 ] || [[ $1 == -* ]]; then
 time _gwTest1 test "$@";
 else
@@ -136,7 +137,7 @@ alias howto=bin/howto
 alias cas-curl=bin/cas-curl
 # etc/docker-compose.yml limits CPUs+MEM and includes a PostgreSQL config for analysing slow queries
-alias gw-importHostingAssets-in-docker-compose='
+alias gw-importOfficeData-in-docker-compose='
 docker-compose -f etc/docker-compose.yml down &&
 docker-compose -f etc/docker-compose.yml up -d && sleep 10 &&
 time gw-importHostingAssets'
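
Taken together with the HOWTO comments above, the intended local usage of these helpers looks roughly like this (a hypothetical session; it assumes the alias definitions from this file, including `gw-test`, have been sourced):

    . .aliases && . .tc-environment     # load the helper aliases and the Testcontainers environment
    gw-test                             # all 'normal' tests, i.e. without scenario- and import-tests
    gw-test importOfficeData --rerun    # only the named target, re-run even without detected changes
    gw-test --all                       # unit-, integration-, import- and scenario-tests in separate Gradle calls
    gw-importHostingAssets              # a single import job via the importLegacyData wrapper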


@@ -7,7 +7,6 @@
 <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="postgres" />
 <entry key="HSADMINNG_POSTGRES_JDBC_URL" value="jdbc:postgresql://localhost:5432/postgres" />
 <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
-<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
 </map>
 </option>
 <option name="executionName" />
@@ -35,4 +34,4 @@
 <RunAsTest>true</RunAsTest>
 <method v="2" />
 </configuration>
 </component>


@@ -3,9 +3,9 @@
 <ExternalSystemSettings>
 <option name="env">
 <map>
+<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
 <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
 <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
-<entry key="HSADMINNG_SUPERUSER" value="import-superuser@hostsharing.net" />
 </map>
 </option>
 <option name="executionName" />


@@ -0,0 +1,103 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
</map>
</option>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value=":importOfficeData" />
<option value="--tests" />
<option value="&quot;net.hostsharing.hsadminng.hs.migration.ImportOfficeData&quot;" />
</list>
</option>
<option name="vmOptions" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
<extension name="coverage" sample_coverage="false" />
</EXTENSION>
<DebugAllEnabled>false</DebugAllEnabled>
<RunAsTest>true</RunAsTest>
<method v="2" />
</configuration>
<configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
</map>
</option>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value=":importOfficeData" />
<option value="--tests" />
<option value="&quot;net.hostsharing.hsadminng.hs.office.migration.ImportOfficeData&quot;" />
</list>
</option>
<option name="vmOptions" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
<extension name="coverage" sample_coverage="false" />
</EXTENSION>
<DebugAllEnabled>false</DebugAllEnabled>
<RunAsTest>true</RunAsTest>
<method v="2" />
</configuration>
<configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
</map>
</option>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value=":importOfficeData" />
<option value="--tests" />
<option value="&quot;net.hostsharing.hsadminng.hs.migration.ImportOfficeData&quot;" />
</list>
</option>
<option name="vmOptions" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
<extension name="coverage" sample_coverage="false" />
</EXTENSION>
<DebugAllEnabled>false</DebugAllEnabled>
<RunAsTest>true</RunAsTest>
<method v="2" />
</configuration>
</component>


@@ -1,7 +1,7 @@
-source .unset-environment
+unset HSADMINNG_POSTGRES_JDBC_URL # dynamically set, different for normal tests and imports
+export HSADMINNG_POSTGRES_RESTRICTED_USERNAME=restricted
 export HSADMINNG_POSTGRES_ADMIN_USERNAME=admin
-export HSADMINNG_SUPERUSER=import-superuser@hostsharing.net
-export HSADMINNG_POSTGRES_RESTRICTED_USERNAME=restricted
+export HSADMINNG_POSTGRES_ADMIN_PASSWORD=
+export HSADMINNG_SUPERUSER=superuser-alex@hostsharing.net
+export HSADMINNG_MIGRATION_DATA_PATH=migration
 export LANG=en_US.UTF-8
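
As the header comment of `.aliases` (first file above) puts it, `.tc-environment` is meant to be copied to a git-ignored `.environment` and adapted to the local setup, roughly like this (the editing and sourcing steps are illustrative, not part of the commit):

    cp .tc-environment .environment     # .environment is ignored by git
    $EDITOR .environment                # amend the values according to your external DB
    . .aliases && . .environment        # then source the aliases and the adapted environment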


@@ -4,5 +4,4 @@ unset HSADMINNG_POSTGRES_ADMIN_PASSWORD
 unset HSADMINNG_POSTGRES_RESTRICTED_USERNAME
 unset HSADMINNG_SUPERUSER
 unset HSADMINNG_MIGRATION_DATA_PATH
-unset HSADMINNG_OFFICE_DATA_SQL_FILE

Jenkinsfile

@@ -55,9 +55,9 @@ pipeline {
 sh './gradlew bookingIntegrationTest hostingIntegrationTest --no-daemon'
 }
 }
-stage('Test-Imports') {
+stage('Import-Tests') {
 steps {
-sh './gradlew importHostingAssets --no-daemon'
+sh './gradlew importOfficeData importHostingAssets --no-daemon'
 }
 }
 stage ('Scenario-Tests') {


@@ -20,7 +20,7 @@ version = '0.0.1-SNAPSHOT'
 wrapper {
 distributionType = Wrapper.DistributionType.BIN
-gradleVersion = '8.5'
+gradleVersion = '8.12.1'
 }
 // TODO.impl: self-attaching is deprecated, see:
@@ -42,8 +42,8 @@ configurations {
 repositories {
 mavenCentral()
-maven { url 'https://repo.spring.io/milestone' }
-maven { url 'https://repo.spring.io/snapshot' }
+maven { url = 'https://repo.spring.io/milestone' }
+maven { url = 'https://repo.spring.io/snapshot' }
 }
 java {
@@ -109,14 +109,14 @@ dependencyManagement {
 }
 // Java Compiler Options
-tasks.withType(JavaCompile) {
+tasks.withType(JavaCompile).configureEach {
 options.compilerArgs += [
 "-parameters" // keep parameter names => no need for @Param for SpringData
 ]
 }
 // Configure tests
-tasks.named('test') {
+tasks.withType(Test).configureEach {
 useJUnitPlatform()
 jvmArgs '-Duser.language=en'
 jvmArgs '-Duser.country=US'
@@ -129,7 +129,7 @@ openapiProcessor {
 processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
 apiPath "$projectDir/src/main/resources/api-definition/api-definition.yaml"
 mapping "$projectDir/src/main/resources/api-definition/api-mappings.yaml"
-targetDir "$buildDir/generated/sources/openapi-javax"
+targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
 showWarnings true
 openApiNullable true
 }
@@ -138,7 +138,7 @@ openapiProcessor {
 processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
 apiPath "$projectDir/src/main/resources/api-definition/rbac/rbac.yaml"
 mapping "$projectDir/src/main/resources/api-definition/rbac/api-mappings.yaml"
-targetDir "$buildDir/generated/sources/openapi-javax"
+targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
 showWarnings true
 openApiNullable true
 }
@@ -147,7 +147,7 @@ openapiProcessor {
 processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
 apiPath "$projectDir/src/main/resources/api-definition/test/test.yaml"
 mapping "$projectDir/src/main/resources/api-definition/test/api-mappings.yaml"
-targetDir "$buildDir/generated/sources/openapi-javax"
+targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
 showWarnings true
 openApiNullable true
 }
@@ -156,7 +156,7 @@ openapiProcessor {
 processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
 apiPath "$projectDir/src/main/resources/api-definition/hs-office/hs-office.yaml"
 mapping "$projectDir/src/main/resources/api-definition/hs-office/api-mappings.yaml"
-targetDir "$buildDir/generated/sources/openapi-javax"
+targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
 showWarnings true
 openApiNullable true
 }
@@ -165,7 +165,7 @@ openapiProcessor {
 processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
 apiPath "$projectDir/src/main/resources/api-definition/hs-booking/hs-booking.yaml"
 mapping "$projectDir/src/main/resources/api-definition/hs-booking/api-mappings.yaml"
-targetDir "$buildDir/generated/sources/openapi-javax"
+targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
 showWarnings true
 openApiNullable true
 }
@@ -174,7 +174,7 @@ openapiProcessor {
 processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
 apiPath "$projectDir/src/main/resources/api-definition/hs-hosting/hs-hosting.yaml"
 mapping "$projectDir/src/main/resources/api-definition/hs-hosting/api-mappings.yaml"
-targetDir "$buildDir/generated/sources/openapi-javax"
+targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
 showWarnings true
 openApiNullable true
 }
@@ -201,11 +201,11 @@ project.tasks.compileJava.dependsOn processSpring
 // TODO.impl: Upgrade to io.openapiprocessor.openapi-processor >= 2024.2
 // and use either `bean-validation: true` in api-mapping.yaml or `useSpringBoot3 true` (not sure where exactly).
 task openApiGenerate(type: Copy) {
-from "$buildDir/generated/sources/openapi-javax"
-into "$buildDir/generated/sources/openapi"
+from layout.buildDirectory.dir("generated/sources/openapi-javax")
+into layout.buildDirectory.dir("generated/sources/openapi")
 filter { line -> line.replaceAll('javax', 'jakarta') }
 }
-compileJava.source "$buildDir/generated/sources/openapi"
+compileJava.source layout.buildDirectory.dir("generated/sources/openapi")
 compileJava.dependsOn openApiGenerate
 openApiGenerate.dependsOn processSpring
@@ -263,7 +263,7 @@ test {
 'net.hostsharing.hsadminng.**.generated.**',
 ]
 useJUnitPlatform {
-excludeTags 'importHostingAssets', 'scenarioTest'
+excludeTags 'importOfficeData', 'importHostingAssets', 'scenarioTest'
 }
 }
@@ -338,12 +338,15 @@ jacocoTestCoverageVerification {
 // HOWTO: run all unit-tests which don't need a database: gw-test unitTest
 tasks.register('unitTest', Test) {
 useJUnitPlatform {
-excludeTags 'importHostingAssets', 'scenarioTest', 'generalIntegrationTest',
+excludeTags 'importOfficeData', 'importHostingAssets', 'scenarioTest', 'generalIntegrationTest',
 'officeIntegrationTest', 'bookingIntegrationTest', 'hostingIntegrationTest'
 }
-group 'verification'
-description 'runs all unit-tests which do not need a database'
+group = 'verification'
+description = 'runs all unit-tests which do not need a database'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
 mustRunAfter spotlessJava
 }
@@ -354,8 +357,11 @@ tasks.register('generalIntegrationTest', Test) {
 includeTags 'generalIntegrationTest'
 }
-group 'verification'
-description 'runs integration tests which are not specific to a module, like base, rbac, config etc.'
+group = 'verification'
+description = 'runs integration tests which are not specific to a module, like base, rbac, config etc.'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
 mustRunAfter spotlessJava
 }
@@ -366,8 +372,11 @@ tasks.register('officeIntegrationTest', Test) {
 includeTags 'officeIntegrationTest'
 }
-group 'verification'
-description 'runs integration tests of the office module'
+group = 'verification'
+description = 'runs integration tests of the office module'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
 mustRunAfter spotlessJava
 }
@@ -378,8 +387,11 @@ tasks.register('bookingIntegrationTest', Test) {
 includeTags 'bookingIntegrationTest'
 }
-group 'verification'
-description 'runs integration tests of the booking module'
+group = 'verification'
+description = 'runs integration tests of the booking module'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
 mustRunAfter spotlessJava
 }
@@ -390,8 +402,25 @@ tasks.register('hostingIntegrationTest', Test) {
 includeTags 'hostingIntegrationTest'
 }
-group 'verification'
-description 'runs integration tests of the hosting module'
+group = 'verification'
+description = 'runs integration tests of the hosting module'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
+mustRunAfter spotlessJava
+}
+tasks.register('importOfficeData', Test) {
+useJUnitPlatform {
+includeTags 'importOfficeData'
+}
+group = 'verification'
+description = 'run the import jobs as tests'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
 mustRunAfter spotlessJava
 }
@@ -401,8 +430,11 @@ tasks.register('importHostingAssets', Test) {
 includeTags 'importHostingAssets'
 }
-group 'verification'
-description 'run the import jobs as tests'
+group = 'verification'
+description = 'run the import jobs as tests'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
 mustRunAfter spotlessJava
 }
@@ -412,8 +444,11 @@ tasks.register('scenarioTest', Test) {
 includeTags 'scenarioTest'
 }
-group 'verification'
-description 'run the import jobs as tests'
+group = 'verification'
+description = 'run the import jobs as tests'
+testClassesDirs = testing.suites.test.sources.output.classesDirs
+classpath = testing.suites.test.sources.runtimeClasspath
 mustRunAfter spotlessJava
 }
@@ -428,7 +463,7 @@ pitest {
 ]
 targetTests = ['net.hostsharing.hsadminng.**.*UnitTest', 'net.hostsharing.hsadminng.**.*RestTest']
-excludedTestClasses = ['**AcceptanceTest*', '**IntegrationTest*', '**ImportHostingAssets']
+excludedTestClasses = ['**AcceptanceTest*', '**IntegrationTest*', '**ImportOfficeData', '**ImportHostingAssets']
 pitestVersion = '1.17.0'
 junit5PluginVersion = '1.1.0'
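
The tag-filtered `Test` tasks registered in this build file are ordinary Gradle tasks, so after this change they can be invoked directly from the command line, for example (a sketch using the targets named above and in the Jenkinsfile above):

    ./gradlew test            # everything except the importOfficeData, importHostingAssets and scenarioTest tags
    ./gradlew unitTest        # only tests that need no database
    ./gradlew importOfficeData importHostingAssets --no-daemon   # the import jobs, as run by the CI pipeline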

build.gradle.kotlin

@@ -0,0 +1,519 @@
plugins {
`java-platform`
//id( "java" )
id( "org.springframework.boot") version "3.4.1"
id( "io.spring.dependency-management") version "1.1.7" // manages implicit dependencies
id( "io.openapiprocessor.openapi-processor") version "2023.2" // generates Controller-interface and resources from API-spec
id( "com.github.jk1.dependency-license-report") version "2.9" // checks dependency-license compatibility
id( "org.owasp.dependencycheck") version "12.0.1" // checks dependencies for known vulnerabilities
id( "com.diffplug.spotless") version "7.0.2" // formats + checks formatting for source-code
id( "jacoco") // determines code-coverage of tests
id( "info.solidsoft.pitest") version "1.15.0" // performs mutation testing
id( "se.patrikerdes.use-latest-versions") version "0.2.18" // updates module and plugin versions
id( "com.github.ben-manes.versions") version "0.52.0" // determines which dependencies have updates
}
// HOWTO: find out which dependency versions are managed by Spring Boot:
// https://docs.spring.io/spring-boot/appendix/dependency-versions/coordinates.html
group = "net.hostsharing"
version = "0.0.1-SNAPSHOT"
tasks.named<Wrapper>("wrapper") {
distributionType = Wrapper.DistributionType.BIN
gradleVersion = "8.5"
}
// TODO.impl: self-attaching is deprecated, see:
// https://javadoc.io/doc/org.mockito/mockito-core/latest/org/mockito/Mockito.html#0.3
configurations {
named("compileOnly") {
extendsFrom(configurations.named("annotationProcessor").get())
}
named("testCompile") {
extendsFrom(configurations.named("testAnnotationProcessor").get())
// Only JUnit 5 (Jupiter) should be used at compile time.
// For runtime it's still needed by testcontainers, though.
exclude(group = "junit", module = "junit")
exclude(group = "org.junit.vintage", module = "junit-vintage-engine")
}
}
extensions.configure<JavaPluginExtension> {
toolchain {
languageVersion.set(JavaLanguageVersion.of(21))
vendor.set(JvmVendorSpec.ADOPTIUM)
implementation.set(JvmImplementation.VENDOR_SPECIFIC)
}
}
extra["testcontainersVersion"] = "1.17.3"
allprojects {
repositories {
mavenCentral()
maven { url = uri("https://repo.spring.io/milestone") }
maven { url = uri("https://repo.spring.io/snapshot") }
}
dependencies {
implementation("org.springframework.boot:spring-boot-starter-data-jpa")
implementation("org.springframework.boot:spring-boot-starter-data-rest")
implementation("org.springframework.boot:spring-boot-starter-jdbc")
implementation("org.springframework.boot:spring-boot-starter-web")
implementation("org.springframework.boot:spring-boot-starter-validation")
implementation("org.springframework.boot:spring-boot-starter-actuator")
implementation("org.springframework.boot:spring-boot-starter-security")
implementation("com.github.gavlyukovskiy:datasource-proxy-spring-boot-starter:1.10.0")
implementation("org.springdoc:springdoc-openapi:2.8.3")
classpath("org.postgresql:postgresql")
classpath("org.liquibase:liquibase-core")
classpath("io.hypersistence:hypersistence-utils-hibernate-63:3.9.0")
classpath("com.fasterxml.jackson.datatype:jackson-datatype-jsr310")
classpath("org.openapitools:jackson-databind-nullable:0.2.6")
classpath("org.apache.commons:commons-text:1.13.0")
classpath("net.java.dev.jna:jna:5.16.0")
classpath("org.modelmapper:modelmapper:3.2.2")
classpath("org.iban4j:iban4j:3.2.10-RELEASE")
classpath("org.springdoc:springdoc-openapi-starter-webmvc-ui:2.8.3")
classpath("org.reflections:reflections:0.10.2")
compileOnly("org.projectlombok:lombok")
testCompileOnly("org.projectlombok:lombok")
// TODO.impl: version conflict with SpringDoc, check later and re-enable if fixed
// developmentOnly "org.springframework.boot:spring-boot-devtools"
annotationProcessor("org.projectlombok:lombok")
testAnnotationProcessor("org.projectlombok:lombok")
testImplementation("org.springframework.boot:spring-boot-starter-test")
testImplementation("org.testcontainers:testcontainers")
testImplementation("org.testcontainers:junit-jupiter")
testImplementation("org.junit.jupiter:junit-jupiter")
testImplementation("org.testcontainers:postgresql")
testImplementation("com.tngtech.archunit:archunit-junit5:1.3.0")
testImplementation("io.rest-assured:spring-mock-mvc")
testImplementation("org.hamcrest:hamcrest-core")
testImplementation("org.pitest:pitest-junit5-plugin:1.2.1")
testImplementation("org.junit.jupiter:junit-jupiter-api")
testImplementation("org.wiremock:wiremock-standalone:3.10.0")
}
}
// Java Compiler Options
tasks.withType<JavaCompile>().configureEach {
options.compilerArgs.add("-parameters") // keep parameter names => no need for @Param for SpringData
}
// Configure tests
tasks.withType<Test> {
useJUnitPlatform()
jvmArgs("-Duser.language=en", "-Duser.country=US")
}
// OpenAPI Source Code Generation
//openapiProcessor {
// springRoot {
// processorName = "spring"
// processor = "io.openapiprocessor:openapi-processor-spring:2022.5"
// apiPath "$projectDir/src/main/resources/api-definition/api-definition.yaml"
// mapping "$projectDir/src/main/resources/api-definition/api-mappings.yaml"
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
// showWarnings true
// openApiNullable true
// }
// springRbac {
// processorName "spring"
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
// apiPath "$projectDir/src/main/resources/api-definition/rbac/rbac.yaml"
// mapping "$projectDir/src/main/resources/api-definition/rbac/api-mappings.yaml"
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
// showWarnings true
// openApiNullable true
// }
// springTest {
// processorName "spring"
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
// apiPath "$projectDir/src/main/resources/api-definition/test/test.yaml"
// mapping "$projectDir/src/main/resources/api-definition/test/api-mappings.yaml"
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
// showWarnings true
// openApiNullable true
// }
// springHsOffice {
// processorName "spring"
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
// apiPath "$projectDir/src/main/resources/api-definition/hs-office/hs-office.yaml"
// mapping "$projectDir/src/main/resources/api-definition/hs-office/api-mappings.yaml"
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
// showWarnings true
// openApiNullable true
// }
// springHsBooking {
// processorName "spring"
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
// apiPath "$projectDir/src/main/resources/api-definition/hs-booking/hs-booking.yaml"
// mapping "$projectDir/src/main/resources/api-definition/hs-booking/api-mappings.yaml"
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
// showWarnings true
// openApiNullable true
// }
// springHsHosting {
// processorName "spring"
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
// apiPath "$projectDir/src/main/resources/api-definition/hs-hosting/hs-hosting.yaml"
// mapping "$projectDir/src/main/resources/api-definition/hs-hosting/api-mappings.yaml"
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
// showWarnings true
// openApiNullable true
// }
//}
//sourceSets.main.java.srcDir "build/generated/sources/openapi"
//abstract class ProcessSpring extends DefaultTask {}
//tasks.register("processSpring", ProcessSpring)
//["processSpringRoot",
// "processSpringRbac",
// "processSpringTest",
// "processSpringHsOffice",
// "processSpringHsBooking",
// "processSpringHsHosting"
//].each {
// project.tasks.processSpring.dependsOn it
//}
//project.tasks.processResources.dependsOn processSpring
// project.tasks.compileJava.dependsOn processSpring
// Rename javax to jakarta in OpenApi generated java files because
// io.openapiprocessor.openapi-processor 2022.5 does not yet support the openapiprocessor useSpringBoot3 config option.
// TODO.impl: Upgrade to io.openapiprocessor.openapi-processor >= 2024.2
// and use either `bean-validation: true` in api-mapping.yaml or `useSpringBoot3 true` (not sure where exactly).
//task openApiGenerate(type: Copy) {
// from layout.buildDirectory.dir("generated/sources/openapi-javax")
// into layout.buildDirectory.dir("generated/sources/openapi")
// filter { line -> line.replaceAll("javax", "jakarta") }
}
//compileJava.source layout.buildDirectory.dir("generated/sources/openapi")
//compileJava.dependsOn openApiGenerate
// openApiGenerate.dependsOn processSpring
// Spotless Code Formatting
// spotless {
// java {
// removeUnusedImports()
// leadingTabsToSpaces(4)
// endWithNewline()
// toggleOffOn()
//
// target fileTree(rootDir) {
// include "**/*.java"
// exclude "**/generated/**/*.java"
// }
// }
// }
//project.tasks.check.dependsOn(spotlessCheck)
// HACK: no idea why spotless uses the output of these tasks, but we get warnings without those
//project.tasks.spotlessJava.dependsOn(
// tasks.generateLicenseReport,
// // tasks.pitest, TODO.test: PiTest currently does not work, needs to be fixed
// tasks.jacocoTestReport,
// tasks.processResources,
// tasks.processTestResources)
// OWASP Dependency Security Test
//dependencyCheck {
// nvd {
// apiKey = project.properties["OWASP_API_KEY"] // set it in ~/.gradle/gradle.properties
// delay = 16000
// }
// format = "ALL"
// suppressionFile = "etc/owasp-dependency-check-suppression.xml"
// failOnError = true
// failBuildOnCVSS = 5
//}
//project.tasks.check.dependsOn(dependencyCheckAnalyze)
//project.tasks.dependencyCheckAnalyze.doFirst { // Why not doLast? See README.md!
// println "OWASP Dependency Security Report: file:///${project.rootDir}/build/reports/dependency-check-report.html"
//}
// License Check
//licenseReport {
// excludeBoms = true
// allowedLicensesFile = new File("$projectDir/etc/allowed-licenses.json")
//}
//project.tasks.check.dependsOn(checkLicense)
// HOWTO: run all tests except import- and scenario-tests: gw test
//test {
// finalizedBy jacocoTestReport // generate report after tests
// excludes = [
// "net.hostsharing.hsadminng.**.generated.**",
// ]
// useJUnitPlatform {
// excludeTags "importOfficeData", "importHostingAssets", "scenarioTest"
// }
//}
// JaCoCo Test Code Coverage for unit-tests
//jacoco {
// toolVersion = "0.8.10"
//}
//jacocoTestReport {
// dependsOn test
// afterEvaluate {
// classDirectories.setFrom(files(classDirectories.files.collect {
// fileTree(dir: it, exclude: [
// "net/hostsharing/hsadminng/**/generated/**/*.class",
// "net/hostsharing/hsadminng/hs/HsadminNgApplication.class"
// ])
// }))
// }
// doFirst { // Why not doLast? See README.md!
// println "HTML Jacoco Test Code Coverage Report: file://${reports.html.outputLocation.get()}/index.html"
// }
//}
//project.tasks.check.dependsOn(jacocoTestCoverageVerification)
//jacocoTestCoverageVerification {
// violationRules {
// rule {
// limit {
// minimum = 0.80 // TODO.test: improve instruction coverage
// }
// }
//
// // element: PACKAGE, BUNDLE, CLASS, SOURCEFILE or METHOD
// // counter: INSTRUCTION, BRANCH, LINE, COMPLEXITY, METHOD, or CLASS
// // value: TOTALCOUNT, COVEREDCOUNT, MISSEDCOUNT, COVEREDRATIO or MISSEDRATIO
//
// rule {
// element = "CLASS"
// excludes = [
// "net.hostsharing.hsadminng.**.generated.**",
// "net.hostsharing.hsadminng.rbac.test.dom.TestDomainEntity",
// "net.hostsharing.hsadminng.HsadminNgApplication",
// "net.hostsharing.hsadminng.ping.PingController",
// "net.hostsharing.hsadminng.rbac.generator.*",
// "net.hostsharing.hsadminng.rbac.grant.RbacGrantsDiagramService",
// "net.hostsharing.hsadminng.rbac.grant.RbacGrantsDiagramService.Node",
// "net.hostsharing.hsadminng.**.*Repository",
// "net.hostsharing.hsadminng.mapper.Mapper"
// ]
//
// limit {
// counter = "LINE"
// value = "COVEREDRATIO"
// minimum = 0.75 // TODO.test: improve line coverage
// }
// }
// rule {
// element = "METHOD"
// excludes = [
// "net.hostsharing.hsadminng.**.generated.**",
// "net.hostsharing.hsadminng.HsadminNgApplication.main",
// "net.hostsharing.hsadminng.ping.PingController.*"
// ]
//
// limit {
// counter = "BRANCH"
// value = "COVEREDRATIO"
// minimum = 0.00 // TODO.test: improve branch coverage
// }
// }
// }
//}
// HOWTO: run all unit-tests which don"t need a database: gw-test unitTest
//tasks.register("unitTest", Test) {
// useJUnitPlatform {
// excludeTags "importOfficeData", "importHostingAssets", "scenarioTest", "generalIntegrationTest",
// "officeIntegrationTest", "bookingIntegrationTest", "hostingIntegrationTest"
// }
//
// group "verification"
// description "runs all unit-tests which do not need a database"
//
// mustRunAfter spotlessJava
//}
// HOWTO: run all integration tests which are not specific to a module, like base, rbac, config etc.
//tasks.register("generalIntegrationTest", Test) {
// useJUnitPlatform {
// includeTags "generalIntegrationTest"
// }
//
// group "verification"
// description "runs integration tests which are not specific to a module, like base, rbac, config etc."
//
// mustRunAfter spotlessJava
//}
// HOWTO: run all integration tests of the office module: gw-test officeIntegrationTest
//tasks.register("officeIntegrationTest", Test) {
// useJUnitPlatform {
// includeTags "officeIntegrationTest"
// }
//
// group "verification"
// description "runs integration tests of the office module"
//
// mustRunAfter spotlessJava
//}
// HOWTO: run all integration tests of the booking module: gw-test bookingIntegrationTest
//tasks.register("bookingIntegrationTest", Test) {
// useJUnitPlatform {
// includeTags "bookingIntegrationTest"
// }
//
// group "verification"
// description "runs integration tests of the booking module"
//
// mustRunAfter spotlessJava
//}
// HOWTO: run all integration tests of the hosting module: gw-test hostingIntegrationTest
//tasks.register("hostingIntegrationTest", Test) {
// useJUnitPlatform {
// includeTags "hostingIntegrationTest"
// }
//
// group "verification"
// description "runs integration tests of the hosting module"
//
// mustRunAfter spotlessJava
//}
//tasks.register("importOfficeData", Test) {
// useJUnitPlatform {
// includeTags "importOfficeData"
// }
//
// group "verification"
// description "run the import jobs as tests"
//
// mustRunAfter spotlessJava
//}
//tasks.register("importHostingAssets", Test) {
// useJUnitPlatform {
// includeTags "importHostingAssets"
// }
//
// group "verification"
// description "run the import jobs as tests"
//
// mustRunAfter spotlessJava
//}
//tasks.register("scenarioTest", Test) {
// useJUnitPlatform {
// includeTags "scenarioTest"
// }
//
// group "verification"
// description "run the import jobs as tests"
//
// mustRunAfter spotlessJava
//}
// pitest mutation testing
//pitest {
// targetClasses = ["net.hostsharing.hsadminng.**"]
// excludedClasses = [
// "net.hostsharing.hsadminng.config.**",
// // "net.hostsharing.hsadminng.**.*Controller",
// "net.hostsharing.hsadminng.**.generated.**"
// ]
//
// targetTests = ["net.hostsharing.hsadminng.**.*UnitTest", "net.hostsharing.hsadminng.**.*RestTest"]
// excludedTestClasses = ["**AcceptanceTest*", "**IntegrationTest*", "**ImportOfficeData", "**ImportHostingAssets"]
//
// pitestVersion = "1.17.0"
// junit5PluginVersion = "1.1.0"
//
// threads = 4
//
// // As Java unit tests are pretty pointless in our case, this maybe makes not much sense.
// mutationThreshold = 71
// coverageThreshold = 57
// testStrengthThreshold = 87
//
// outputFormats = ["XML", "HTML"]
// timestampedReports = false
//}
// project.tasks.check.dependsOn(project.tasks.pitest) TODO.test: PiTest currently does not work, needs to be fixed
//project.tasks.pitest.doFirst { // Why not doLast? See README.md!
// println "PiTest Mutation Report: file:///${project.rootDir}/build/reports/pitest/index.html"
//}
// Dependency Versions Upgrade
//useLatestVersions {
// finalizedBy check
//}
//def isNonStable = { String version ->
// def stableKeyword = ["RELEASE", "FINAL", "GA"].any { it -> version.toUpperCase().contains(it) }
// def regex = /^[0-9,.v-]+(-r)?$/
// return !stableKeyword && !(version ==~ regex)
//}
//tasks.named("dependencyUpdates").configure {
// rejectVersionIf {
// isNonStable(it.candidate.version)
// }
//}
// Generate HTML from Markdown scenario-test-reports using Pandoc:
//tasks.register("convertMarkdownToHtml") {
// description = "Generates HTML from Markdown scenario-test-reports using Pandoc."
// group = "Conversion"
//
// // Define the template file and input directory
// def templateFile = file("doc/scenarios/.template.html")
//
// // Task configuration and execution
// doFirst {
// // Check if pandoc is installed
// try {
// exec {
// commandLine "pandoc", "--version"
// }
// } catch (Exception) {
// throw new GradleException("Pandoc is not installed or not found in the system path.")
// }
//
// // Check if the template file exists
// if (!templateFile.exists()) {
// throw new GradleException("Template file "doc/scenarios/.template.html" not found.")
// }
// }
//
// doLast {
// // Gather all Markdown files in the current directory
// fileTree(dir: ".", include: "build/doc/scenarios/*.md").each { file ->
// // Corrected way to create the output file path
// def outputFile = new File(file.parent, file.name.replaceAll(/\.md$/, ".html"))
//
// // Execute pandoc for each markdown file
// exec {
// commandLine "pandoc", file.absolutePath, "--template", templateFile.absolutePath, "-o", outputFile.absolutePath
// }
//
// println "Converted ${file.name} to ${outputFile.name}"
// }
// }
//}
//convertMarkdownToHtml.dependsOn scenarioTest
// shortcut for compiling all files
// tasks.register("compile") {
// dependsOn "compileJava", "compileTestJava"
// }


@@ -108,40 +108,6 @@ der Person des _Subscriber-Contact_ (_Holder_) zur repräsentierten Person (_Anc
 Zusätzlich wird diese Relation mit dem Kurznamen der abonnierten Mailingliste markiert.
-### Coop-Asset-Transactions (Geschäftsguthabens-Transaktionen)
-- positiver Wert => Geschäftsguthaben nehmen zu
-- negativer Wert => Geschäftsguthaben nehmen ab
-**REVERSAL**: **Korrekturbuchung** einer fehlerhaften Buchung, positiver oder negativer Wert ist möglich
-**DEPOSIT**: **Zahlungseingang** vom Mitglied nach Beteiligung mit Geschäftsanteilen, immer positiver Wert
-**DISBURSAL**: **Zahlungsausgang** an Mitglied nach Kündigung von Geschäftsanteilen, immer negativer Wert
-**TRANSFER**: **Übertragung** von Geschäftsguthaben an ein anderes Mitglied, immer negativer Wert
-**ADOPTION**: **Übernahme** von Geschäftsguthaben von einem anderen Mitglied, immer positiver Wert
-**CLEARING**: **Verrechnung** von Geschäftsguthaben mit Schulden des Mitglieds, immer negativer Wert
-**LOSS**: **Verlust** von Geschäftsguthaben bei Zuweisung Eigenkapitalverlust nach Kündigung von Geschäftsanteilen, immer negativer Wert
-**LIMITATION**: **Verjährung** von Geschäftsguthaben, wenn Auszahlung innerhalb der Frist nicht möglich war.
-### Coop-Share-Transactions (Geschäftsanteil-Transaktionen)
-- positiver Wert => Geschäftsanteile nehmen zu
-- negativer Wert => Geschäftsanteile nehmen ab
--
-**REVERSAL**: **Korrekturbuchung** einer fehlerhaften Buchung, positiver oder negativer Wert ist möglich
-**SUBSCRIPTION**: **Beteiligung** mit Geschäftsanteilen, z.B. durch Beitrittserklärung, immer positiver Wert
-**CANCELLATION**: **Kündigung** von Geschäftsanteilen, z.B. durch Austritt, immer negativer Wert
 #### Anchor / Relation-Anchor
 siehe [Relation](#Relation)


@@ -116,7 +116,7 @@ classDiagram
 +BankAccount refundBankAccount
 +String defaultPrefix: mei
 }
-debitor-MeierGmbH o.. partner-MeierGmbH
+debitor-MeierGmbH o-- partner-MeierGmbH
 debitor-MeierGmbH *-- rel-MeierGmbH-Buha
 class contactData-MeierGmbH-Buha {

Binary file not shown.


@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-bin.zip
 networkTimeout=10000
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME
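
The wrapper files (`gradle-wrapper.properties`, `gradlew`, `gradlew.bat`) are normally regenerated rather than edited by hand; a change like this one is typically produced by running the wrapper task against the version configured in the build script's `wrapper { }` block, for example:

    ./gradlew wrapper --gradle-version 8.12.1 --distribution-type bin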

gradlew

@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+# SPDX-License-Identifier: Apache-2.0
+#
 ##############################################################################
 #
@@ -55,7 +57,7 @@
 # Darwin, MinGW, and NonStop.
 #
 # (3) This script is generated from the Groovy template
-# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
 # within the Gradle project.
 #
 # You can find Gradle at https://github.com/gradle/gradle/.
@@ -84,7 +86,7 @@ done
 # shellcheck disable=SC2034
 APP_BASE_NAME=${0##*/}
 # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
-APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
 # Use the maximum available, or set MAX_FD != -1 to use that value.
 MAX_FD=maximum

gradlew.bat

@@ -13,6 +13,8 @@
 @rem See the License for the specific language governing permissions and
 @rem limitations under the License.
 @rem
+@rem SPDX-License-Identifier: Apache-2.0
+@rem
 @if "%DEBUG%"=="" @echo off
 @rem ##########################################################################
@@ -43,11 +45,11 @@ set JAVA_EXE=java.exe
 %JAVA_EXE% -version >NUL 2>&1
 if %ERRORLEVEL% equ 0 goto execute
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
 goto fail
@@ -57,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe
 if exist "%JAVA_EXE%" goto execute
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
 goto fail


@ -1,7 +1,7 @@
pluginManagement { pluginManagement {
repositories { repositories {
maven { url 'https://repo.spring.io/milestone' } maven { url = 'https://repo.spring.io/milestone' }
maven { url 'https://repo.spring.io/snapshot' } maven { url = 'https://repo.spring.io/snapshot' }
gradlePluginPortal() gradlePluginPortal()
mavenCentral() mavenCentral()
} }


@@ -1,6 +1,6 @@
 --liquibase formatted sql
--- TODO.impl: check if we really need the restricted user
+-- FIXME: check if we really need the restricted user
 -- ============================================================================
 -- NUMERIC-HASH-FUNCTIONS


@@ -22,12 +22,13 @@ select (objectTable || '#' || objectIdName || ':' || roleType) as roleIdName, *
 --//
 -- ============================================================================
---changeset michael.hoennig:rbac-views-ROLE-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-views-ROLE-RESTRICTED-VIEW endDelimiter:--//
 -- ----------------------------------------------------------------------------
 /*
 Creates a view to the role table with row-level limitation
 based on the grants of the current user or assumed roles.
 */
+drop view if exists rbac.role_rv;
 create or replace view rbac.role_rv as
 select *
 -- @formatter:off
@@ -105,7 +106,7 @@ create or replace view rbac.grant_ev as
 -- ============================================================================
---changeset michael.hoennig:rbac-views-GRANT-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-views-GRANT-RESTRICTED-VIEW endDelimiter:--//
 -- ----------------------------------------------------------------------------
 /*
 Creates a view to the grants table with row-level limitation
@@ -221,12 +222,13 @@ select distinct *
 -- ============================================================================
---changeset michael.hoennig:rbac-views-USER-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-views-USER-RESTRICTED-VIEW endDelimiter:--//
 -- ----------------------------------------------------------------------------
 /*
 Creates a view to the users table with row-level limitation
 based on the grants of the current user or assumed roles.
 */
+drop view if exists rbac.subject_rv;
 create or replace view rbac.subject_rv as
 select distinct *
 -- @formatter:off
@@ -314,13 +316,14 @@ execute function rbac.delete_subject_tf();
 --/
 -- ============================================================================
---changeset michael.hoennig:rbac-views-OWN-GRANTED-PERMISSIONS-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-views-OWN-GRANTED-PERMISSIONS-VIEW endDelimiter:--//
 -- ----------------------------------------------------------------------------
 /*
 Creates a view to all permissions granted to the current user or
 based on the grants of the current user or assumed roles.
 */
 -- @formatter:off
+drop view if exists rbac.own_granted_permissions_rv;
 create or replace view rbac.own_granted_permissions_rv as
 select r.uuid as roleuuid, p.uuid as permissionUuid,
 (r.objecttable || ':' || r.objectidname || ':' || r.roletype) as roleName, p.op,


@@ -111,7 +111,7 @@ end; $$;
 -- ============================================================================
---changeset michael.hoennig:rbac-generators-IDENTITY-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-generators-IDENTITY-VIEW endDelimiter:--//
 -- ----------------------------------------------------------------------------
 create or replace procedure rbac.generateRbacIdentityViewFromQuery(targetTable text, sqlQuery text)
@@ -171,7 +171,7 @@ end; $$;
 -- ============================================================================
---changeset michael.hoennig:rbac-generators-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-generators-RESTRICTED-VIEW endDelimiter:--//
 -- ----------------------------------------------------------------------------
 create or replace procedure rbac.generateRbacRestrictedView(targetTable text, orderBy text, columnUpdates text = null, columnNames text = '*')


@@ -1,7 +1,7 @@
 --liquibase formatted sql
 -- ============================================================================
---changeset michael.hoennig:rbac-global-OBJECT runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-global-OBJECT endDelimiter:--//
 -- ----------------------------------------------------------------------------
 /*
 The purpose of this table is provide root business objects
@@ -11,12 +11,12 @@
 In production databases, there is only a single row in this table,
 in test stages, there can be one row for each test data realm.
 */
-create table if not exists rbac.global
+create table rbac.global
 (
 uuid uuid primary key references rbac.object (uuid) on delete cascade,
 name varchar(63) unique
 );
-create unique index if not exists Global_Singleton on rbac.global ((0));
+create unique index Global_Singleton on rbac.global ((0));
 grant select on rbac.global to ${HSADMINNG_POSTGRES_RESTRICTED_USERNAME};
 --//
@@ -75,12 +75,13 @@ $$;
 -- ============================================================================
---changeset michael.hoennig:rbac-global-IDENTITY-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:rbac-global-IDENTITY-VIEW endDelimiter:--//
 -- ----------------------------------------------------------------------------
 /*
 Creates a view to the rbac.global object table which maps the identifying name to the objectUuid.
 */
+drop view if exists rbac.global_iv;
 create or replace view rbac.global_iv as
 select target.uuid, target.name as idName
 from rbac.global as target;


@@ -32,41 +32,6 @@ create table if not exists hs_office.membership
 --//
--- ============================================================================
---changeset michael.hoennig:hs-office-membership-SINGLE-MEMBERSHIP-CHECK endDelimiter:--//
--- ----------------------------------------------------------------------------
-CREATE OR REPLACE FUNCTION hs_office.validate_membership_validity()
-RETURNS trigger AS $$
-DECLARE
-partnerNumber int;
-BEGIN
-IF EXISTS (
-SELECT 1
-FROM hs_office.membership
-WHERE partnerUuid = NEW.partnerUuid
-AND uuid <> NEW.uuid
-AND NEW.validity && validity
-) THEN
-SELECT p.partnerNumber INTO partnerNumber
-FROM hs_office.partner AS p
-WHERE p.uuid = NEW.partnerUuid;
-RAISE EXCEPTION 'Membership validity ranges overlap for partnerUuid %, partnerNumber %', NEW.partnerUuid, partnerNumber;
-END IF;
-RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;
-CREATE TRIGGER trg_validate_membership_validity
-BEFORE INSERT OR UPDATE ON hs_office.membership
-FOR EACH ROW
-EXECUTE FUNCTION hs_office.validate_membership_validity();
---//
 -- ============================================================================
 --changeset michael.hoennig:hs-office-membership-MAIN-TABLE-JOURNAL endDelimiter:--//
 -- ----------------------------------------------------------------------------


@ -10,8 +10,7 @@
*/ */
create or replace procedure hs_office.membership_create_test_data( create or replace procedure hs_office.membership_create_test_data(
forPartnerNumber numeric(5), forPartnerNumber numeric(5),
newMemberNumberSuffix char(2), newMemberNumberSuffix char(2) )
validity daterange)
language plpgsql as $$ language plpgsql as $$
declare declare
relatedPartner hs_office.partner; relatedPartner hs_office.partner;
@ -23,7 +22,7 @@ begin
raise notice '- using partner (%): %', relatedPartner.uuid, relatedPartner; raise notice '- using partner (%): %', relatedPartner.uuid, relatedPartner;
insert insert
into hs_office.membership (uuid, partneruuid, memberNumberSuffix, validity, status) into hs_office.membership (uuid, partneruuid, memberNumberSuffix, validity, status)
values (uuid_generate_v4(), relatedPartner.uuid, newMemberNumberSuffix, validity, 'ACTIVE'); values (uuid_generate_v4(), relatedPartner.uuid, newMemberNumberSuffix, daterange('20221001' , null, '[]'), 'ACTIVE');
end; $$; end; $$;
--// --//
@ -36,9 +35,9 @@ do language plpgsql $$
begin begin
call base.defineContext('creating Membership test-data', null, 'superuser-alex@hostsharing.net', 'rbac.global#global:ADMIN'); call base.defineContext('creating Membership test-data', null, 'superuser-alex@hostsharing.net', 'rbac.global#global:ADMIN');
call hs_office.membership_create_test_data(10001, '01', daterange('20221001' , '20241231', '[)')); call hs_office.membership_create_test_data(10001, '01');
call hs_office.membership_create_test_data(10002, '02', daterange('20221001' , '20251231', '[]')); call hs_office.membership_create_test_data(10002, '02');
call hs_office.membership_create_test_data(10003, '03', daterange('20221001' , null, '[]')); call hs_office.membership_create_test_data(10003, '03');
end; end;
$$; $$;
--// --//
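The bound markers in these daterange literals follow PostgreSQL's range notation; a short sketch, with dates picked only for illustration:

    select daterange('2022-10-01', '2024-12-31', '[)');  -- lower bound included, upper bound excluded
    select daterange('2022-10-01', '2024-12-31', '[]');  -- both bounds included
    select daterange('2022-10-01', null, '[]');          -- no upper bound, i.e. open-ended validity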

View File

@ -25,7 +25,7 @@ create table if not exists hs_booking.item
caption varchar(80) not null, caption varchar(80) not null,
resources jsonb not null, resources jsonb not null,
constraint booking_item_has_project_or_parent_item constraint booking_item_has_project_or_parent_asset
check (projectUuid is not null or parentItemUuid is not null) check (projectUuid is not null or parentItemUuid is not null)
); );
--// --//

View File

@ -1,38 +0,0 @@
--liquibase formatted sql
-- ============================================================================
--changeset michael.hoennig:hs-global-office-test-ddl-cleanup context:hosting-asset-import endDelimiter:--//
-- ----------------------------------------------------------------------------
DROP PROCEDURE IF EXISTS hs_office.bankaccount_create_test_data(IN givenholder character varying, IN giveniban character varying, IN givenbic character varying);
DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN contcaption character varying);
DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN startcount integer, IN endcount integer);
DROP PROCEDURE IF EXISTS hs_office.coopassettx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character);
DROP PROCEDURE IF EXISTS hs_office.coopsharetx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character);
DROP PROCEDURE IF EXISTS hs_office.debitor_create_test_data(IN withdebitornumbersuffix numeric, IN forpartnerpersonname character varying, IN forbillingcontactcaption character varying, IN withdefaultprefix character varying);
DROP PROCEDURE IF EXISTS hs_office.membership_create_test_data(IN forpartnernumber numeric, IN newmembernumbersuffix character);
DROP PROCEDURE IF EXISTS hs_office.partner_create_test_data(IN mandanttradename character varying, IN newpartnernumber numeric, IN partnerpersonname character varying, IN contactcaption character varying);
DROP PROCEDURE IF EXISTS hs_office.person_create_test_data(IN newpersontype hs_office.persontype, IN newtradename character varying, IN newfamilyname character varying, IN newgivenname character varying);
DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN startcount integer, IN endcount integer);
DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN holderpersonname character varying, IN relationtype hs_office.relationtype, IN anchorpersonname character varying, IN contactcaption character varying, IN mark character varying);
DROP PROCEDURE IF EXISTS hs_office.sepamandate_create_test_data(IN forpartnernumber numeric, IN fordebitorsuffix character, IN foriban character varying, IN withreference character varying);
--//
-- ============================================================================
--changeset michael.hoennig:hs-global-rbac-test-ddl-cleanup context:hosting-asset-import endDelimiter:--//
-- ----------------------------------------------------------------------------
DROP SCHEMA IF EXISTS rbactest CASCADE;
--//
-- ============================================================================
--changeset michael.hoennig:hs-global-rbac-test-dml-cleanup context:hosting-asset-import endDelimiter:--//
-- ----------------------------------------------------------------------------
call base.defineContext('9800-cleanup', null, '${HSADMINNG_SUPERUSER}', null);
DELETE FROM rbac.subject WHERE name='superuser-alex@hostsharing.net';
DELETE FROM rbac.subject WHERE name='superuser-fran@hostsharing.net';
--//

View File

@ -212,10 +212,6 @@ databaseChangeLog:
file: db/changelog/9-hs-global/9000-statistics.sql file: db/changelog/9-hs-global/9000-statistics.sql
context: "!only-office" context: "!only-office"
- include:
file: db/changelog/9-hs-global/9800-cleanup.sql
context: "without-test-data"
- include: - include:
file: db/changelog/9-hs-global/9100-hs-integration-schema.sql file: db/changelog/9-hs-global/9100-hs-integration-schema.sql
- include: - include:

File diff suppressed because it is too large

View File

@ -4,8 +4,6 @@ import com.opencsv.CSVParserBuilder;
import com.opencsv.CSVReader; import com.opencsv.CSVReader;
import com.opencsv.CSVReaderBuilder; import com.opencsv.CSVReaderBuilder;
import lombok.SneakyThrows; import lombok.SneakyThrows;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem;
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProject;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAsset; import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAsset;
import net.hostsharing.hsadminng.rbac.context.ContextBasedTest; import net.hostsharing.hsadminng.rbac.context.ContextBasedTest;
import net.hostsharing.hsadminng.persistence.BaseEntity; import net.hostsharing.hsadminng.persistence.BaseEntity;
@ -16,9 +14,6 @@ import org.junit.jupiter.api.extension.TestWatcher;
import org.opentest4j.AssertionFailedError; import org.opentest4j.AssertionFailedError;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.AbstractResource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileSystemResource;
import org.springframework.test.context.bean.override.mockito.MockitoBean; import org.springframework.test.context.bean.override.mockito.MockitoBean;
import org.springframework.core.io.Resource; import org.springframework.core.io.Resource;
import org.springframework.transaction.support.TransactionTemplate; import org.springframework.transaction.support.TransactionTemplate;
@ -29,7 +24,6 @@ import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.ValidationException; import jakarta.validation.ValidationException;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.Reader; import java.io.Reader;
@ -122,16 +116,10 @@ public class CsvDataImport extends ContextBasedTest {
return stream(lines.getFirst()).map(String::trim).toArray(String[]::new); return stream(lines.getFirst()).map(String::trim).toArray(String[]::new);
} }
public static @NotNull AbstractResource resourceOf(final String sqlFile) {
return new File(sqlFile).exists()
? new FileSystemResource(sqlFile)
: new ClassPathResource(sqlFile);
}
protected Reader resourceReader(@NotNull final String resourcePath) { protected Reader resourceReader(@NotNull final String resourcePath) {
try { try {
return new InputStreamReader(requireNonNull(resourceOf(resourcePath).getInputStream())); return new InputStreamReader(requireNonNull(getClass().getClassLoader().getResourceAsStream(resourcePath)));
} catch (final Exception exc) { } catch (Exception exc) {
throw new AssertionFailedError("cannot open '" + resourcePath + "'"); throw new AssertionFailedError("cannot open '" + resourcePath + "'");
} }
} }
@ -167,78 +155,37 @@ public class CsvDataImport extends ContextBasedTest {
return record; return record;
} }
@SneakyThrows public <T extends BaseEntity> T persist(final Integer id, final T entity) {
public void persistViaSql(final Integer id, final HsBookingProject entity) { try {
entity.setUuid(UUID.randomUUID()); if (entity instanceof HsHostingAsset ha) {
//noinspection unchecked
final var query = em.createNativeQuery(""" return (T) persistViaSql(id, ha);
insert into hs_booking.project( }
uuid, return persistViaEM(id, entity);
version, } catch (Exception exc) {
debitorUuid, errors.add("failed to persist #" + entity.hashCode() + ": " + entity);
caption) errors.add(exc.toString());
values (
:uuid,
:version,
:debitorUuid,
:caption)
""")
.setParameter("uuid", entity.getUuid())
.setParameter("version", entity.getVersion())
.setParameter("debitorUuid", entity.getDebitor().getUuid())
.setParameter("caption", entity.getCaption());
final var count = query.executeUpdate();
logError(() -> {
assertThat(count).describedAs("persisting BookingProject #" + id + " failed: " + entity).isEqualTo(1);
});
}
@SneakyThrows
public void persistViaSql(final Integer id, final HsBookingItem entity) {
if (entity.getUuid() != null) {
return;
} }
return entity;
}
entity.setUuid(UUID.randomUUID()); public <T extends BaseEntity> T persistViaEM(final Integer id, final T entity) {
if (em.contains(entity)) {
final var query = em.createNativeQuery(""" return entity;
insert into hs_booking.item( }
uuid, try {
version, em.persist(entity);
type, em.flush(); // makes it a bit slower, but produces better error messages
projectUuid, System.out.println("persisted #" + id + " as " + entity.getUuid());
parentItemUuid, return entity;
validity, } catch (final Exception exc) {
caption, System.err.println("persist failed for #" + id + " as " + entity);
resources) throw exc; // for breakpoints
values ( }
:uuid,
:version,
:type,
:projectUuid,
:parentItemUuid,
:validity,
:caption,
cast(:resources as jsonb))
""")
.setParameter("uuid", entity.getUuid())
.setParameter("version", entity.getVersion())
.setParameter("projectUuid", ofNullable(entity.getProject()).map(BaseEntity::getUuid).orElse(null))
.setParameter("type", entity.getType().name())
.setParameter("parentItemUuid", ofNullable(entity.getParentItem()).map(BaseEntity::getUuid).orElse(null))
.setParameter("validity", entity.getValidity())
.setParameter("caption", entity.getCaption())
.setParameter("resources", entity.getResources().toString().replace("\t", "\\t"));
final var count = query.executeUpdate();
logError(() -> {
assertThat(count).describedAs("persisting BookingItem #" + id + " failed: " + entity).isEqualTo(1);
});
} }
@SneakyThrows @SneakyThrows
public HsHostingAsset persistViaSql(final Integer id, final HsHostingAsset entity) { public BaseEntity<HsHostingAsset> persistViaSql(final Integer id, final HsHostingAsset entity) {
if (entity.getUuid() == null) { if (entity.getUuid() == null) {
entity.setUuid(UUID.randomUUID()); entity.setUuid(UUID.randomUUID());
} }
@ -282,7 +229,7 @@ public class CsvDataImport extends ContextBasedTest {
final var count = query.executeUpdate(); final var count = query.executeUpdate();
logError(() -> { logError(() -> {
assertThat(count).describedAs("persisting HostingAsset #" + id + " failed: " + entity).isEqualTo(1); assertThat(count).isEqualTo(1);
}); });
return entity; return entity;
} }
@ -301,22 +248,63 @@ public class CsvDataImport extends ContextBasedTest {
return json; return json;
} }
protected void makeSureThatTheImportAdminUserExists() { protected void deleteTestDataFromHsOfficeTables() {
jpaAttempt.transacted(() -> { jpaAttempt.transacted(() -> {
context(null); context(rbacSuperuser);
em.createNativeQuery(""" // TODO.perf: could we instead skip creating test-data based on an env var?
do language plpgsql $$ em.createNativeQuery("delete from hs_hosting.asset where true").executeUpdate();
declare em.createNativeQuery("delete from hs_hosting.asset_ex where true").executeUpdate();
admins uuid; em.createNativeQuery("delete from hs_booking.item where true").executeUpdate();
begin em.createNativeQuery("delete from hs_booking.item_ex where true").executeUpdate();
if not exists (select 1 from rbac.subject where name = '${rbacSuperuser}') then em.createNativeQuery("delete from hs_booking.project where true").executeUpdate();
admins = rbac.findRoleId(rbac.global_ADMIN()); em.createNativeQuery("delete from hs_booking.project_ex where true").executeUpdate();
call rbac.grantRoleToSubjectUnchecked(admins, admins, rbac.create_subject('${rbacSuperuser}')); em.createNativeQuery("delete from hs_office.coopassettx where true").executeUpdate();
end if; em.createNativeQuery("delete from hs_office.coopassettx_legacy_id where true").executeUpdate();
end; em.createNativeQuery("delete from hs_office.coopsharetx where true").executeUpdate();
$$; em.createNativeQuery("delete from hs_office.coopsharetx_legacy_id where true").executeUpdate();
""".replace("${rbacSuperuser}", rbacSuperuser)) em.createNativeQuery("delete from hs_office.membership where true").executeUpdate();
.executeUpdate(); em.createNativeQuery("delete from hs_office.sepamandate where true").executeUpdate();
em.createNativeQuery("delete from hs_office.sepamandate_legacy_id where true").executeUpdate();
em.createNativeQuery("delete from hs_office.debitor where true").executeUpdate();
em.createNativeQuery("delete from hs_office.bankaccount where true").executeUpdate();
em.createNativeQuery("delete from hs_office.partner where true").executeUpdate();
em.createNativeQuery("delete from hs_office.partner_details where true").executeUpdate();
em.createNativeQuery("delete from hs_office.relation where true").executeUpdate();
em.createNativeQuery("delete from hs_office.contact where true").executeUpdate();
em.createNativeQuery("delete from hs_office.person where true").executeUpdate();
}).assertSuccessful();
}
protected void resetHsOfficeSequences() {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
em.createNativeQuery("alter sequence hs_office.contact_legacy_id_seq restart with 1000000000;").executeUpdate();
em.createNativeQuery("alter sequence hs_office.coopassettx_legacy_id_seq restart with 1000000000;")
.executeUpdate();
em.createNativeQuery("alter sequence public.hs_office.coopsharetx_legacy_id_seq restart with 1000000000;")
.executeUpdate();
em.createNativeQuery("alter sequence public.hs_office.partner_legacy_id_seq restart with 1000000000;")
.executeUpdate();
em.createNativeQuery("alter sequence public.hs_office.sepamandate_legacy_id_seq restart with 1000000000;")
.executeUpdate();
});
}
protected void deleteFromTestTables() {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
em.createNativeQuery("delete from rbactest.domain where true").executeUpdate();
em.createNativeQuery("delete from rbactest.package where true").executeUpdate();
em.createNativeQuery("delete from rbactest.customer where true").executeUpdate();
}).assertSuccessful();
}
protected void deleteFromCommonTables() {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
em.createNativeQuery("delete from rbac.subject_rv where name not like 'superuser-%'").executeUpdate();
em.createNativeQuery("delete from base.tx_journal where true").executeUpdate();
em.createNativeQuery("delete from base.tx_context where true").executeUpdate();
}).assertSuccessful(); }).assertSuccessful();
} }

View File

@ -7,12 +7,10 @@ import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.hash.HashGenerator; import net.hostsharing.hsadminng.hash.HashGenerator;
import net.hostsharing.hsadminng.hash.HashGenerator.Algorithm; import net.hostsharing.hsadminng.hash.HashGenerator.Algorithm;
import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorEntity; import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorEntity;
import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorRepository;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem; import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemRealEntity; import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemRealEntity;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemType; import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemType;
import net.hostsharing.hsadminng.hs.booking.item.validators.HsBookingItemEntityValidatorRegistry; import net.hostsharing.hsadminng.hs.booking.item.validators.HsBookingItemEntityValidatorRegistry;
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProject;
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProjectRealEntity; import net.hostsharing.hsadminng.hs.booking.project.HsBookingProjectRealEntity;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetRealEntity; import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetRealEntity;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType; import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType;
@ -29,19 +27,13 @@ import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder; import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.orm.jpa.EntityManagerFactoryInfo;
import org.springframework.test.annotation.Commit; import org.springframework.test.annotation.Commit;
import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.ActiveProfiles;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader; import java.io.Reader;
import java.net.IDN; import java.net.IDN;
import java.util.ArrayList; import java.util.ArrayList;
@ -52,12 +44,10 @@ import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function; import java.util.function.Function;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.stream; import static java.util.Arrays.stream;
import static java.util.Map.entry; import static java.util.Map.entry;
import static java.util.Map.ofEntries; import static java.util.Map.ofEntries;
@ -86,22 +76,56 @@ import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.UNIX
import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange; import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.api.Assumptions.assumeThat;
import static org.springframework.util.FileCopyUtils.copyToByteArray;
/*
* This 'test' includes the complete legacy 'office' data import.
*
* There is no code in 'main' because the import is not needed at normal runtime.
* There is some test data in Java resources to verify the data conversion.
* For a real import, a main method will be added later
* which reads CSV files from the file system.
*
* When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
*
* In a real Hostsharing environment, these are created via (the old) hsadmin:
CREATE USER hsh99_admin WITH PASSWORD 'password';
CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;
CREATE USER hsh99_restricted WITH PASSWORD 'password';
\c hsh99_hsadminng
GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;
* Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- maybe something like that is needed for the 2nd user
-- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;
* Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.
* To finally import the office data, run:
*
* gw-importHostingAssets # comes from .aliases file and uses .environment
*/
@Tag("importHostingAssets") @Tag("importHostingAssets")
@DataJpaTest(properties = { @DataJpaTest(properties = {
"spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importHostingAssetsTC}", "spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importHostingAssetsTC}",
"spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}", "spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
"spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}", "spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
"hsadminng.superuser=${HSADMINNG_SUPERUSER:import-superuser@hostsharing.net}", "hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
"spring.liquibase.enabled=false" // @Sql should go first, Liquibase will be initialized programmatically
}) })
@DirtiesContext @DirtiesContext
@Import({ Context.class, JpaAttempt.class, LiquibaseConfig.class }) @Import({ Context.class, JpaAttempt.class })
@ActiveProfiles({ "without-test-data", "liquibase-migration", "hosting-asset-import" }) @ActiveProfiles("without-test-data")
@TestMethodOrder(MethodOrderer.OrderAnnotation.class) @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@ExtendWith(OrderedDependedTestsExtension.class) @ExtendWith(OrderedDependedTestsExtension.class)
public class ImportHostingAssets extends CsvDataImport { public class ImportHostingAssets extends BaseOfficeDataImport {
private static final Set<String> NOBODY_SUBSTITUTES = Set.of("nomail", "bounce"); private static final Set<String> NOBODY_SUBSTITUTES = Set.of("nomail", "bounce");
@ -132,56 +156,14 @@ public class ImportHostingAssets extends CsvDataImport {
final ObjectMapper jsonMapper = new ObjectMapper(); final ObjectMapper jsonMapper = new ObjectMapper();
@Autowired
HsBookingDebitorRepository debitorRepo;
@Autowired
LiquibaseMigration liquibase;
@Value("${HSADMINNG_OFFICE_DATA_SQL_FILE:/db/released-only-office-schema-with-import-test-data.sql}")
String officeSchemaAndDataSqlFile;
@Test
@Order(11000)
@SneakyThrows
void liquibaseMigrationForBookingAndHosting() {
executeSqlScript(officeSchemaAndDataSqlFile);
liquibase.assertReferenceStatusAfterRestore(286, "hs-booking-SCHEMA");
makeSureThatTheImportAdminUserExists();
liquibase.runWithContexts("migration", "without-test-data");
liquibase.assertThatCurrentMigrationsGotApplied(331, "hs-booking-SCHEMA");
}
@Test @Test
@Order(11010) @Order(11010)
void createBookingProjects() { void createBookingProjects() {
debitors.forEach((id, debitor) -> {
record PartnerLegacyIdMapping(UUID uuid, Integer bp_id) {} bookingProjects.put(id, HsBookingProjectRealEntity.builder()
record DebitorRecord(UUID uuid, Integer version, String defaultPrefix) {} .caption(debitor.getDefaultPrefix() + " default project")
.debitor(em.find(HsBookingDebitorEntity.class, debitor.getUuid()))
final var partnerLegacyIdMappings = em.createNativeQuery( .build());
"""
select debitor.uuid, pid.bp_id
from hs_office.debitor debitor
join hs_office.relation debitorRel on debitor.debitorReluUid=debitorRel.uuid
join hs_office.relation partnerRel on partnerRel.holderUuid=debitorRel.anchorUuid
join hs_office.partner partner on partner.partnerReluUid=partnerRel.uuid
join hs_office.partner_legacy_id pid on partner.uuid=pid.uuid
""", PartnerLegacyIdMapping.class).getResultList();
//noinspection unchecked
final var debitorUuidToLegacyBpIdMap = ((List<PartnerLegacyIdMapping>) partnerLegacyIdMappings).stream()
.collect(toMap(row -> row.uuid, row -> row.bp_id));
final var debitors = em.createNativeQuery(
"select debitor.uuid, debitor.version, debitor.defaultPrefix from hs_office.debitor debitor",
DebitorRecord.class).getResultList();
//noinspection unchecked
((List<DebitorRecord>) debitors).forEach(debitor -> {
bookingProjects.put(
debitorUuidToLegacyBpIdMap.get(debitor.uuid), HsBookingProjectRealEntity.builder()
.version(debitor.version)
.caption(debitor.defaultPrefix + " default project")
.debitor(em.find(HsBookingDebitorEntity.class, debitor.uuid))
.build());
}); });
} }
@ -519,11 +501,11 @@ public class ImportHostingAssets extends CsvDataImport {
@SneakyThrows @SneakyThrows
void importZonenfiles() { void importZonenfiles() {
final var resolver = new PathMatchingResourcePatternResolver(); final var resolver = new PathMatchingResourcePatternResolver();
final var resources = resolver.getResources("/" + MIGRATION_DATA_PATH + "/hosting/zonefiles/*.json"); final var resources = resolver.getResources("/" + MIGRATION_DATA_PATH + "/hosting/zonefiles/*.json");
for (var resource : resources) { for (var resource : resources) {
System.out.println("Processing zonenfile: " + resource); System.out.println("Processing zonenfile: " + resource);
importZonefiles(vmName(resource.getFilename()), resourceAsString(resource)); importZonefiles(vmName(resource.getFilename()), resourceAsString(resource));
} }
} }
@Test @Test
@ -685,7 +667,7 @@ public class ImportHostingAssets extends CsvDataImport {
void validateDbUserAssets() { void validateDbUserAssets() {
validateHostingAssets(dbUserAssets); validateHostingAssets(dbUserAssets);
} }
@Test @Test
@Order(18032) @Order(18032)
void validateDbAssets() { void validateDbAssets() {
@ -731,10 +713,10 @@ public class ImportHostingAssets extends CsvDataImport {
void validateHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) { void validateHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) {
assets.forEach((id, ha) -> { assets.forEach((id, ha) -> {
logError(() -> logError(() ->
new HostingAssetEntitySaveProcessor(em, ha) new HostingAssetEntitySaveProcessor(em, ha)
.preprocessEntity() .preprocessEntity()
.validateEntity() .validateEntity()
.prepareForSave() .prepareForSave()
); );
}); });
} }
@ -746,12 +728,9 @@ public class ImportHostingAssets extends CsvDataImport {
if (isImportingControlledTestData()) { if (isImportingControlledTestData()) {
expectError("zonedata dom_owner of mellis.de is old00 but expected to be mim00"); expectError("zonedata dom_owner of mellis.de is old00 but expected to be mim00");
expectError("\nexpected: \"vm1068\"\n but was: \"vm1093\""); expectError("\nexpected: \"vm1068\"\n but was: \"vm1093\"");
expectError( expectError("['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]");
"['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]"); expectError("['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
expectError( expectError("['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
"['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
expectError(
"['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
} }
this.assertNoErrors(); this.assertNoErrors();
} }
@ -759,7 +738,7 @@ public class ImportHostingAssets extends CsvDataImport {
// -------------------------------------------------------------------------------------------- // --------------------------------------------------------------------------------------------
@Test @Test
@Order(19100) @Order(19000)
@Commit @Commit
void persistBookingProjects() { void persistBookingProjects() {
@ -767,12 +746,12 @@ public class ImportHostingAssets extends CsvDataImport {
jpaAttempt.transacted(() -> { jpaAttempt.transacted(() -> {
context(rbacSuperuser); context(rbacSuperuser);
bookingProjects.forEach(this::persistViaSql); bookingProjects.forEach(this::persist);
}).assertSuccessful(); }).assertSuccessful();
} }
@Test @Test
@Order(19110) @Order(19010)
@Commit @Commit
void persistBookingItems() { void persistBookingItems() {
@ -1058,15 +1037,15 @@ public class ImportHostingAssets extends CsvDataImport {
void verifyMariaDbLegacyIds() { void verifyMariaDbLegacyIds() {
assumeThatWeAreImportingControlledTestData(); assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(MARIADB_DATABASE)).isEqualTo(""" assertThat(fetchHosingAssetLegacyIds(MARIADB_DATABASE)).isEqualTo("""
1786 1786
1805 1805
4908 4908
4941 4941
4942 4942
7520 7520
7521 7521
7604 7604
""".trim()); """.trim());
assertThat(missingHostingAsstLegacyIds(MARIADB_DATABASE)).isEmpty(); assertThat(missingHostingAsstLegacyIds(MARIADB_DATABASE)).isEmpty();
} }
@ -1091,15 +1070,14 @@ public class ImportHostingAssets extends CsvDataImport {
assumeThatWeAreImportingControlledTestData(); assumeThatWeAreImportingControlledTestData();
final var haCount = jpaAttempt.transacted(() -> { final var haCount = jpaAttempt.transacted(() -> {
context(rbacSuperuser, "hs_booking.project#D-1000300-mimdefaultproject:AGENT"); context(rbacSuperuser, "hs_booking.project#D-1000300-mimdefaultproject:AGENT");
return (Integer) em.createNativeQuery( return (Integer) em.createNativeQuery("select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'", Integer.class)
"select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'", .getSingleResult();
Integer.class) }).assertSuccessful().returnedValue();
.getSingleResult();
}).assertSuccessful().returnedValue();
assertThat(haCount).isEqualTo(68); assertThat(haCount).isEqualTo(68);
} }
// ============================================================================================ // ============================================================================================
@Test @Test
@ -1127,7 +1105,7 @@ public class ImportHostingAssets extends CsvDataImport {
if (bi.getParentItem() != null) { if (bi.getParentItem() != null) {
persistRecursively(key, HsBookingItemEntityValidatorRegistry.validated(em, bi.getParentItem())); persistRecursively(key, HsBookingItemEntityValidatorRegistry.validated(em, bi.getParentItem()));
} }
persistViaSql(key, HsBookingItemEntityValidatorRegistry.validated(em, bi)); persist(key, HsBookingItemEntityValidatorRegistry.validated(em, bi));
} }
private void persistHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) { private void persistHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) {
@ -1151,7 +1129,7 @@ public class ImportHostingAssets extends CsvDataImport {
"'EMAIL_ADDRESS:.*\\.config\\.target' .*" "'EMAIL_ADDRESS:.*\\.config\\.target' .*"
) )
.prepareForSave() .prepareForSave()
.saveUsing(entity -> persistViaSql(entry.getKey(), entity)) .saveUsing(entity -> persist(entry.getKey(), entity))
.validateContext() .validateContext()
)); ));
} }
@ -1243,7 +1221,9 @@ public class ImportHostingAssets extends CsvDataImport {
bookingItems.put(packet_id, bookingItem); bookingItems.put(packet_id, bookingItem);
final var haType = determineHaType(basepacket_code); final var haType = determineHaType(basepacket_code);
logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || defaultPrefix(bookingItem) logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || bookingItem.getRelatedProject()
.getDebitor()
.getDefaultPrefix()
.equals("hsh")) .equals("hsh"))
.as("packet.free only supported for Hostsharing-Assets and ManagedWebspace in customer-ManagedServer, but is set for " .as("packet.free only supported for Hostsharing-Assets and ManagedWebspace in customer-ManagedServer, but is set for "
+ packet_name) + packet_name)
@ -1282,14 +1262,14 @@ public class ImportHostingAssets extends CsvDataImport {
managedWebspace.setParentAsset(parentAsset); managedWebspace.setParentAsset(parentAsset);
if (parentAsset.getRelatedProject() != managedWebspace.getRelatedProject() if (parentAsset.getRelatedProject() != managedWebspace.getRelatedProject()
&& managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00) { && managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00 ) {
assertThat(managedWebspace.getIdentifier()).startsWith("xyz"); assertThat(managedWebspace.getIdentifier()).startsWith("xyz");
final var hshDebitor = managedWebspace.getBookingItem().getProject().getDebitor(); final var hshDebitor = managedWebspace.getBookingItem().getProject().getDebitor();
final var newProject = HsBookingProjectRealEntity.builder() final var newProject = HsBookingProjectRealEntity.builder()
.debitor(hshDebitor) .debitor(hshDebitor)
.caption(parentAsset.getIdentifier() + " Monitor") .caption(parentAsset.getIdentifier() + " Monitor")
.build(); .build();
bookingProjects.put(Collections.max(bookingProjects.keySet()) + 1, newProject); bookingProjects.put(Collections.max(bookingProjects.keySet())+1, newProject);
managedWebspace.getBookingItem().setProject(newProject); managedWebspace.getBookingItem().setProject(newProject);
} else { } else {
managedWebspace.getBookingItem().setParentItem(parentAsset.getBookingItem()); managedWebspace.getBookingItem().setParentItem(parentAsset.getBookingItem());
@ -1298,13 +1278,6 @@ public class ImportHostingAssets extends CsvDataImport {
}); });
} }
private String defaultPrefix(final HsBookingItem bookingItem) {
return ofNullable(bookingItem.getProject())
.map(HsBookingProject::getDebitor)
.map(HsBookingDebitorEntity::getDefaultPrefix)
.orElse("<no default prefix for BI: " + bookingItem.getCaption() + ">");
}
private void importPacketComponents(final String[] header, final List<String[]> records) { private void importPacketComponents(final String[] header, final List<String[]> records) {
final var columns = new Columns(header); final var columns = new Columns(header);
records.stream() records.stream()
@ -1651,25 +1624,20 @@ public class ImportHostingAssets extends CsvDataImport {
entry("includes", options.contains("includes")), entry("includes", options.contains("includes")),
entry("letsencrypt", options.contains("letsencrypt")), entry("letsencrypt", options.contains("letsencrypt")),
entry("multiviews", options.contains("multiviews")), entry("multiviews", options.contains("multiviews")),
entry( entry("subdomains", withDefault(rec.getString("valid_subdomain_names"), "*")
"subdomains", withDefault(rec.getString("valid_subdomain_names"), "*") .split(",")),
.split(",")), entry("fcgi-php-bin", withDefault(
entry( rec.getString("fcgi_php_bin"),
"fcgi-php-bin", withDefault( httpDomainSetupValidator.getProperty("fcgi-php-bin").defaultValue())),
rec.getString("fcgi_php_bin"), entry("passenger-nodejs", withDefault(
httpDomainSetupValidator.getProperty("fcgi-php-bin").defaultValue())), rec.getString("passenger_nodejs"),
entry( httpDomainSetupValidator.getProperty("passenger-nodejs").defaultValue())),
"passenger-nodejs", withDefault( entry("passenger-python", withDefault(
rec.getString("passenger_nodejs"), rec.getString("passenger_python"),
httpDomainSetupValidator.getProperty("passenger-nodejs").defaultValue())), httpDomainSetupValidator.getProperty("passenger-python").defaultValue())),
entry( entry("passenger-ruby", withDefault(
"passenger-python", withDefault( rec.getString("passenger_ruby"),
rec.getString("passenger_python"), httpDomainSetupValidator.getProperty("passenger-ruby").defaultValue()))
httpDomainSetupValidator.getProperty("passenger-python").defaultValue())),
entry(
"passenger-ruby", withDefault(
rec.getString("passenger_ruby"),
httpDomainSetupValidator.getProperty("passenger-ruby").defaultValue()))
)) ))
.build(); .build();
domainHttpSetupAssets.put(domain_id, domainHttpSetupAsset); domainHttpSetupAssets.put(domain_id, domainHttpSetupAsset);
@ -1776,10 +1744,9 @@ public class ImportHostingAssets extends CsvDataImport {
logError(() -> assertThat(vmName).isEqualTo(domUser.getParentAsset().getParentAsset().getIdentifier())); logError(() -> assertThat(vmName).isEqualTo(domUser.getParentAsset().getParentAsset().getIdentifier()));
//noinspection unchecked //noinspection unchecked
zoneData.put( zoneData.put("user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream()
"user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream() .map(userRR -> userRR.stream().map(Object::toString).collect(joining(" ")))
.map(userRR -> userRR.stream().map(Object::toString).collect(joining(" "))) .toArray(String[]::new)
.toArray(String[]::new)
); );
domainDnsSetupAsset.getConfig().putAll(zoneData); domainDnsSetupAsset.getConfig().putAll(zoneData);
} else { } else {
@ -1930,13 +1897,13 @@ public class ImportHostingAssets extends CsvDataImport {
private String fetchHosingAssetLegacyIds(final HsHostingAssetType type) { private String fetchHosingAssetLegacyIds(final HsHostingAssetType type) {
//noinspection unchecked //noinspection unchecked
return ((List<List<?>>) em.createNativeQuery( return ((List<List<?>>) em.createNativeQuery(
""" """
select li.* from hs_hosting.asset_legacy_id li SELECT li.* FROM hs_hosting.asset_legacy_id li
join hs_hosting.asset ha on ha.uuid=li.uuid JOIN hs_hosting.asset ha ON ha.uuid=li.uuid
where cast(ha.type as text)=:type WHERE CAST(ha.type AS text)=:type
order by legacy_id ORDER BY legacy_id
""", """,
List.class) List.class)
.setParameter("type", type.name()) .setParameter("type", type.name())
.getResultList() .getResultList()
).stream().map(row -> row.get(1).toString()).collect(joining("\n")); ).stream().map(row -> row.get(1).toString()).collect(joining("\n"));
@ -1945,29 +1912,16 @@ public class ImportHostingAssets extends CsvDataImport {
private String missingHostingAsstLegacyIds(final HsHostingAssetType type) { private String missingHostingAsstLegacyIds(final HsHostingAssetType type) {
//noinspection unchecked //noinspection unchecked
return ((List<List<?>>) em.createNativeQuery( return ((List<List<?>>) em.createNativeQuery(
""" """
select ha.uuid, ha.type, ha.identifier from hs_hosting.asset ha SELECT ha.uuid, ha.type, ha.identifier FROM hs_hosting.asset ha
join hs_hosting.asset_legacy_id li on li.uuid=ha.uuid JOIN hs_hosting.asset_legacy_id li ON li.uuid=ha.uuid
where li.legacy_id is null and cast(ha.type as text)=:type WHERE li.legacy_id is null AND CAST(ha.type AS text)=:type
order by li.legacy_id ORDER BY li.legacy_id
""", """,
List.class) List.class)
.setParameter("type", type.name()) .setParameter("type", type.name())
.getResultList()).stream() .getResultList()).stream()
.map(row -> row.stream().map(Object::toString).collect(joining(", "))) .map(row -> row.stream().map(Object::toString).collect(joining(", ")))
.collect(joining("\n")); .collect(joining("\n"));
} }
@SneakyThrows
private void executeSqlScript(final String sqlFile) {
jpaAttempt.transacted(() -> {
try (InputStream resourceStream = resourceOf(sqlFile).getInputStream()) {
final var sqlScript = new String(copyToByteArray(resourceStream), UTF_8);
final var emf = (EntityManagerFactoryInfo) em.getEntityManagerFactory();
new JdbcTemplate(emf.getDataSource()).execute(sqlScript);
} catch (IOException e) {
throw new RuntimeException(e);
}
}).assertSuccessful();
}
} }

View File

@ -0,0 +1,61 @@
package net.hostsharing.hsadminng.hs.migration;
import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.context.annotation.Import;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;
/*
* This 'test' includes the complete legacy 'office' data import.
*
* There is no code in 'main' because the import is not needed at normal runtime.
* There is some test data in Java resources to verify the data conversion.
* For a real import, a main method will be added later
* which reads CSV files from the file system.
*
* When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
*
* In a real Hostsharing environment, these are created via (the old) hsadmin:
CREATE USER hsh99_admin WITH PASSWORD 'password';
CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;
CREATE USER hsh99_restricted WITH PASSWORD 'password';
\c hsh99_hsadminng
GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;
* Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- maybe something like that is needed for the 2nd user
-- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;
* Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.
* To finally import the office data, run:
*
* gw-importOfficeData # comes from .aliases file and uses .environment
*/
@Tag("importOfficeData")
@DataJpaTest(properties = {
"spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importOfficeDataTC}",
"spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
"spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
"hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
})
@ActiveProfiles("without-test-data")
@DirtiesContext
@Import({ Context.class, JpaAttempt.class })
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@ExtendWith(OrderedDependedTestsExtension.class)
public class ImportOfficeData extends BaseOfficeDataImport {
}

View File

@ -1,17 +1,33 @@
package net.hostsharing.hsadminng.hs.migration; package net.hostsharing.hsadminng.hs.migration;
import liquibase.Liquibase;
import lombok.SneakyThrows;
import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.jdbc.Sql; import org.springframework.test.context.jdbc.Sql;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.jdbc.ContainerDatabaseDriver;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import javax.sql.DataSource;
import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.InputStreamReader;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.commons.io.FileUtils.readFileToString;
import static org.apache.commons.io.FileUtils.write;
import static org.apache.commons.io.FileUtils.writeStringToFile;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS;
// BLOG: Liquibase-migration-test (not before the reference-SQL-dump-generation is simplified) // BLOG: Liquibase-migration-test (not before the reference-SQL-dump-generation is simplified)
@ -24,9 +40,9 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE
* <p>The test works as follows:</p> * <p>The test works as follows:</p>
* *
* <ol> * <ol>
* <li>the database is initialized by `db/released-only-office-schema-with-test-data.sql` from the test-resources</li> * <li>the database is initialized by `db/prod-only-office-schema-with-test-data.sql` from the test-resources</li>
* <li>the current Liquibase-migrations (only-office but with-test-data) are performed</li> * <li>the current Liquibase-migrations (only-office but with-test-data) are performed</li>
* <li>a new dump is written to `db/released-only-office-schema-with-test-data.sql` in the build-directory</li> * <li>a new dump is written to `db/prod-only-office-schema-with-test-data.sql` in the build-directory</li>
* <li>an extra Liquibase-changeset (liquibase-migration-test) is applied</li> * <li>an extra Liquibase-changeset (liquibase-migration-test) is applied</li>
* <li>it's asserted that the extra changeset got applied</li> * <li>it's asserted that the extra changeset got applied</li>
* </ol> * </ol>
@ -42,31 +58,123 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE
@DirtiesContext @DirtiesContext
@ActiveProfiles("liquibase-migration-test") @ActiveProfiles("liquibase-migration-test")
@Import(LiquibaseConfig.class) @Import(LiquibaseConfig.class)
@Sql(value = "/db/released-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS) // release-schema @Sql(value = "/db/prod-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS)
public class LiquibaseCompatibilityIntegrationTest { public class LiquibaseCompatibilityIntegrationTest {
private static final String EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION = "hs-global-liquibase-migration-test"; private static final String EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION = "hs-global-liquibase-migration-test";
private static final int EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP = 287;
@Value("${spring.datasource.url}")
private String jdbcUrl;
@Autowired @Autowired
private LiquibaseMigration liquibase; private DataSource dataSource;
@Autowired
private Liquibase liquibase;
@PersistenceContext
private EntityManager em;
@Test @Test
void migrationWorksBasedOnAPreviouslyPopulatedSchema() { void migrationWorksBasedOnAPreviouslyPopulatedSchema() {
// check the initial status from the @Sql-annotation // check the initial status from the @Sql-annotation
final var initialChangeSetCount = liquibase.assertReferenceStatusAfterRestore( final var initialChangeSetCount = assertProdReferenceStatusAfterRestore();
EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
// run the current migrations and dump the result to the build-directory // run the current migrations and dump the result to the build-directory
liquibase.runWithContexts("only-office", "with-test-data"); runLiquibaseMigrationsWithContexts("only-office", "with-test-data");
PostgresTestcontainer.dump(jdbcUrl, new File("build/db/released-only-office-schema-with-test-data.sql")); dumpTo(new File("build/db/prod-only-office-schema-with-test-data.sql"));
// then add another migration and assert if it was applied // then add another migration and assert if it was applied
liquibase.runWithContexts("liquibase-migration-test"); runLiquibaseMigrationsWithContexts("liquibase-migration-test");
liquibase.assertThatCurrentMigrationsGotApplied( assertThatCurrentMigrationsGotApplied(initialChangeSetCount);
initialChangeSetCount, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION); }
private int assertProdReferenceStatusAfterRestore() {
final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'");
assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock");
final var liquibaseScripts1 = singleColumnSqlQuery("SELECT * FROM public.databasechangelog");
assertThat(liquibaseScripts1).hasSizeGreaterThan(285);
assertThat(liquibaseScripts1).doesNotContain(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
final var initialChangeSetCount = liquibaseScripts1.size();
return initialChangeSetCount;
}
private void assertThatCurrentMigrationsGotApplied(final int initialChangeSetCount) {
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount);
assertThat(liquibaseScripts).contains(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
}
@SneakyThrows
private void dumpTo(final File targetFileName) {
makeDir(targetFileName.getParentFile());
final var jdbcDatabaseContainer = getJdbcDatabaseContainer();
final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName());
final var pb = new ProcessBuilder(
"pg_dump", "--column-inserts", "--disable-dollar-quoting",
"--host=" + jdbcDatabaseContainer.getHost(),
"--port=" + jdbcDatabaseContainer.getFirstMappedPort(),
"--username=" + jdbcDatabaseContainer.getUsername() ,
"--dbname=" + jdbcDatabaseContainer.getDatabaseName(),
"--file=" + sqlDumpFile.getCanonicalPath()
);
pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword());
final var process = pb.start();
int exitCode = process.waitFor();
final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()))
.lines().collect(Collectors.joining("\n"));
assertThat(exitCode).describedAs(stderr).isEqualTo(0);
final var header = """
-- =================================================================================
-- Generated reference-SQL-dump (hopefully of latest prod-release).
-- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest
-- ---------------------------------------------------------------------------------
--
-- Explicit pre-initialization because we cannot use `pg_dump --create ...`
-- because the database is already created by Testcontainers.
--
CREATE ROLE postgres;
CREATE ROLE admin;
CREATE ROLE restricted;
""";
writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite
write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true);
assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted").isTrue();
}
private void makeDir(final File dir) {
assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue();
assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue();
}
@SneakyThrows
private void runLiquibaseMigrationsWithContexts(final String... contexts) {
liquibase.update(
new liquibase.Contexts(contexts),
new liquibase.LabelExpression());
}
private List<String> singleColumnSqlQuery(final String sql) {
//noinspection unchecked
final var rows = (List<Object>) em.createNativeQuery(sql).getResultList();
return rows.stream().map(Objects::toString).toList();
}
@SneakyThrows
private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer() {
final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class);
getContainerMethod.setAccessible(true);
@SuppressWarnings("rawtypes")
final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null,
"jdbc:tc:postgresql:15.5-bookworm:///liquibaseMigrationTestTC");
return container;
} }
} }
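The assertions above inspect Liquibase's own bookkeeping table; a hedged sketch of the kind of query involved, using the standard databasechangelog columns:

    SELECT id, author, filename, orderexecuted
      FROM public.databasechangelog
     ORDER BY orderexecuted DESC
     LIMIT 5;  -- the most recently applied changesets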

View File

@ -1,27 +1,28 @@
package net.hostsharing.hsadminng.hs.migration; package net.hostsharing.hsadminng.hs.migration;
import liquibase.Liquibase;
import liquibase.database.DatabaseFactory; import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection; import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.ClassLoaderResourceAccessor;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile; import org.springframework.context.annotation.Profile;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import javax.sql.DataSource; import javax.sql.DataSource;
@Configuration @Configuration
@Profile({"liquibase-migration", "liquibase-migration-test"}) @Profile("liquibase-migration-test")
public class LiquibaseConfig { public class LiquibaseConfig {
@PersistenceContext
private EntityManager em;
@Bean @Bean
public LiquibaseMigration liquibase(DataSource dataSource) throws Exception { public Liquibase liquibase(DataSource dataSource) throws Exception {
final var connection = dataSource.getConnection(); final var connection = dataSource.getConnection();
final var database = DatabaseFactory.getInstance() final var database = DatabaseFactory.getInstance()
.findCorrectDatabaseImplementation(new JdbcConnection(connection)); .findCorrectDatabaseImplementation(new JdbcConnection(connection));
return new LiquibaseMigration(em, "db/changelog/db.changelog-master.yaml", database); return new Liquibase(
"db/changelog/db.changelog-master.yaml", // Path to your Liquibase changelog
new ClassLoaderResourceAccessor(),
database
);
} }
} }

View File

@ -1,55 +0,0 @@
package net.hostsharing.hsadminng.hs.migration;
import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.resource.ClassLoaderResourceAccessor;
import lombok.SneakyThrows;
import jakarta.persistence.EntityManager;
import java.util.List;
import java.util.Objects;
import static org.assertj.core.api.Assertions.assertThat;
public class LiquibaseMigration extends Liquibase {
private final EntityManager em;
public LiquibaseMigration(final EntityManager em, final String changeLogFile, final Database db) {
super(changeLogFile, new ClassLoaderResourceAccessor(), db);
this.em = em;
}
@SneakyThrows
public void runWithContexts(final String... contexts) {
update(
new liquibase.Contexts(contexts),
new liquibase.LabelExpression());
}
public int assertReferenceStatusAfterRestore(
final int minExpectedLiquibaseChangelogs,
final String expectedChangesetOnlyAfterNewMigration) {
final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'");
assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock");
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
assertThat(liquibaseScripts).hasSize(minExpectedLiquibaseChangelogs);
assertThat(liquibaseScripts).doesNotContain(expectedChangesetOnlyAfterNewMigration);
return liquibaseScripts.size();
}
public void assertThatCurrentMigrationsGotApplied(
final int initialChangeSetCount,
final String expectedChangesetOnlyAfterNewMigration) {
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount);
assertThat(liquibaseScripts).contains(expectedChangesetOnlyAfterNewMigration);
}
private List<String> singleColumnSqlQuery(final String sql) {
//noinspection unchecked
final var rows = (List<Object>) em.createNativeQuery(sql).getResultList();
return rows.stream().map(Objects::toString).toList();
}
}
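
The removed helper above bundles the Liquibase update with two assertions against the databasechangelog table. A hedged sketch of the test flow it appears to support; the method names come from the class above, while the baseline count and changeset id are illustrative assumptions.

// Illustrative compatibility check using the LiquibaseMigration helper above;
// the concrete changeset id and expected count are placeholders.
public class LiquibaseCompatibilityCheckSketch {

    void verifyMigrationOnRestoredReferenceDump(final LiquibaseMigration liquibase) {
        // 1. the restored reference dump must not yet contain the new changeset
        final int baselineChangesetCount = liquibase.assertReferenceStatusAfterRestore(
                100, "new-changeset-id-only-in-this-release");

        // 2. apply the current changelog on top of the restored dump
        liquibase.runWithContexts("migration");

        // 3. afterwards the new changeset must have been applied
        liquibase.assertThatCurrentMigrationsGotApplied(
                baselineChangesetCount, "new-changeset-id-only-in-this-release");
    }
}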

View File

@ -1,81 +0,0 @@
package net.hostsharing.hsadminng.hs.migration;
import lombok.SneakyThrows;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.jdbc.ContainerDatabaseDriver;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.stream.Collectors;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.commons.io.FileUtils.readFileToString;
import static org.apache.commons.io.FileUtils.write;
import static org.apache.commons.io.FileUtils.writeStringToFile;
import static org.assertj.core.api.Assertions.assertThat;
public class PostgresTestcontainer {
@SneakyThrows
public static void dump(final String jdbcUrl, final File targetFileName) {
makeDir(targetFileName.getParentFile());
final var jdbcDatabaseContainer = getJdbcDatabaseContainer(jdbcUrl);
final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName());
final var pb = new ProcessBuilder(
"pg_dump", "--column-inserts", "--disable-dollar-quoting",
"--host=" + jdbcDatabaseContainer.getHost(),
"--port=" + jdbcDatabaseContainer.getFirstMappedPort(),
"--username=" + jdbcDatabaseContainer.getUsername() ,
"--dbname=" + jdbcDatabaseContainer.getDatabaseName(),
"--file=" + sqlDumpFile.getCanonicalPath()
);
pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword());
final var process = pb.start();
int exitCode = process.waitFor();
final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()))
.lines().collect(Collectors.joining("\n"));
assertThat(exitCode).describedAs(stderr).isEqualTo(0);
final var header = """
-- =================================================================================
-- Generated reference-SQL-dump (hopefully of latest prod-release).
-- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest
-- ---------------------------------------------------------------------------------
--
-- Explicit pre-initialization because we cannot use `pg_dump --create ...`
-- because the database is already created by Testcontainers.
--
CREATE ROLE postgres;
CREATE ROLE admin;
CREATE ROLE restricted;
""";
writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite
write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true);
assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted");
}
private static void makeDir(final File dir) {
assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue();
assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue();
}
@SneakyThrows
private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer(final String jdbcUrl) {
// TODO.test: check if, in the future, there is a better way to access auto-created Testcontainers
final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class);
getContainerMethod.setAccessible(true);
@SuppressWarnings("rawtypes")
final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null, jdbcUrl);
return container;
}
}
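
A short usage sketch for the dump helper above; it assumes the database was started implicitly by Testcontainers via a jdbc:tc: URL (which is what getJdbcDatabaseContainer relies on). Both the URL and the target file are placeholders, not taken from the project.

import java.io.File;

// Illustrative invocation of PostgresTestcontainer.dump(...); URL and path are placeholders.
public class ReferenceDumpSketch {

    public static void main(final String[] args) {
        // the container behind this URL must have been auto-created by
        // Testcontainers' ContainerDatabaseDriver, see getJdbcDatabaseContainer above
        final var jdbcUrl = "jdbc:tc:postgresql:15.5-bookworm:///liquibase_migration_test";
        PostgresTestcontainer.dump(jdbcUrl, new File("build/db/prod-reference.sql"));
    }
}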

View File

@ -86,7 +86,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101", "memberNumber": "M-1000101",
"memberNumberSuffix": "01", "memberNumberSuffix": "01",
"validFrom": "2022-10-01", "validFrom": "2022-10-01",
"validTo": "2024-12-30", "validTo": null,
"status": "ACTIVE" "status": "ACTIVE"
}, },
{ {
@ -94,7 +94,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000202", "memberNumber": "M-1000202",
"memberNumberSuffix": "02", "memberNumberSuffix": "02",
"validFrom": "2022-10-01", "validFrom": "2022-10-01",
"validTo": "2025-12-31", "validTo": null,
"status": "ACTIVE" "status": "ACTIVE"
}, },
{ {
@ -133,7 +133,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101", "memberNumber": "M-1000101",
"memberNumberSuffix": "01", "memberNumberSuffix": "01",
"validFrom": "2022-10-01", "validFrom": "2022-10-01",
"validTo": "2024-12-30", "validTo": null,
"status": "ACTIVE" "status": "ACTIVE"
} }
] ]
@ -161,7 +161,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000202", "memberNumber": "M-1000202",
"memberNumberSuffix": "02", "memberNumberSuffix": "02",
"validFrom": "2022-10-01", "validFrom": "2022-10-01",
"validTo": "2025-12-31", "validTo": null,
"status": "ACTIVE" "status": "ACTIVE"
} }
] ]
@ -177,7 +177,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
void globalAdmin_canAddMembership() { void globalAdmin_canAddMembership() {
context.define("superuser-alex@hostsharing.net"); context.define("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").getFirst(); final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("Third").get(0);
final var givenMemberSuffix = TEMP_MEMBER_NUMBER_SUFFIX; final var givenMemberSuffix = TEMP_MEMBER_NUMBER_SUFFIX;
final var expectedMemberNumber = Integer.parseInt(givenPartner.getPartnerNumber() + TEMP_MEMBER_NUMBER_SUFFIX); final var expectedMemberNumber = Integer.parseInt(givenPartner.getPartnerNumber() + TEMP_MEMBER_NUMBER_SUFFIX);
@ -189,7 +189,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
{ {
"partner.uuid": "%s", "partner.uuid": "%s",
"memberNumberSuffix": "%s", "memberNumberSuffix": "%s",
"validFrom": "2025-02-13", "validFrom": "2022-10-13",
"membershipFeeBillable": "true" "membershipFeeBillable": "true"
} }
""".formatted(givenPartner.getUuid(), givenMemberSuffix)) """.formatted(givenPartner.getUuid(), givenMemberSuffix))
@ -200,10 +200,10 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.statusCode(201) .statusCode(201)
.contentType(ContentType.JSON) .contentType(ContentType.JSON)
.body("uuid", isUuidValid()) .body("uuid", isUuidValid())
.body("partner.partnerNumber", is("P-10001")) .body("partner.partnerNumber", is("P-10003"))
.body("memberNumber", is("M-" + expectedMemberNumber)) .body("memberNumber", is("M-" + expectedMemberNumber))
.body("memberNumberSuffix", is(givenMemberSuffix)) .body("memberNumberSuffix", is(givenMemberSuffix))
.body("validFrom", is("2025-02-13")) .body("validFrom", is("2022-10-13"))
.body("validTo", equalTo(null)) .body("validTo", equalTo(null))
.header("Location", startsWith("http://localhost")) .header("Location", startsWith("http://localhost"))
.extract().header("Location"); // @formatter:on .extract().header("Location"); // @formatter:on
@ -239,7 +239,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101", "memberNumber": "M-1000101",
"memberNumberSuffix": "01", "memberNumberSuffix": "01",
"validFrom": "2022-10-01", "validFrom": "2022-10-01",
"validTo": "2024-12-30", "validTo": null,
"status": "ACTIVE" "status": "ACTIVE"
} }
""")); // @formatter:on """)); // @formatter:on
@ -297,13 +297,13 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
context.define("superuser-alex@hostsharing.net"); context.define("superuser-alex@hostsharing.net");
final var givenMembership = givenSomeTemporaryMembershipBessler("First"); final var givenMembership = givenSomeTemporaryMembershipBessler("First");
RestAssured // @formatter:off final var location = RestAssured // @formatter:off
.given() .given()
.header("current-subject", "superuser-alex@hostsharing.net") .header("current-subject", "superuser-alex@hostsharing.net")
.contentType(ContentType.JSON) .contentType(ContentType.JSON)
.body(""" .body("""
{ {
"validTo": "2025-12-31", "validTo": "2023-12-31",
"status": "CANCELLED" "status": "CANCELLED"
} }
""") """)
@ -316,8 +316,8 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.body("uuid", isUuidValid()) .body("uuid", isUuidValid())
.body("partner.partnerNumber", is("P-" + givenMembership.getPartner().getPartnerNumber())) .body("partner.partnerNumber", is("P-" + givenMembership.getPartner().getPartnerNumber()))
.body("memberNumberSuffix", is(givenMembership.getMemberNumberSuffix())) .body("memberNumberSuffix", is(givenMembership.getMemberNumberSuffix()))
.body("validFrom", is("2025-02-01")) .body("validFrom", is("2022-11-01"))
.body("validTo", is("2025-12-31")) .body("validTo", is("2023-12-31"))
.body("status", is("CANCELLED")); .body("status", is("CANCELLED"));
// @formatter:on // @formatter:on
@ -326,7 +326,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.matches(mandate -> { .matches(mandate -> {
assertThat(mandate.getPartner().toShortString()).isEqualTo("P-10001"); assertThat(mandate.getPartner().toShortString()).isEqualTo("P-10001");
assertThat(mandate.getMemberNumberSuffix()).isEqualTo(givenMembership.getMemberNumberSuffix()); assertThat(mandate.getMemberNumberSuffix()).isEqualTo(givenMembership.getMemberNumberSuffix());
assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2026-01-01)"); assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2024-01-01)");
assertThat(mandate.getStatus()).isEqualTo(CANCELLED); assertThat(mandate.getStatus()).isEqualTo(CANCELLED);
return true; return true;
}); });
@ -348,7 +348,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.contentType(ContentType.JSON) .contentType(ContentType.JSON)
.body(""" .body("""
{ {
"validTo": "2025-12-31", "validTo": "2024-01-01",
"status": "CANCELLED" "status": "CANCELLED"
} }
""") """)
@ -361,7 +361,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
// finally, the Membership is actually updated // finally, the Membership is actually updated
assertThat(membershipRepo.findByUuid(givenMembership.getUuid())).isPresent().get() assertThat(membershipRepo.findByUuid(givenMembership.getUuid())).isPresent().get()
.matches(mandate -> { .matches(mandate -> {
assertThat(mandate.getValidity().asString()).isEqualTo("[2025-02-01,2026-01-01)"); assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2024-01-02)");
assertThat(mandate.getStatus()).isEqualTo(CANCELLED); assertThat(mandate.getStatus()).isEqualTo(CANCELLED);
return true; return true;
}); });
@ -434,7 +434,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
final var newMembership = HsOfficeMembershipEntity.builder() final var newMembership = HsOfficeMembershipEntity.builder()
.partner(givenPartner) .partner(givenPartner)
.memberNumberSuffix(TEMP_MEMBER_NUMBER_SUFFIX) .memberNumberSuffix(TEMP_MEMBER_NUMBER_SUFFIX)
.validity(Range.closedInfinite(LocalDate.parse("2025-02-01"))) .validity(Range.closedInfinite(LocalDate.parse("2022-11-01")))
.status(ACTIVE) .status(ACTIVE)
.membershipFeeBillable(true) .membershipFeeBillable(true)
.build(); .build();
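
The assertions above pair an inclusive validTo date (e.g. 2023-12-31) with a half-open PostgreSQL range whose exclusive upper bound is the following day, [2022-11-01,2024-01-01). A small sketch of that relationship, using the Range type already imported by the test; the conversion helper itself, including the closedOpen factory call, is an assumption made for illustration.

import io.hypersistence.utils.hibernate.type.range.Range;

import java.time.LocalDate;

// Sketch of the validity mapping implied by the assertions above; the helper
// method is hypothetical, only the [closed,open) rendering is taken from the test.
public class ValidityRangeSketch {

    static Range<LocalDate> validity(final LocalDate validFrom, final LocalDate validTo) {
        return validTo == null
                ? Range.closedInfinite(validFrom)                   // asString(): "[2022-11-01,)"
                : Range.closedOpen(validFrom, validTo.plusDays(1)); // asString(): "[2022-11-01,2024-01-01)"
    }

    public static void main(final String[] args) {
        System.out.println(validity(LocalDate.parse("2022-11-01"), LocalDate.parse("2023-12-31")).asString());
        System.out.println(validity(LocalDate.parse("2022-11-01"), null).asString());
    }
}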

View File

@ -4,20 +4,19 @@ import io.hypersistence.utils.hibernate.type.range.Range;
import net.hostsharing.hsadminng.context.Context; import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.hs.office.debitor.HsOfficeDebitorRepository; import net.hostsharing.hsadminng.hs.office.debitor.HsOfficeDebitorRepository;
import net.hostsharing.hsadminng.hs.office.partner.HsOfficePartnerRealRepository; import net.hostsharing.hsadminng.hs.office.partner.HsOfficePartnerRealRepository;
import net.hostsharing.hsadminng.mapper.Array; import net.hostsharing.hsadminng.rbac.test.ContextBasedTestWithCleanup;
import net.hostsharing.hsadminng.rbac.grant.RawRbacGrantRepository; import net.hostsharing.hsadminng.rbac.grant.RawRbacGrantRepository;
import net.hostsharing.hsadminng.rbac.role.RawRbacRoleRepository; import net.hostsharing.hsadminng.rbac.role.RawRbacRoleRepository;
import net.hostsharing.hsadminng.rbac.test.ContextBasedTestWithCleanup; import net.hostsharing.hsadminng.mapper.Array;
import net.hostsharing.hsadminng.rbac.test.JpaAttempt; import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.postgresql.util.PSQLException;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
import org.springframework.orm.jpa.JpaSystemException; import org.springframework.orm.jpa.JpaSystemException;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContext;
@ -32,7 +31,7 @@ import static net.hostsharing.hsadminng.rbac.test.JpaAttempt.attempt;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
@DataJpaTest @DataJpaTest
@Import({ Context.class, JpaAttempt.class }) @Import( { Context.class, JpaAttempt.class })
@Tag("officeIntegrationTest") @Tag("officeIntegrationTest")
class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCleanup { class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCleanup {
@ -71,16 +70,15 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0); final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0);
// when // when
final var result = attempt( final var result = attempt(em, () -> {
em, () -> { final var newMembership = HsOfficeMembershipEntity.builder()
final var newMembership = HsOfficeMembershipEntity.builder() .memberNumberSuffix("11")
.memberNumberSuffix("11") .partner(givenPartner)
.partner(givenPartner) .validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.validity(Range.closedInfinite(LocalDate.parse("2025-01-01"))) .membershipFeeBillable(true)
.membershipFeeBillable(true) .build();
.build(); return toCleanup(membershipRepo.save(newMembership).load());
return toCleanup(membershipRepo.save(newMembership).load()); });
});
// then // then
result.assertSuccessful(); result.assertSuccessful();
@ -89,31 +87,6 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(membershipRepo.count()).isEqualTo(count + 1); assertThat(membershipRepo.count()).isEqualTo(count + 1);
} }
@Test
public void creatingMembershipForSamePartnerIsDisallowedIfAnotherOneIsStillActive() {
// given
context("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").getFirst();
// when
final var result = attempt(
em, () -> {
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix("11")
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2024-01-01")))
.membershipFeeBillable(true)
.build();
return toCleanup(membershipRepo.save(newMembership).load());
});
// then
result.assertExceptionWithRootCauseMessage(
PSQLException.class,
"Membership validity ranges overlap for partnerUuid " + givenPartner.getUuid() +
", partnerNumber " + givenPartner.getPartnerNumber());
}
@Test @Test
public void createsAndGrantsRoles() { public void createsAndGrantsRoles() {
// given // given
@ -124,17 +97,16 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
.toList(); .toList();
// when // when
attempt( attempt(em, () -> {
em, () -> { final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0);
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0); final var newMembership = HsOfficeMembershipEntity.builder()
final var newMembership = HsOfficeMembershipEntity.builder() .memberNumberSuffix("17")
.memberNumberSuffix("17") .partner(givenPartner)
.partner(givenPartner) .validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.validity(Range.closedInfinite(LocalDate.parse("2025-01-01"))) .membershipFeeBillable(true)
.membershipFeeBillable(true) .build();
.build(); return toCleanup(membershipRepo.save(newMembership));
return toCleanup(membershipRepo.save(newMembership)); }).assertSuccessful();
}).assertSuccessful();
// then // then
final var all = rawRoleRepo.findAll(); final var all = rawRoleRepo.findAll();
@ -173,7 +145,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
private void assertThatMembershipIsPersisted(final HsOfficeMembershipEntity saved) { private void assertThatMembershipIsPersisted(final HsOfficeMembershipEntity saved) {
final var found = membershipRepo.findByUuid(saved.getUuid()); final var found = membershipRepo.findByUuid(saved.getUuid());
assertThat(found).isNotEmpty().get().extracting(Object::toString).isEqualTo(saved.toString()); assertThat(found).isNotEmpty().get().extracting(Object::toString).isEqualTo(saved.toString()) ;
} }
} }
@ -191,8 +163,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
// then // then
exactlyTheseMembershipsAreReturned( exactlyTheseMembershipsAreReturned(
result, result,
"Membership(M-1000101, P-10001, [2022-10-01,2024-12-31), ACTIVE)", "Membership(M-1000101, P-10001, [2022-10-01,), ACTIVE)",
"Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)", "Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)",
"Membership(M-1000303, P-10003, [2022-10-01,), ACTIVE)"); "Membership(M-1000303, P-10003, [2022-10-01,), ACTIVE)");
} }
@ -206,9 +178,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var result = membershipRepo.findMembershipsByPartnerUuid(givenPartner.getUuid()); final var result = membershipRepo.findMembershipsByPartnerUuid(givenPartner.getUuid());
// then // then
exactlyTheseMembershipsAreReturned( exactlyTheseMembershipsAreReturned(result,
result, "Membership(M-1000101, P-10001, [2022-10-01,), ACTIVE)");
"Membership(M-1000101, P-10001, [2022-10-01,2024-12-31), ACTIVE)");
} }
@Test @Test
@ -223,7 +194,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(result) assertThat(result)
.isNotNull() .isNotNull()
.extracting(Object::toString) .extracting(Object::toString)
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)"); .isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
} }
@Test @Test
@ -238,7 +209,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(result) assertThat(result)
.isNotNull() .isNotNull()
.extracting(Object::toString) .extracting(Object::toString)
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)"); .isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
} }
@Test @Test
@ -250,9 +221,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var result = membershipRepo.findMembershipsByPartnerNumber(10002); final var result = membershipRepo.findMembershipsByPartnerNumber(10002);
// then // then
exactlyTheseMembershipsAreReturned( exactlyTheseMembershipsAreReturned(result,
result, "Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
"Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)");
} }
} }
@ -263,7 +233,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
public void globalAdmin_canUpdateValidityOfArbitraryMembership() { public void globalAdmin_canUpdateValidityOfArbitraryMembership() {
// given // given
context("superuser-alex@hostsharing.net"); context("superuser-alex@hostsharing.net");
final var givenMembership = givenSomeTemporaryMembership("First", "11"); final var givenMembership = givenSomeTemporaryMembership("First", "11");
assertThatMembershipExistsAndIsAccessibleToCurrentContext(givenMembership); assertThatMembershipExistsAndIsAccessibleToCurrentContext(givenMembership);
final var newValidityEnd = LocalDate.now(); final var newValidityEnd = LocalDate.now();
@ -303,8 +273,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
}); });
// then // then
result.assertExceptionWithRootCauseMessage( result.assertExceptionWithRootCauseMessage(JpaSystemException.class,
JpaSystemException.class,
"[403] Subject ", " is not allowed to update hs_office.membership uuid"); "[403] Subject ", " is not allowed to update hs_office.membership uuid");
} }
@ -412,16 +381,14 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
"[creating Membership test-data, hs_office.membership, INSERT, 03]"); "[creating Membership test-data, hs_office.membership, INSERT, 03]");
} }
private HsOfficeMembershipEntity givenSomeTemporaryMembership( private HsOfficeMembershipEntity givenSomeTemporaryMembership(final String partnerTradeName, final String memberNumberSuffix) {
final String partnerTradeName,
final String memberNumberSuffix) {
return jpaAttempt.transacted(() -> { return jpaAttempt.transacted(() -> {
context("superuser-alex@hostsharing.net"); context("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike(partnerTradeName).get(0); final var givenPartner = partnerRepo.findPartnerByOptionalNameLike(partnerTradeName).get(0);
final var newMembership = HsOfficeMembershipEntity.builder() final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix(memberNumberSuffix) .memberNumberSuffix(memberNumberSuffix)
.partner(givenPartner) .partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2025-02-01"))) .validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.membershipFeeBillable(true) .membershipFeeBillable(true)
.build(); .build();
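
The repository tests above lean on the JpaAttempt helper to run code in a fresh transaction and inspect the outcome. A compact sketch of the two patterns used there; SomeEntity, SomeRepository and the expected message fragments are placeholders.

import jakarta.persistence.EntityManager;

import org.springframework.orm.jpa.JpaSystemException;

import static net.hostsharing.hsadminng.rbac.test.JpaAttempt.attempt;

// Sketch of the JpaAttempt usage patterns shown in the tests above; the entity,
// repository and message fragments are illustrative only.
public class JpaAttemptUsageSketch {

    record SomeEntity() {}

    interface SomeRepository {
        SomeEntity save(SomeEntity entity);
    }

    // happy path: run inside a transaction, assert success, use the returned value
    SomeEntity saveAccepted(final EntityManager em, final SomeRepository repo) {
        final var result = attempt(em, () -> repo.save(new SomeEntity()));
        result.assertSuccessful();
        return result.returnedValue();
    }

    // error path: assert on the root-cause message of the wrapped exception
    void saveRejected(final EntityManager em, final SomeRepository repo) {
        attempt(em, () -> repo.save(new SomeEntity()))
                .assertExceptionWithRootCauseMessage(
                        JpaSystemException.class,
                        "[403] Subject ", " is not allowed to update");
    }
}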

View File

@ -287,12 +287,12 @@ class HsOfficeScenarioTests extends ScenarioTest {
@Test @Test
@Order(2011) @Order(2011)
@Requires("Debitor: D-3101000 - Test AG - main debitor") @Requires("Person: Test AG")
@Produces("Debitor: D-3101001 - Test AG - additional debitor") @Produces("Debitor: D-3101001 - Test AG - main debitor")
void shouldCreateAdditionDebitorForPartner() { void shouldCreateExternalDebitorForPartner() {
new CreateSelfDebitorForPartner(scenarioTest) new CreateExternalDebitorForPartner(scenarioTest)
.given("partnerPersonTradeName", "Test AG") .given("partnerPersonTradeName", "Test AG")
.given("billingContactCaption", "Test AG - billing department") .given("billingContactCaption", "Billing GmbH - billing department")
.given("billingContactEmailAddress", "billing@test-ag.example.org") .given("billingContactEmailAddress", "billing@test-ag.example.org")
.given("debitorNumberSuffix", "01") .given("debitorNumberSuffix", "01")
.given("billable", true) .given("billable", true)
@ -305,30 +305,10 @@ class HsOfficeScenarioTests extends ScenarioTest {
.keep(); .keep();
} }
@Test
@Order(2012)
@Requires("Person: Test AG")
@Produces("Debitor: D-3101002 - Test AG - external debitor")
void shouldCreateExternalDebitorForPartner() {
new CreateExternalDebitorForPartner(scenarioTest)
.given("partnerPersonTradeName", "Test AG")
.given("billingContactCaption", "Billing GmbH - billing department")
.given("billingContactEmailAddress", "billing@test-ag.example.org")
.given("debitorNumberSuffix", "02")
.given("billable", true)
.given("vatId", "VAT123456")
.given("vatCountryCode", "DE")
.given("vatBusiness", true)
.given("vatReverseCharge", false)
.given("defaultPrefix", "tsy")
.doRun()
.keep();
}
@Test @Test
@Order(2020) @Order(2020)
@Requires("Person: Test AG") @Requires("Person: Test AG")
@Produces(explicitly = "Debitor: D-3101002 - Test AG - delete debitor", permanent = false) @Produces(explicitly = "Debitor: D-3101000 - Test AG - delete debitor", permanent = false)
void shouldDeleteDebitor() { void shouldDeleteDebitor() {
new DeleteDebitor(scenarioTest) new DeleteDebitor(scenarioTest)
.given("partnerNumber", "P-31020") .given("partnerNumber", "P-31020")
@ -337,7 +317,7 @@ class HsOfficeScenarioTests extends ScenarioTest {
} }
@Test @Test
@Order(2021) @Order(2020)
@Requires("Debitor: D-3101000 - Test AG - main debitor") @Requires("Debitor: D-3101000 - Test AG - main debitor")
@Disabled("see TODO.spec in DontDeleteDefaultDebitor") @Disabled("see TODO.spec in DontDeleteDefaultDebitor")
void shouldNotDeleteDefaultDebitor() { void shouldNotDeleteDefaultDebitor() {
@ -407,39 +387,22 @@ class HsOfficeScenarioTests extends ScenarioTest {
void shouldCreateMembershipForPartner() { void shouldCreateMembershipForPartner() {
new CreateMembership(scenarioTest) new CreateMembership(scenarioTest)
.given("partnerName", "Test AG") .given("partnerName", "Test AG")
.given("validFrom", "2020-10-15") .given("validFrom", "2024-10-15")
.given("newStatus", "ACTIVE") .given("newStatus", "ACTIVE")
.given("membershipFeeBillable", "true") .given("membershipFeeBillable", "true")
.doRun() .doRun()
.keep(); .keep();
} }
@Test
@Order(4080)
@Requires("Membership: M-3101000 - Test AG")
@Produces("Membership: M-3101000 - Test AG - cancelled")
void shouldCancelMembershipOfPartner() {
new CancelMembership(scenarioTest)
.given("memberNumber", "M-3101000")
.given("validTo", "2023-12-31")
.given("newStatus", "CANCELLED")
.doRun()
.keep();
}
@Test @Test
@Order(4090) @Order(4090)
@Requires("Membership: M-3101000 - Test AG - cancelled") @Requires("Membership: M-3101000 - Test AG")
@Produces("Membership: M-3101001 - Test AG") void shouldCancelMembershipOfPartner() {
void shouldCreateSubsequentMembershipOfPartner() { new CancelMembership(scenarioTest)
new CreateMembership(scenarioTest) .given("memberNumber", "M-3101000")
.given("partnerName", "Test AG") .given("validTo", "2025-12-30")
.given("memberNumberSuffix", "01") .given("newStatus", "CANCELLED")
.given("validFrom", "2025-02-24") .doRun();
.given("newStatus", "ACTIVE")
.given("membershipFeeBillable", "true")
.doRun()
.keep();
} }
} }

View File

@ -19,7 +19,7 @@ public class DeleteDebitor extends UseCase<DeleteDebitor> {
.given("vatCountryCode", "DE") .given("vatCountryCode", "DE")
.given("vatBusiness", true) .given("vatBusiness", true)
.given("vatReverseCharge", false) .given("vatReverseCharge", false)
.given("defaultPrefix", "tsz")); .given("defaultPrefix", "tsy"));
} }
@Override @Override

View File

@ -1,6 +1,6 @@
package net.hostsharing.hsadminng.rbac.test; package net.hostsharing.hsadminng.rbac.test;
import lombok.SneakyThrows; import org.assertj.core.api.ObjectAssert;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.NestedExceptionUtils; import org.springframework.core.NestedExceptionUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@ -78,9 +78,9 @@ public class JpaAttempt {
public static class JpaResult<T> { public static class JpaResult<T> {
private final T value; private final T value;
private final Throwable exception; private final RuntimeException exception;
private JpaResult(final T value, final Throwable exception) { private JpaResult(final T value, final RuntimeException exception) {
this.value = value; this.value = value;
this.exception = exception; this.exception = exception;
} }
@ -93,7 +93,7 @@ public class JpaAttempt {
return new JpaResult<>(value, null); return new JpaResult<>(value, null);
} }
public static <T> JpaResult<T> forException(final Throwable exception) { public static <T> JpaResult<T> forException(final RuntimeException exception) {
return new JpaResult<>(null, exception); return new JpaResult<>(null, exception);
} }
@ -105,23 +105,20 @@ public class JpaAttempt {
return value; return value;
} }
public Throwable caughtException() { public ObjectAssert<T> assertThatResult() {
assertSuccessful();
return assertThat(returnedValue());
}
public RuntimeException caughtException() {
return exception; return exception;
} }
public <E extends Throwable> E caughtException(final Class<E> expectedExceptionClass) { @SuppressWarnings("unchecked")
//noinspection unchecked public <E extends RuntimeException> E caughtException(final Class<E> expectedExceptionClass) {
return caughtException((E) exception, expectedExceptionClass);
}
public static <E extends Throwable> E caughtException(final Throwable exception, final Class<E> expectedExceptionClass) {
if (expectedExceptionClass.isAssignableFrom(exception.getClass())) { if (expectedExceptionClass.isAssignableFrom(exception.getClass())) {
//noinspection unchecked
return (E) exception; return (E) exception;
} }
if(exception.getCause() != null && exception.getCause() != exception ) {
return caughtException(exception.getCause(), expectedExceptionClass);
}
throw new AssertionError("expected " + expectedExceptionClass + " but got " + exception); throw new AssertionError("expected " + expectedExceptionClass + " but got " + exception);
} }
@ -130,7 +127,7 @@ public class JpaAttempt {
} }
public void assertExceptionWithRootCauseMessage( public void assertExceptionWithRootCauseMessage(
final Class<? extends Throwable> expectedExceptionClass, final Class<? extends RuntimeException> expectedExceptionClass,
final String... expectedRootCauseMessages) { final String... expectedRootCauseMessages) {
assertThat(wasSuccessful()).as("wasSuccessful").isFalse(); assertThat(wasSuccessful()).as("wasSuccessful").isFalse();
final String firstRootCauseMessageLine = firstRootCauseMessageLineOf(caughtException(expectedExceptionClass)); final String firstRootCauseMessageLine = firstRootCauseMessageLineOf(caughtException(expectedExceptionClass));
@ -139,11 +136,11 @@ public class JpaAttempt {
} }
} }
@SneakyThrows public JpaResult<T> reThrowException() {
public void reThrowException() {
if (exception != null) { if (exception != null) {
throw exception; throw exception;
} }
return this;
} }
public JpaResult<T> assumeSuccessful() { public JpaResult<T> assumeSuccessful() {
@ -161,9 +158,9 @@ public class JpaAttempt {
return this; return this;
} }
private String firstRootCauseMessageLineOf(final Throwable exception) { private String firstRootCauseMessageLineOf(final RuntimeException exception) {
final var rootCause = NestedExceptionUtils.getRootCause(exception); final var rootCause = NestedExceptionUtils.getRootCause(exception);
return Optional.ofNullable(rootCause != null ? rootCause : exception) return Optional.ofNullable(rootCause)
.map(Throwable::getMessage) .map(Throwable::getMessage)
.map(message -> message.split("\\r|\\n|\\r\\n", 0)[0]) .map(message -> message.split("\\r|\\n|\\r\\n", 0)[0])
.orElse(null); .orElse(null);