Compare commits
2 Commits: master ... maintenanc

Author | SHA1 | Date
---|---|---
 | 1e2986b091 |
 | 313b373ce2 |
.aliases (17 changes)
@@ -1,4 +1,4 @@
# For using the alias gw-importHostingAssets,
# For using the alias gw-importOfficeData or gw-importHostingAssets,
# copy the file .tc-environment to .environment (ignored by git)
# and amend them according to your external DB.

@@ -71,6 +71,7 @@ function importLegacyData() {
        ./gradlew $target --rerun
    fi
}
alias gw-importOfficeData='importLegacyData importOfficeData'
alias gw-importHostingAssets='importLegacyData importHostingAssets'

alias podman-start='systemctl --user enable --now podman.socket && systemctl --user status podman.socket && ls -la /run/user/$UID/podman/podman.sock'

@@ -91,8 +92,8 @@ alias fp='grep -r '@Accepts' src | sed -e 's/^.*@/@/g' | sort -u | wc -l'
alias gw-spotless='./gradlew spotlessApply -x pitest -x test -x :processResources'
alias gw-check='. .aliases; . .tc-environment; gw test check -x pitest'

# HOWTO: run all 'normal' tests (by default without scenario+import-tests): `gw-test`
# You can also mention specific targets: `gw-test importHostingAssets`, in that case only these tests are executed.
# HOWTO: run all 'normal' tests (no scenario+import-tests): `gw-test`
# You can also mention specific targets: `gw-test importOfficeData`.
# This will always use the environment from `.tc-environment`.
#
# HOWTO: re-run tests even if no changes can be detected: `gw-test --rerun`

@@ -113,16 +114,16 @@ function _gwTest1() {
    echo "DONE gw $@"
}
function _gwTest() {
    . .aliases
    . .tc-environment
    rm -f /tmp/gwTest.tmp
    . .aliases;
    . .tc-environment;
    rm /tmp/gwTest.tmp
    if [ "$1" == "--all" ]; then
        shift # to remove the --all from $@
        # deliberately in separate gradlew calls to avoid Testcontainers-PostgreSQL problem spillover
        time (_gwTest1 unitTest "$@" &&
            _gwTest1 officeIntegrationTest bookingIntegrationTest hostingIntegrationTest "$@" &&
            _gwTest1 scenarioTest "$@" &&
            _gwTest1 importHostingAssets "$@");
            _gwTest1 importOfficeData importHostingAssets "$@");
    elif [ $# -eq 0 ] || [[ $1 == -* ]]; then
        time _gwTest1 test "$@";
    else

@@ -136,7 +137,7 @@ alias howto=bin/howto
alias cas-curl=bin/cas-curl

# etc/docker-compose.yml limits CPUs+MEM and includes a PostgreSQL config for analysing slow queries
alias gw-importHostingAssets-in-docker-compose='
alias gw-importOfficeData-in-docker-compose='
    docker-compose -f etc/docker-compose.yml down &&
    docker-compose -f etc/docker-compose.yml up -d && sleep 10 &&
    time gw-importHostingAssets'
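The HOWTO comments above describe the intended local test workflow. A minimal usage sketch (assuming the aliases file has been sourced and that `gw-test` is the wrapper around `_gwTest`, as the comments suggest; everything else is illustrative):

```bash
# load the helper aliases and the Testcontainers environment into the current shell
. .aliases
. .tc-environment

# run the 'normal' test set (scenario- and import-tests excluded)
gw-test

# run everything in separate Gradle invocations, or re-run a single import job
gw-test --all
gw-test importOfficeData --rerun
```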
@@ -7,7 +7,6 @@
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="postgres" />
<entry key="HSADMINNG_POSTGRES_JDBC_URL" value="jdbc:postgresql://localhost:5432/postgres" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
</map>
</option>
<option name="executionName" />

@@ -3,9 +3,9 @@
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
<entry key="HSADMINNG_SUPERUSER" value="import-superuser@hostsharing.net" />
</map>
</option>
<option name="executionName" />
.run/ImportOfficeData.run.xml (new file, 103 lines)
@@ -0,0 +1,103 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
    <ExternalSystemSettings>
      <option name="env">
        <map>
          <entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
          <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
          <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
        </map>
      </option>
      <option name="executionName" />
      <option name="externalProjectPath" value="$PROJECT_DIR$" />
      <option name="externalSystemIdString" value="GRADLE" />
      <option name="scriptParameters" value="" />
      <option name="taskDescriptions">
        <list />
      </option>
      <option name="taskNames">
        <list>
          <option value=":importOfficeData" />
          <option value="--tests" />
          <option value="&quot;net.hostsharing.hsadminng.hs.migration.ImportOfficeData&quot;" />
        </list>
      </option>
      <option name="vmOptions" />
    </ExternalSystemSettings>
    <ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
    <ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
    <EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
      <extension name="coverage" sample_coverage="false" />
    </EXTENSION>
    <DebugAllEnabled>false</DebugAllEnabled>
    <RunAsTest>true</RunAsTest>
    <method v="2" />
  </configuration>
  <configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
    <ExternalSystemSettings>
      <option name="env">
        <map>
          <entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
          <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
          <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
        </map>
      </option>
      <option name="executionName" />
      <option name="externalProjectPath" value="$PROJECT_DIR$" />
      <option name="externalSystemIdString" value="GRADLE" />
      <option name="scriptParameters" value="" />
      <option name="taskDescriptions">
        <list />
      </option>
      <option name="taskNames">
        <list>
          <option value=":importOfficeData" />
          <option value="--tests" />
          <option value="&quot;net.hostsharing.hsadminng.hs.office.migration.ImportOfficeData&quot;" />
        </list>
      </option>
      <option name="vmOptions" />
    </ExternalSystemSettings>
    <ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
    <ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
    <EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
      <extension name="coverage" sample_coverage="false" />
    </EXTENSION>
    <DebugAllEnabled>false</DebugAllEnabled>
    <RunAsTest>true</RunAsTest>
    <method v="2" />
  </configuration>
  <configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
    <ExternalSystemSettings>
      <option name="env">
        <map>
          <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
          <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
        </map>
      </option>
      <option name="executionName" />
      <option name="externalProjectPath" value="$PROJECT_DIR$" />
      <option name="externalSystemIdString" value="GRADLE" />
      <option name="scriptParameters" value="" />
      <option name="taskDescriptions">
        <list />
      </option>
      <option name="taskNames">
        <list>
          <option value=":importOfficeData" />
          <option value="--tests" />
          <option value="&quot;net.hostsharing.hsadminng.hs.migration.ImportOfficeData&quot;" />
        </list>
      </option>
      <option name="vmOptions" />
    </ExternalSystemSettings>
    <ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
    <ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
    <EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
      <extension name="coverage" sample_coverage="false" />
    </EXTENSION>
    <DebugAllEnabled>false</DebugAllEnabled>
    <RunAsTest>true</RunAsTest>
    <method v="2" />
  </configuration>
</component>
@@ -1,7 +1,7 @@
source .unset-environment

export HSADMINNG_POSTGRES_RESTRICTED_USERNAME=restricted
unset HSADMINNG_POSTGRES_JDBC_URL # dynamically set, different for normal tests and imports
export HSADMINNG_POSTGRES_ADMIN_USERNAME=admin
export HSADMINNG_SUPERUSER=import-superuser@hostsharing.net

export HSADMINNG_POSTGRES_ADMIN_PASSWORD=
export HSADMINNG_POSTGRES_RESTRICTED_USERNAME=restricted
export HSADMINNG_SUPERUSER=superuser-alex@hostsharing.net
export HSADMINNG_MIGRATION_DATA_PATH=migration
export LANG=en_US.UTF-8

@@ -4,5 +4,4 @@ unset HSADMINNG_POSTGRES_ADMIN_PASSWORD
unset HSADMINNG_POSTGRES_RESTRICTED_USERNAME
unset HSADMINNG_SUPERUSER
unset HSADMINNG_MIGRATION_DATA_PATH
unset HSADMINNG_OFFICE_DATA_SQL_FILE
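A sketch of how these two environment files appear to be meant to be used together, based only on the variables shown above (the concrete import invocation is an assumption):

```bash
# reset any previously exported HSADMINNG_* variables, then load the Testcontainers defaults
source .unset-environment
source .tc-environment

# the import jobs read HSADMINNG_MIGRATION_DATA_PATH, HSADMINNG_SUPERUSER etc. from the environment
./gradlew importOfficeData importHostingAssets
```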
Jenkinsfile (vendored, 4 changes)
@@ -55,9 +55,9 @@ pipeline {
                sh './gradlew bookingIntegrationTest hostingIntegrationTest --no-daemon'
            }
        }
        stage('Test-Imports') {
        stage('Import-Tests') {
            steps {
                sh './gradlew importHostingAssets --no-daemon'
                sh './gradlew importOfficeData importHostingAssets --no-daemon'
            }
        }
        stage ('Scenario-Tests') {
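The renamed stage only shells out to Gradle, so the same checks can be reproduced on a developer machine; a sketch (the `--no-daemon` flag is taken from the Jenkinsfile itself):

```bash
# mirror the CI integration-test and import-test stages locally
./gradlew bookingIntegrationTest hostingIntegrationTest --no-daemon
./gradlew importOfficeData importHostingAssets --no-daemon
```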
build.gradle (97 changes)
@@ -20,7 +20,7 @@ version = '0.0.1-SNAPSHOT'

wrapper {
    distributionType = Wrapper.DistributionType.BIN
    gradleVersion = '8.5'
    gradleVersion = '8.12.1'
}

// TODO.impl: self-attaching is deprecated, see:

@@ -42,8 +42,8 @@ configurations {

repositories {
    mavenCentral()
    maven { url 'https://repo.spring.io/milestone' }
    maven { url 'https://repo.spring.io/snapshot' }
    maven { url = 'https://repo.spring.io/milestone' }
    maven { url = 'https://repo.spring.io/snapshot' }
}

java {

@@ -109,14 +109,14 @@ dependencyManagement {
}

// Java Compiler Options
tasks.withType(JavaCompile) {
tasks.withType(JavaCompile).configureEach {
    options.compilerArgs += [
        "-parameters" // keep parameter names => no need for @Param for SpringData
    ]
}

// Configure tests
tasks.named('test') {
tasks.withType(Test).configureEach {
    useJUnitPlatform()
    jvmArgs '-Duser.language=en'
    jvmArgs '-Duser.country=US'

@@ -129,7 +129,7 @@ openapiProcessor {
    processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
    apiPath "$projectDir/src/main/resources/api-definition/api-definition.yaml"
    mapping "$projectDir/src/main/resources/api-definition/api-mappings.yaml"
    targetDir "$buildDir/generated/sources/openapi-javax"
    targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
    showWarnings true
    openApiNullable true
}

@@ -138,7 +138,7 @@ openapiProcessor {
    processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
    apiPath "$projectDir/src/main/resources/api-definition/rbac/rbac.yaml"
    mapping "$projectDir/src/main/resources/api-definition/rbac/api-mappings.yaml"
    targetDir "$buildDir/generated/sources/openapi-javax"
    targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
    showWarnings true
    openApiNullable true
}

@@ -147,7 +147,7 @@ openapiProcessor {
    processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
    apiPath "$projectDir/src/main/resources/api-definition/test/test.yaml"
    mapping "$projectDir/src/main/resources/api-definition/test/api-mappings.yaml"
    targetDir "$buildDir/generated/sources/openapi-javax"
    targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
    showWarnings true
    openApiNullable true
}

@@ -156,7 +156,7 @@ openapiProcessor {
    processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
    apiPath "$projectDir/src/main/resources/api-definition/hs-office/hs-office.yaml"
    mapping "$projectDir/src/main/resources/api-definition/hs-office/api-mappings.yaml"
    targetDir "$buildDir/generated/sources/openapi-javax"
    targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
    showWarnings true
    openApiNullable true
}

@@ -165,7 +165,7 @@ openapiProcessor {
    processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
    apiPath "$projectDir/src/main/resources/api-definition/hs-booking/hs-booking.yaml"
    mapping "$projectDir/src/main/resources/api-definition/hs-booking/api-mappings.yaml"
    targetDir "$buildDir/generated/sources/openapi-javax"
    targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
    showWarnings true
    openApiNullable true
}

@@ -174,7 +174,7 @@ openapiProcessor {
    processor 'io.openapiprocessor:openapi-processor-spring:2022.5'
    apiPath "$projectDir/src/main/resources/api-definition/hs-hosting/hs-hosting.yaml"
    mapping "$projectDir/src/main/resources/api-definition/hs-hosting/api-mappings.yaml"
    targetDir "$buildDir/generated/sources/openapi-javax"
    targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
    showWarnings true
    openApiNullable true
}

@@ -201,11 +201,11 @@ project.tasks.compileJava.dependsOn processSpring
// TODO.impl: Upgrade to io.openapiprocessor.openapi-processor >= 2024.2
// and use either `bean-validation: true` in api-mapping.yaml or `useSpringBoot3 true` (not sure where exactly).
task openApiGenerate(type: Copy) {
    from "$buildDir/generated/sources/openapi-javax"
    into "$buildDir/generated/sources/openapi"
    from layout.buildDirectory.dir("generated/sources/openapi-javax")
    into layout.buildDirectory.dir("generated/sources/openapi")
    filter { line -> line.replaceAll('javax', 'jakarta') }
}
compileJava.source "$buildDir/generated/sources/openapi"
compileJava.source layout.buildDirectory.dir("generated/sources/openapi")
compileJava.dependsOn openApiGenerate
openApiGenerate.dependsOn processSpring

@@ -263,7 +263,7 @@ test {
        'net.hostsharing.hsadminng.**.generated.**',
    ]
    useJUnitPlatform {
        excludeTags 'importHostingAssets', 'scenarioTest'
        excludeTags 'importOfficeData', 'importHostingAssets', 'scenarioTest'
    }
}

@@ -338,12 +338,15 @@ jacocoTestCoverageVerification {
// HOWTO: run all unit-tests which don't need a database: gw-test unitTest
tasks.register('unitTest', Test) {
    useJUnitPlatform {
        excludeTags 'importHostingAssets', 'scenarioTest', 'generalIntegrationTest',
        excludeTags 'importOfficeData', 'importHostingAssets', 'scenarioTest', 'generalIntegrationTest',
                'officeIntegrationTest', 'bookingIntegrationTest', 'hostingIntegrationTest'
    }

    group 'verification'
    description 'runs all unit-tests which do not need a database'
    group = 'verification'
    description = 'runs all unit-tests which do not need a database'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

@@ -354,8 +357,11 @@ tasks.register('generalIntegrationTest', Test) {
        includeTags 'generalIntegrationTest'
    }

    group 'verification'
    description 'runs integration tests which are not specific to a module, like base, rbac, config etc.'
    group = 'verification'
    description = 'runs integration tests which are not specific to a module, like base, rbac, config etc.'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

@@ -366,8 +372,11 @@ tasks.register('officeIntegrationTest', Test) {
        includeTags 'officeIntegrationTest'
    }

    group 'verification'
    description 'runs integration tests of the office module'
    group = 'verification'
    description = 'runs integration tests of the office module'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

@@ -378,8 +387,11 @@ tasks.register('bookingIntegrationTest', Test) {
        includeTags 'bookingIntegrationTest'
    }

    group 'verification'
    description 'runs integration tests of the booking module'
    group = 'verification'
    description = 'runs integration tests of the booking module'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

@@ -390,8 +402,25 @@ tasks.register('hostingIntegrationTest', Test) {
        includeTags 'hostingIntegrationTest'
    }

    group 'verification'
    description 'runs integration tests of the hosting module'
    group = 'verification'
    description = 'runs integration tests of the hosting module'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

tasks.register('importOfficeData', Test) {
    useJUnitPlatform {
        includeTags 'importOfficeData'
    }

    group = 'verification'
    description = 'run the import jobs as tests'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

@@ -401,8 +430,11 @@ tasks.register('importHostingAssets', Test) {
        includeTags 'importHostingAssets'
    }

    group 'verification'
    description 'run the import jobs as tests'
    group = 'verification'
    description = 'run the import jobs as tests'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

@@ -412,8 +444,11 @@ tasks.register('scenarioTest', Test) {
        includeTags 'scenarioTest'
    }

    group 'verification'
    description 'run the import jobs as tests'
    group = 'verification'
    description = 'run the import jobs as tests'

    testClassesDirs = testing.suites.test.sources.output.classesDirs
    classpath = testing.suites.test.sources.runtimeClasspath

    mustRunAfter spotlessJava
}

@@ -428,7 +463,7 @@ pitest {
    ]

    targetTests = ['net.hostsharing.hsadminng.**.*UnitTest', 'net.hostsharing.hsadminng.**.*RestTest']
    excludedTestClasses = ['**AcceptanceTest*', '**IntegrationTest*', '**ImportHostingAssets']
    excludedTestClasses = ['**AcceptanceTest*', '**IntegrationTest*', '**ImportOfficeData', '**ImportHostingAssets']

    pitestVersion = '1.17.0'
    junit5PluginVersion = '1.1.0'
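Each of the tag-filtered suites registered above is an ordinary Gradle `Test` task, so they can be invoked and combined directly; a sketch (task names as registered above, flags are standard Gradle options):

```bash
# run only the database-free unit tests
./gradlew unitTest

# run the module-specific integration suites in one build
./gradlew officeIntegrationTest bookingIntegrationTest hostingIntegrationTest

# force re-execution of the import jobs even when Gradle considers them up to date
./gradlew importOfficeData importHostingAssets --rerun-tasks
```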
build.gradle.kotlin (new file, 519 lines)
@ -0,0 +1,519 @@
|
||||
plugins {
|
||||
`java-platform`
|
||||
//id( "java" )
|
||||
id( "org.springframework.boot") version "3.4.1"
|
||||
id( "io.spring.dependency-management") version "1.1.7" // manages implicit dependencies
|
||||
id( "io.openapiprocessor.openapi-processor") version "2023.2" // generates Controller-interface and resources from API-spec
|
||||
id( "com.github.jk1.dependency-license-report") version "2.9" // checks dependency-license compatibility
|
||||
id( "org.owasp.dependencycheck") version "12.0.1" // checks dependencies for known vulnerabilities
|
||||
id( "com.diffplug.spotless") version "7.0.2" // formats + checks formatting for source-code
|
||||
id( "jacoco") // determines code-coverage of tests
|
||||
id( "info.solidsoft.pitest") version "1.15.0" // performs mutation testing
|
||||
id( "se.patrikerdes.use-latest-versions") version "0.2.18" // updates module and plugin versions
|
||||
id( "com.github.ben-manes.versions") version "0.52.0" // determines which dependencies have updates
|
||||
}
|
||||
|
||||
// HOWTO: find out which dependency versions are managed by Spring Boot:
|
||||
// https://docs.spring.io/spring-boot/appendix/dependency-versions/coordinates.html
|
||||
|
||||
group = "net.hostsharing"
|
||||
version = "0.0.1-SNAPSHOT"
|
||||
|
||||
tasks.named<Wrapper>("wrapper") {
|
||||
distributionType = Wrapper.DistributionType.BIN
|
||||
gradleVersion = "8.5"
|
||||
}
|
||||
|
||||
|
||||
// TODO.impl: self-attaching is deprecated, see:
|
||||
// https://javadoc.io/doc/org.mockito/mockito-core/latest/org/mockito/Mockito.html#0.3
|
||||
|
||||
configurations {
|
||||
named("compileOnly") {
|
||||
extendsFrom(configurations.named("annotationProcessor").get())
|
||||
}
|
||||
named("testCompile") {
|
||||
extendsFrom(configurations.named("testAnnotationProcessor").get())
|
||||
|
||||
// Only JUnit 5 (Jupiter) should be used at compile time.
|
||||
// For runtime it's still needed by testcontainers, though.
|
||||
exclude(group = "junit", module = "junit")
|
||||
exclude(group = "org.junit.vintage", module = "junit-vintage-engine")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
extensions.configure<JavaPluginExtension> {
|
||||
toolchain {
|
||||
languageVersion.set(JavaLanguageVersion.of(21))
|
||||
vendor.set(JvmVendorSpec.ADOPTIUM)
|
||||
implementation.set(JvmImplementation.VENDOR_SPECIFIC)
|
||||
}
|
||||
}
|
||||
|
||||
extra["testcontainersVersion"] = "1.17.3"
|
||||
|
||||
allprojects {
|
||||
repositories {
|
||||
mavenCentral()
|
||||
maven { url = uri("https://repo.spring.io/milestone") }
|
||||
maven { url = uri("https://repo.spring.io/snapshot") }
|
||||
}
|
||||
dependencies {
|
||||
implementation("org.springframework.boot:spring-boot-starter-data-jpa")
|
||||
implementation("org.springframework.boot:spring-boot-starter-data-rest")
|
||||
implementation("org.springframework.boot:spring-boot-starter-jdbc")
|
||||
implementation("org.springframework.boot:spring-boot-starter-web")
|
||||
implementation("org.springframework.boot:spring-boot-starter-validation")
|
||||
implementation("org.springframework.boot:spring-boot-starter-actuator")
|
||||
implementation("org.springframework.boot:spring-boot-starter-security")
|
||||
implementation("com.github.gavlyukovskiy:datasource-proxy-spring-boot-starter:1.10.0")
|
||||
implementation("org.springdoc:springdoc-openapi:2.8.3")
|
||||
classpath("org.postgresql:postgresql")
|
||||
classpath("org.liquibase:liquibase-core")
|
||||
classpath("io.hypersistence:hypersistence-utils-hibernate-63:3.9.0")
|
||||
classpath("com.fasterxml.jackson.datatype:jackson-datatype-jsr310")
|
||||
classpath("org.openapitools:jackson-databind-nullable:0.2.6")
|
||||
classpath("org.apache.commons:commons-text:1.13.0")
|
||||
classpath("net.java.dev.jna:jna:5.16.0")
|
||||
classpath("org.modelmapper:modelmapper:3.2.2")
|
||||
classpath("org.iban4j:iban4j:3.2.10-RELEASE")
|
||||
classpath("org.springdoc:springdoc-openapi-starter-webmvc-ui:2.8.3")
|
||||
classpath("org.reflections:reflections:0.10.2")
|
||||
|
||||
compileOnly("org.projectlombok:lombok")
|
||||
testCompileOnly("org.projectlombok:lombok")
|
||||
|
||||
// TODO.impl: version conflict with SpringDoc, check later and re-enable if fixed
|
||||
// developmentOnly "org.springframework.boot:spring-boot-devtools"
|
||||
|
||||
annotationProcessor("org.projectlombok:lombok")
|
||||
testAnnotationProcessor("org.projectlombok:lombok")
|
||||
|
||||
testImplementation("org.springframework.boot:spring-boot-starter-test")
|
||||
testImplementation("org.testcontainers:testcontainers")
|
||||
testImplementation("org.testcontainers:junit-jupiter")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter")
|
||||
testImplementation("org.testcontainers:postgresql")
|
||||
testImplementation("com.tngtech.archunit:archunit-junit5:1.3.0")
|
||||
testImplementation("io.rest-assured:spring-mock-mvc")
|
||||
testImplementation("org.hamcrest:hamcrest-core")
|
||||
testImplementation("org.pitest:pitest-junit5-plugin:1.2.1")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter-api")
|
||||
testImplementation("org.wiremock:wiremock-standalone:3.10.0")
|
||||
}
|
||||
}
|
||||
|
||||
// Java Compiler Options
|
||||
tasks.withType<JavaCompile>().configureEach {
|
||||
options.compilerArgs.add("-parameters") // keep parameter names => no need for @Param for SpringData
|
||||
}
|
||||
|
||||
// Configure tests
|
||||
tasks.withType<Test> {
|
||||
useJUnitPlatform()
|
||||
jvmArgs("-Duser.language=en", "-Duser.country=US")
|
||||
}
|
||||
|
||||
// OpenAPI Source Code Generation
|
||||
//openapiProcessor {
|
||||
// springRoot {
|
||||
// processorName = "spring"
|
||||
// processor = "io.openapiprocessor:openapi-processor-spring:2022.5"
|
||||
// apiPath "$projectDir/src/main/resources/api-definition/api-definition.yaml"
|
||||
// mapping "$projectDir/src/main/resources/api-definition/api-mappings.yaml"
|
||||
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
|
||||
// showWarnings true
|
||||
// openApiNullable true
|
||||
// }
|
||||
// springRbac {
|
||||
// processorName "spring"
|
||||
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
|
||||
// apiPath "$projectDir/src/main/resources/api-definition/rbac/rbac.yaml"
|
||||
// mapping "$projectDir/src/main/resources/api-definition/rbac/api-mappings.yaml"
|
||||
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
|
||||
// showWarnings true
|
||||
// openApiNullable true
|
||||
// }
|
||||
// springTest {
|
||||
// processorName "spring"
|
||||
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
|
||||
// apiPath "$projectDir/src/main/resources/api-definition/test/test.yaml"
|
||||
// mapping "$projectDir/src/main/resources/api-definition/test/api-mappings.yaml"
|
||||
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
|
||||
// showWarnings true
|
||||
// openApiNullable true
|
||||
// }
|
||||
// springHsOffice {
|
||||
// processorName "spring"
|
||||
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
|
||||
// apiPath "$projectDir/src/main/resources/api-definition/hs-office/hs-office.yaml"
|
||||
// mapping "$projectDir/src/main/resources/api-definition/hs-office/api-mappings.yaml"
|
||||
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
|
||||
// showWarnings true
|
||||
// openApiNullable true
|
||||
// }
|
||||
// springHsBooking {
|
||||
// processorName "spring"
|
||||
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
|
||||
// apiPath "$projectDir/src/main/resources/api-definition/hs-booking/hs-booking.yaml"
|
||||
// mapping "$projectDir/src/main/resources/api-definition/hs-booking/api-mappings.yaml"
|
||||
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
|
||||
// showWarnings true
|
||||
// openApiNullable true
|
||||
// }
|
||||
// springHsHosting {
|
||||
// processorName "spring"
|
||||
// processor "io.openapiprocessor:openapi-processor-spring:2022.5"
|
||||
// apiPath "$projectDir/src/main/resources/api-definition/hs-hosting/hs-hosting.yaml"
|
||||
// mapping "$projectDir/src/main/resources/api-definition/hs-hosting/api-mappings.yaml"
|
||||
// targetDir layout.buildDirectory.dir("generated/sources/openapi-javax")
|
||||
// showWarnings true
|
||||
// openApiNullable true
|
||||
// }
|
||||
//}
|
||||
//sourceSets.main.java.srcDir "build/generated/sources/openapi"
|
||||
|
||||
//abstract class ProcessSpring extends DefaultTask {}
|
||||
|
||||
//tasks.register("processSpring", ProcessSpring)
|
||||
//["processSpringRoot",
|
||||
// "processSpringRbac",
|
||||
// "processSpringTest",
|
||||
// "processSpringHsOffice",
|
||||
// "processSpringHsBooking",
|
||||
// "processSpringHsHosting"
|
||||
//].each {
|
||||
// project.tasks.processSpring.dependsOn it
|
||||
//}
|
||||
//project.tasks.processResources.dependsOn processSpring
|
||||
// project.tasks.compileJava.dependsOn processSpring
|
||||
|
||||
// Rename javax to jakarta in OpenApi generated java files because
|
||||
// io.openapiprocessor.openapi-processor 2022.5 does not yet support the openapiprocessor useSpringBoot3 config option.
|
||||
// TODO.impl: Upgrade to io.openapiprocessor.openapi-processor >= 2024.2
|
||||
// and use either `bean-validation: true` in api-mapping.yaml or `useSpringBoot3 true` (not sure where exactly).
|
||||
//task openApiGenerate(type: Copy) {
|
||||
// from layout.buildDirectory.dir("generated/sources/openapi-javax")
|
||||
// into layout.buildDirectory.dir("generated/sources/openapi")
|
||||
// filter { line -> line.replaceAll("javax", "jakarta") }
|
||||
}
|
||||
//compileJava.source layout.buildDirectory.dir("generated/sources/openapi")
|
||||
//compileJava.dependsOn openApiGenerate
|
||||
// openApiGenerate.dependsOn processSpring
|
||||
|
||||
// Spotless Code Formatting
|
||||
// spotless {
|
||||
// java {
|
||||
// removeUnusedImports()
|
||||
// leadingTabsToSpaces(4)
|
||||
// endWithNewline()
|
||||
// toggleOffOn()
|
||||
//
|
||||
// target fileTree(rootDir) {
|
||||
// include "**/*.java"
|
||||
// exclude "**/generated/**/*.java"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//project.tasks.check.dependsOn(spotlessCheck)
|
||||
// HACK: no idea why spotless uses the output of these tasks, but we get warnings without those
|
||||
//project.tasks.spotlessJava.dependsOn(
|
||||
// tasks.generateLicenseReport,
|
||||
// // tasks.pitest, TODO.test: PiTest currently does not work, needs to be fixed
|
||||
// tasks.jacocoTestReport,
|
||||
// tasks.processResources,
|
||||
// tasks.processTestResources)
|
||||
|
||||
// OWASP Dependency Security Test
|
||||
//dependencyCheck {
|
||||
// nvd {
|
||||
// apiKey = project.properties["OWASP_API_KEY"] // set it in ~/.gradle/gradle.properties
|
||||
// delay = 16000
|
||||
// }
|
||||
// format = "ALL"
|
||||
// suppressionFile = "etc/owasp-dependency-check-suppression.xml"
|
||||
// failOnError = true
|
||||
// failBuildOnCVSS = 5
|
||||
//}
|
||||
//project.tasks.check.dependsOn(dependencyCheckAnalyze)
|
||||
//project.tasks.dependencyCheckAnalyze.doFirst { // Why not doLast? See README.md!
|
||||
// println "OWASP Dependency Security Report: file:///${project.rootDir}/build/reports/dependency-check-report.html"
|
||||
//}
|
||||
|
||||
|
||||
// License Check
|
||||
//licenseReport {
|
||||
// excludeBoms = true
|
||||
// allowedLicensesFile = new File("$projectDir/etc/allowed-licenses.json")
|
||||
//}
|
||||
//project.tasks.check.dependsOn(checkLicense)
|
||||
|
||||
// HOWTO: run all tests except import- and scenario-tests: gw test
|
||||
//test {
|
||||
// finalizedBy jacocoTestReport // generate report after tests
|
||||
// excludes = [
|
||||
// "net.hostsharing.hsadminng.**.generated.**",
|
||||
// ]
|
||||
// useJUnitPlatform {
|
||||
// excludeTags "importOfficeData", "importHostingAssets", "scenarioTest"
|
||||
// }
|
||||
//}
|
||||
|
||||
// JaCoCo Test Code Coverage for unit-tests
|
||||
//jacoco {
|
||||
// toolVersion = "0.8.10"
|
||||
//}
|
||||
//jacocoTestReport {
|
||||
// dependsOn test
|
||||
// afterEvaluate {
|
||||
// classDirectories.setFrom(files(classDirectories.files.collect {
|
||||
// fileTree(dir: it, exclude: [
|
||||
// "net/hostsharing/hsadminng/**/generated/**/*.class",
|
||||
// "net/hostsharing/hsadminng/hs/HsadminNgApplication.class"
|
||||
// ])
|
||||
// }))
|
||||
// }
|
||||
// doFirst { // Why not doLast? See README.md!
|
||||
// println "HTML Jacoco Test Code Coverage Report: file://${reports.html.outputLocation.get()}/index.html"
|
||||
// }
|
||||
//}
|
||||
//project.tasks.check.dependsOn(jacocoTestCoverageVerification)
|
||||
//jacocoTestCoverageVerification {
|
||||
// violationRules {
|
||||
// rule {
|
||||
// limit {
|
||||
// minimum = 0.80 // TODO.test: improve instruction coverage
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// // element: PACKAGE, BUNDLE, CLASS, SOURCEFILE or METHOD
|
||||
// // counter: INSTRUCTION, BRANCH, LINE, COMPLEXITY, METHOD, or CLASS
|
||||
// // value: TOTALCOUNT, COVEREDCOUNT, MISSEDCOUNT, COVEREDRATIO or MISSEDRATIO
|
||||
//
|
||||
// rule {
|
||||
// element = "CLASS"
|
||||
// excludes = [
|
||||
// "net.hostsharing.hsadminng.**.generated.**",
|
||||
// "net.hostsharing.hsadminng.rbac.test.dom.TestDomainEntity",
|
||||
// "net.hostsharing.hsadminng.HsadminNgApplication",
|
||||
// "net.hostsharing.hsadminng.ping.PingController",
|
||||
// "net.hostsharing.hsadminng.rbac.generator.*",
|
||||
// "net.hostsharing.hsadminng.rbac.grant.RbacGrantsDiagramService",
|
||||
// "net.hostsharing.hsadminng.rbac.grant.RbacGrantsDiagramService.Node",
|
||||
// "net.hostsharing.hsadminng.**.*Repository",
|
||||
// "net.hostsharing.hsadminng.mapper.Mapper"
|
||||
// ]
|
||||
//
|
||||
// limit {
|
||||
// counter = "LINE"
|
||||
// value = "COVEREDRATIO"
|
||||
// minimum = 0.75 // TODO.test: improve line coverage
|
||||
// }
|
||||
// }
|
||||
// rule {
|
||||
// element = "METHOD"
|
||||
// excludes = [
|
||||
// "net.hostsharing.hsadminng.**.generated.**",
|
||||
// "net.hostsharing.hsadminng.HsadminNgApplication.main",
|
||||
// "net.hostsharing.hsadminng.ping.PingController.*"
|
||||
// ]
|
||||
//
|
||||
// limit {
|
||||
// counter = "BRANCH"
|
||||
// value = "COVEREDRATIO"
|
||||
// minimum = 0.00 // TODO.test: improve branch coverage
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
|
||||
// HOWTO: run all unit-tests which don"t need a database: gw-test unitTest
|
||||
//tasks.register("unitTest", Test) {
|
||||
// useJUnitPlatform {
|
||||
// excludeTags "importOfficeData", "importHostingAssets", "scenarioTest", "generalIntegrationTest",
|
||||
// "officeIntegrationTest", "bookingIntegrationTest", "hostingIntegrationTest"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "runs all unit-tests which do not need a database"
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
// HOWTO: run all integration tests which are not specific to a module, like base, rbac, config etc.
|
||||
//tasks.register("generalIntegrationTest", Test) {
|
||||
// useJUnitPlatform {
|
||||
// includeTags "generalIntegrationTest"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "runs integration tests which are not specific to a module, like base, rbac, config etc."
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
// HOWTO: run all integration tests of the office module: gw-test officeIntegrationTest
|
||||
//tasks.register("officeIntegrationTest", Test) {
|
||||
// useJUnitPlatform {
|
||||
// includeTags "officeIntegrationTest"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "runs integration tests of the office module"
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
// HOWTO: run all integration tests of the booking module: gw-test bookingIntegrationTest
|
||||
//tasks.register("bookingIntegrationTest", Test) {
|
||||
// useJUnitPlatform {
|
||||
// includeTags "bookingIntegrationTest"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "runs integration tests of the booking module"
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
// HOWTO: run all integration tests of the hosting module: gw-test hostingIntegrationTest
|
||||
//tasks.register("hostingIntegrationTest", Test) {
|
||||
// useJUnitPlatform {
|
||||
// includeTags "hostingIntegrationTest"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "runs integration tests of the hosting module"
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
//tasks.register("importOfficeData", Test) {
|
||||
// useJUnitPlatform {
|
||||
// includeTags "importOfficeData"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "run the import jobs as tests"
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
//tasks.register("importHostingAssets", Test) {
|
||||
// useJUnitPlatform {
|
||||
// includeTags "importHostingAssets"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "run the import jobs as tests"
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
//tasks.register("scenarioTest", Test) {
|
||||
// useJUnitPlatform {
|
||||
// includeTags "scenarioTest"
|
||||
// }
|
||||
//
|
||||
// group "verification"
|
||||
// description "run the import jobs as tests"
|
||||
//
|
||||
// mustRunAfter spotlessJava
|
||||
//}
|
||||
|
||||
// pitest mutation testing
|
||||
//pitest {
|
||||
// targetClasses = ["net.hostsharing.hsadminng.**"]
|
||||
// excludedClasses = [
|
||||
// "net.hostsharing.hsadminng.config.**",
|
||||
// // "net.hostsharing.hsadminng.**.*Controller",
|
||||
// "net.hostsharing.hsadminng.**.generated.**"
|
||||
// ]
|
||||
//
|
||||
// targetTests = ["net.hostsharing.hsadminng.**.*UnitTest", "net.hostsharing.hsadminng.**.*RestTest"]
|
||||
// excludedTestClasses = ["**AcceptanceTest*", "**IntegrationTest*", "**ImportOfficeData", "**ImportHostingAssets"]
|
||||
//
|
||||
// pitestVersion = "1.17.0"
|
||||
// junit5PluginVersion = "1.1.0"
|
||||
//
|
||||
// threads = 4
|
||||
//
|
||||
// // As Java unit tests are pretty pointless in our case, this maybe makes not much sense.
|
||||
// mutationThreshold = 71
|
||||
// coverageThreshold = 57
|
||||
// testStrengthThreshold = 87
|
||||
//
|
||||
// outputFormats = ["XML", "HTML"]
|
||||
// timestampedReports = false
|
||||
//}
|
||||
// project.tasks.check.dependsOn(project.tasks.pitest) TODO.test: PiTest currently does not work, needs to be fixed
|
||||
//project.tasks.pitest.doFirst { // Why not doLast? See README.md!
|
||||
// println "PiTest Mutation Report: file:///${project.rootDir}/build/reports/pitest/index.html"
|
||||
//}
|
||||
|
||||
|
||||
// Dependency Versions Upgrade
|
||||
//useLatestVersions {
|
||||
// finalizedBy check
|
||||
//}
|
||||
|
||||
//def isNonStable = { String version ->
|
||||
// def stableKeyword = ["RELEASE", "FINAL", "GA"].any { it -> version.toUpperCase().contains(it) }
|
||||
// def regex = /^[0-9,.v-]+(-r)?$/
|
||||
// return !stableKeyword && !(version ==~ regex)
|
||||
//}
|
||||
|
||||
//tasks.named("dependencyUpdates").configure {
|
||||
// rejectVersionIf {
|
||||
// isNonStable(it.candidate.version)
|
||||
// }
|
||||
//}
|
||||
|
||||
|
||||
// Generate HTML from Markdown scenario-test-reports using Pandoc:
|
||||
//tasks.register("convertMarkdownToHtml") {
|
||||
// description = "Generates HTML from Markdown scenario-test-reports using Pandoc."
|
||||
// group = "Conversion"
|
||||
//
|
||||
// // Define the template file and input directory
|
||||
// def templateFile = file("doc/scenarios/.template.html")
|
||||
//
|
||||
// // Task configuration and execution
|
||||
// doFirst {
|
||||
// // Check if pandoc is installed
|
||||
// try {
|
||||
// exec {
|
||||
// commandLine "pandoc", "--version"
|
||||
// }
|
||||
// } catch (Exception) {
|
||||
// throw new GradleException("Pandoc is not installed or not found in the system path.")
|
||||
// }
|
||||
//
|
||||
// // Check if the template file exists
|
||||
// if (!templateFile.exists()) {
|
||||
// throw new GradleException("Template file "doc/scenarios/.template.html" not found.")
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// doLast {
|
||||
// // Gather all Markdown files in the current directory
|
||||
// fileTree(dir: ".", include: "build/doc/scenarios/*.md").each { file ->
|
||||
// // Corrected way to create the output file path
|
||||
// def outputFile = new File(file.parent, file.name.replaceAll(/\.md$/, ".html"))
|
||||
//
|
||||
// // Execute pandoc for each markdown file
|
||||
// exec {
|
||||
// commandLine "pandoc", file.absolutePath, "--template", templateFile.absolutePath, "-o", outputFile.absolutePath
|
||||
// }
|
||||
//
|
||||
// println "Converted ${file.name} to ${outputFile.name}"
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
//convertMarkdownToHtml.dependsOn scenarioTest
|
||||
|
||||
// shortcut for compiling all files
|
||||
// tasks.register("compile") {
|
||||
// dependsOn "compileJava", "compileTestJava"
|
||||
// }
|
@@ -108,40 +108,6 @@ of the _Subscriber-Contact_ person (_Holder_) to the represented person (_Anchor_).
In addition, this relation is marked with the short name of the subscribed mailing list.


### Coop-Asset-Transactions (cooperative-asset transactions)

- positive value => cooperative assets increase
- negative value => cooperative assets decrease

**REVERSAL**: **correction** of an erroneous booking; a positive or negative value is possible

**DEPOSIT**: **incoming payment** from the member after subscribing cooperative shares, always a positive value

**DISBURSAL**: **outgoing payment** to the member after cancellation of cooperative shares, always a negative value

**TRANSFER**: **transfer** of cooperative assets to another member, always a negative value

**ADOPTION**: **adoption** of cooperative assets from another member, always a positive value

**CLEARING**: **clearing** of cooperative assets against debts of the member, always a negative value

**LOSS**: **loss** of cooperative assets when an equity loss is allocated after cancellation of cooperative shares, always a negative value

**LIMITATION**: **expiry** of cooperative assets when disbursal was not possible within the statutory period.


### Coop-Share-Transactions (cooperative-share transactions)

- positive value => cooperative shares increase
- negative value => cooperative shares decrease

**REVERSAL**: **correction** of an erroneous booking; a positive or negative value is possible

**SUBSCRIPTION**: **subscription** of cooperative shares, e.g. via a membership declaration, always a positive value

**CANCELLATION**: **cancellation** of cooperative shares, e.g. upon leaving the cooperative, always a negative value


#### Anchor / Relation-Anchor

see [Relation](#Relation)
@@ -116,7 +116,7 @@ classDiagram
    +BankAccount refundBankAccount
    +String defaultPrefix: mei
}
debitor-MeierGmbH o.. partner-MeierGmbH
debitor-MeierGmbH o-- partner-MeierGmbH
debitor-MeierGmbH *-- rel-MeierGmbH-Buha

class contactData-MeierGmbH-Buha {
gradle/wrapper/gradle-wrapper.jar (vendored, binary file not shown)
gradle/wrapper/gradle-wrapper.properties (vendored, 2 changes)
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
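The wrapper distribution URL and the `wrapper {}` block in build.gradle are normally bumped together by letting Gradle rewrite its own wrapper files; a sketch of the usual upgrade step (version number taken from the diff):

```bash
# regenerate gradlew, gradlew.bat and gradle-wrapper.properties for the new version
./gradlew wrapper --gradle-version 8.12.1 --distribution-type bin
./gradlew --version   # confirm the upgrade took effect
```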
gradlew (vendored, 6 changes)
@@ -15,6 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#

##############################################################################
#

@@ -55,7 +57,7 @@
#       Darwin, MinGW, and NonStop.
#
#   (3) This script is generated from the Groovy template
#       https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
#       https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
#       within the Gradle project.
#
#       You can find Gradle at https://github.com/gradle/gradle/.

@@ -84,7 +86,7 @@ done
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
gradlew.bat (vendored, 22 changes)
@@ -13,6 +13,8 @@
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem

@if "%DEBUG%"=="" @echo off
@rem ##########################################################################

@@ -43,11 +45,11 @@ set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2

goto fail

@@ -57,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto execute

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2

goto fail
@@ -1,7 +1,7 @@
pluginManagement {
    repositories {
        maven { url 'https://repo.spring.io/milestone' }
        maven { url 'https://repo.spring.io/snapshot' }
        maven { url = 'https://repo.spring.io/milestone' }
        maven { url = 'https://repo.spring.io/snapshot' }
        gradlePluginPortal()
        mavenCentral()
    }
@@ -1,6 +1,6 @@
--liquibase formatted sql

-- TODO.impl: check if we really need the restricted user
-- FIXME: check if we really need the restricted user

-- ============================================================================
-- NUMERIC-HASH-FUNCTIONS
@@ -22,12 +22,13 @@ select (objectTable || '#' || objectIdName || ':' || roleType) as roleIdName, *
--//

-- ============================================================================
--changeset michael.hoennig:rbac-views-ROLE-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-views-ROLE-RESTRICTED-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
    Creates a view to the role table with row-level limitation
    based on the grants of the current user or assumed roles.
 */
drop view if exists rbac.role_rv;
create or replace view rbac.role_rv as
select *
    -- @formatter:off

@@ -105,7 +106,7 @@ create or replace view rbac.grant_ev as


-- ============================================================================
--changeset michael.hoennig:rbac-views-GRANT-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-views-GRANT-RESTRICTED-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
    Creates a view to the grants table with row-level limitation

@@ -221,12 +222,13 @@ select distinct *


-- ============================================================================
--changeset michael.hoennig:rbac-views-USER-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-views-USER-RESTRICTED-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
    Creates a view to the users table with row-level limitation
    based on the grants of the current user or assumed roles.
 */
drop view if exists rbac.subject_rv;
create or replace view rbac.subject_rv as
select distinct *
    -- @formatter:off

@@ -314,13 +316,14 @@ execute function rbac.delete_subject_tf();
--/

-- ============================================================================
--changeset michael.hoennig:rbac-views-OWN-GRANTED-PERMISSIONS-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-views-OWN-GRANTED-PERMISSIONS-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
    Creates a view to all permissions granted to the current user or
    based on the grants of the current user or assumed roles.
 */
-- @formatter:off
drop view if exists rbac.own_granted_permissions_rv;
create or replace view rbac.own_granted_permissions_rv as
select r.uuid as roleuuid, p.uuid as permissionUuid,
       (r.objecttable || ':' || r.objectidname || ':' || r.roletype) as roleName, p.op,
@@ -111,7 +111,7 @@ end; $$;


-- ============================================================================
--changeset michael.hoennig:rbac-generators-IDENTITY-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-generators-IDENTITY-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------

create or replace procedure rbac.generateRbacIdentityViewFromQuery(targetTable text, sqlQuery text)

@@ -171,7 +171,7 @@ end; $$;


-- ============================================================================
--changeset michael.hoennig:rbac-generators-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-generators-RESTRICTED-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------

create or replace procedure rbac.generateRbacRestrictedView(targetTable text, orderBy text, columnUpdates text = null, columnNames text = '*')
@@ -1,7 +1,7 @@
--liquibase formatted sql

-- ============================================================================
--changeset michael.hoennig:rbac-global-OBJECT runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-global-OBJECT endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
    The purpose of this table is to provide root business objects

@@ -11,12 +11,12 @@
    In production databases, there is only a single row in this table,
    in test stages, there can be one row for each test data realm.
 */
create table if not exists rbac.global
create table rbac.global
(
    uuid uuid primary key references rbac.object (uuid) on delete cascade,
    name varchar(63) unique
);
create unique index if not exists Global_Singleton on rbac.global ((0));
create unique index Global_Singleton on rbac.global ((0));

grant select on rbac.global to ${HSADMINNG_POSTGRES_RESTRICTED_USERNAME};
--//

@@ -75,12 +75,13 @@ $$;


-- ============================================================================
--changeset michael.hoennig:rbac-global-IDENTITY-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
--changeset michael.hoennig:rbac-global-IDENTITY-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------

/*
    Creates a view to the rbac.global object table which maps the identifying name to the objectUuid.
 */
drop view if exists rbac.global_iv;
create or replace view rbac.global_iv as
select target.uuid, target.name as idName
from rbac.global as target;
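The `Global_Singleton` index works because every row evaluates the indexed expression `(0)` to the same constant, so any second row collides with the first. A quick demonstration of the pattern on a scratch table (assumes a local `psql`; the table name is made up for illustration):

```bash
psql <<'SQL'
create temporary table singleton_demo (name text);
-- same trick as Global_Singleton: a unique index over a constant expression
create unique index singleton_demo_single_row on singleton_demo ((0));
insert into singleton_demo values ('first');   -- succeeds
insert into singleton_demo values ('second');  -- fails with a duplicate-key error
SQL
```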
@@ -32,41 +32,6 @@ create table if not exists hs_office.membership
--//


-- ============================================================================
--changeset michael.hoennig:hs-office-membership-SINGLE-MEMBERSHIP-CHECK endDelimiter:--//
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION hs_office.validate_membership_validity()
RETURNS trigger AS $$
DECLARE
    partnerNumber int;
BEGIN
    IF EXISTS (
        SELECT 1
        FROM hs_office.membership
        WHERE partnerUuid = NEW.partnerUuid
          AND uuid <> NEW.uuid
          AND NEW.validity && validity
    ) THEN
        SELECT p.partnerNumber INTO partnerNumber
        FROM hs_office.partner AS p
        WHERE p.uuid = NEW.partnerUuid;
        RAISE EXCEPTION 'Membership validity ranges overlap for partnerUuid %, partnerNumber %', NEW.partnerUuid, partnerNumber;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trg_validate_membership_validity
    BEFORE INSERT OR UPDATE ON hs_office.membership
    FOR EACH ROW
    EXECUTE FUNCTION hs_office.validate_membership_validity();


--//


-- ============================================================================
--changeset michael.hoennig:hs-office-membership-MAIN-TABLE-JOURNAL endDelimiter:--//
-- ----------------------------------------------------------------------------
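The validity check shown above relies on PostgreSQL's range-overlap operator `&&` between `daterange` values. A quick way to see the comparison it performed (assumes a local `psql`; the dates are illustrative):

```bash
# true: the two validity ranges overlap, which the trigger above would have rejected
psql -c "select daterange('2022-10-01', '2024-12-31', '[)') && daterange('2024-01-01', null, '[]');"
```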
@ -10,8 +10,7 @@
|
||||
*/
|
||||
create or replace procedure hs_office.membership_create_test_data(
|
||||
forPartnerNumber numeric(5),
|
||||
newMemberNumberSuffix char(2),
|
||||
validity daterange)
|
||||
newMemberNumberSuffix char(2) )
|
||||
language plpgsql as $$
|
||||
declare
|
||||
relatedPartner hs_office.partner;
|
||||
@ -23,7 +22,7 @@ begin
|
||||
raise notice '- using partner (%): %', relatedPartner.uuid, relatedPartner;
|
||||
insert
|
||||
into hs_office.membership (uuid, partneruuid, memberNumberSuffix, validity, status)
|
||||
values (uuid_generate_v4(), relatedPartner.uuid, newMemberNumberSuffix, validity, 'ACTIVE');
|
||||
values (uuid_generate_v4(), relatedPartner.uuid, newMemberNumberSuffix, daterange('20221001' , null, '[]'), 'ACTIVE');
|
||||
end; $$;
|
||||
--//
|
||||
|
||||
@ -36,9 +35,9 @@ do language plpgsql $$
|
||||
begin
|
||||
call base.defineContext('creating Membership test-data', null, 'superuser-alex@hostsharing.net', 'rbac.global#global:ADMIN');
|
||||
|
||||
call hs_office.membership_create_test_data(10001, '01', daterange('20221001' , '20241231', '[)'));
|
||||
call hs_office.membership_create_test_data(10002, '02', daterange('20221001' , '20251231', '[]'));
|
||||
call hs_office.membership_create_test_data(10003, '03', daterange('20221001' , null, '[]'));
|
||||
call hs_office.membership_create_test_data(10001, '01');
|
||||
call hs_office.membership_create_test_data(10002, '02');
|
||||
call hs_office.membership_create_test_data(10003, '03');
|
||||
end;
|
||||
$$;
|
||||
--//
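The bound flags passed to daterange above decide whether the end date itself still counts as a membership day; a minimal psql sketch (independent of this changeset) of how PostgreSQL interprets them:

select daterange('20221001', '20241231', '[)') @> date '2024-12-31';  -- false: upper bound exclusive
select daterange('20221001', '20251231', '[]') @> date '2025-12-31';  -- true: upper bound inclusive
select daterange('20221001', null, '[]') @> date '2099-01-01';        -- true: open-ended upper bound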
|
||||
|
@ -25,7 +25,7 @@ create table if not exists hs_booking.item
|
||||
caption varchar(80) not null,
|
||||
resources jsonb not null,
|
||||
|
||||
constraint booking_item_has_project_or_parent_item
|
||||
constraint booking_item_has_project_or_parent_asset
|
||||
check (projectUuid is not null or parentItemUuid is not null)
|
||||
);
|
||||
--//
|
||||
|
@ -1,38 +0,0 @@
|
||||
--liquibase formatted sql
|
||||
|
||||
-- ============================================================================
|
||||
--changeset michael.hoennig:hs-global-office-test-ddl-cleanup context:hosting-asset-import endDelimiter:--//
|
||||
-- ----------------------------------------------------------------------------
|
||||
|
||||
DROP PROCEDURE IF EXISTS hs_office.bankaccount_create_test_data(IN givenholder character varying, IN giveniban character varying, IN givenbic character varying);
|
||||
DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN contcaption character varying);
|
||||
DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN startcount integer, IN endcount integer);
|
||||
DROP PROCEDURE IF EXISTS hs_office.coopassettx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character);
|
||||
DROP PROCEDURE IF EXISTS hs_office.coopsharetx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character);
|
||||
DROP PROCEDURE IF EXISTS hs_office.debitor_create_test_data(IN withdebitornumbersuffix numeric, IN forpartnerpersonname character varying, IN forbillingcontactcaption character varying, IN withdefaultprefix character varying);
|
||||
DROP PROCEDURE IF EXISTS hs_office.membership_create_test_data(IN forpartnernumber numeric, IN newmembernumbersuffix character);
|
||||
DROP PROCEDURE IF EXISTS hs_office.partner_create_test_data(IN mandanttradename character varying, IN newpartnernumber numeric, IN partnerpersonname character varying, IN contactcaption character varying);
|
||||
DROP PROCEDURE IF EXISTS hs_office.person_create_test_data(IN newpersontype hs_office.persontype, IN newtradename character varying, IN newfamilyname character varying, IN newgivenname character varying);
|
||||
DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN startcount integer, IN endcount integer);
|
||||
DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN holderpersonname character varying, IN relationtype hs_office.relationtype, IN anchorpersonname character varying, IN contactcaption character varying, IN mark character varying);
|
||||
DROP PROCEDURE IF EXISTS hs_office.sepamandate_create_test_data(IN forpartnernumber numeric, IN fordebitorsuffix character, IN foriban character varying, IN withreference character varying);
|
||||
--//
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
--changeset michael.hoennig:hs-global-rbac-test-ddl-cleanup context:hosting-asset-import endDelimiter:--//
|
||||
-- ----------------------------------------------------------------------------
|
||||
|
||||
DROP SCHEMA IF EXISTS rbactest CASCADE;
|
||||
--//
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
--changeset michael.hoennig:hs-global-rbac-test-dml-cleanup context:hosting-asset-import endDelimiter:--//
|
||||
-- ----------------------------------------------------------------------------
|
||||
|
||||
call base.defineContext('9800-cleanup', null, '${HSADMINNG_SUPERUSER}', null);
|
||||
|
||||
DELETE FROM rbac.subject WHERE name='superuser-alex@hostsharing.net';
|
||||
DELETE FROM rbac.subject WHERE name='superuser-fran@hostsharing.net';
|
||||
--//
|
@ -212,10 +212,6 @@ databaseChangeLog:
|
||||
file: db/changelog/9-hs-global/9000-statistics.sql
|
||||
context: "!only-office"
|
||||
|
||||
- include:
|
||||
file: db/changelog/9-hs-global/9800-cleanup.sql
|
||||
context: "without-test-data"
|
||||
|
||||
- include:
|
||||
file: db/changelog/9-hs-global/9100-hs-integration-schema.sql
|
||||
- include:
|
||||
|
File diff suppressed because it is too large
@ -4,8 +4,6 @@ import com.opencsv.CSVParserBuilder;
|
||||
import com.opencsv.CSVReader;
|
||||
import com.opencsv.CSVReaderBuilder;
|
||||
import lombok.SneakyThrows;
|
||||
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem;
|
||||
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProject;
|
||||
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAsset;
|
||||
import net.hostsharing.hsadminng.rbac.context.ContextBasedTest;
|
||||
import net.hostsharing.hsadminng.persistence.BaseEntity;
|
||||
@ -16,9 +14,6 @@ import org.junit.jupiter.api.extension.TestWatcher;
|
||||
import org.opentest4j.AssertionFailedError;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.core.io.AbstractResource;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.core.io.FileSystemResource;
|
||||
import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||
import org.springframework.core.io.Resource;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
@ -29,7 +24,6 @@ import jakarta.servlet.http.HttpServletRequest;
|
||||
import jakarta.validation.ValidationException;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
@ -122,16 +116,10 @@ public class CsvDataImport extends ContextBasedTest {
|
||||
return stream(lines.getFirst()).map(String::trim).toArray(String[]::new);
|
||||
}
|
||||
|
||||
public static @NotNull AbstractResource resourceOf(final String sqlFile) {
|
||||
return new File(sqlFile).exists()
|
||||
? new FileSystemResource(sqlFile)
|
||||
: new ClassPathResource(sqlFile);
|
||||
}
|
||||
|
||||
protected Reader resourceReader(@NotNull final String resourcePath) {
|
||||
try {
|
||||
return new InputStreamReader(requireNonNull(resourceOf(resourcePath).getInputStream()));
|
||||
} catch (final Exception exc) {
|
||||
return new InputStreamReader(requireNonNull(getClass().getClassLoader().getResourceAsStream(resourcePath)));
|
||||
} catch (Exception exc) {
|
||||
throw new AssertionFailedError("cannot open '" + resourcePath + "'");
|
||||
}
|
||||
}
|
||||
@ -167,78 +155,37 @@ public class CsvDataImport extends ContextBasedTest {
|
||||
return record;
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public void persistViaSql(final Integer id, final HsBookingProject entity) {
|
||||
entity.setUuid(UUID.randomUUID());
|
||||
public <T extends BaseEntity> T persist(final Integer id, final T entity) {
|
||||
try {
|
||||
if (entity instanceof HsHostingAsset ha) {
|
||||
//noinspection unchecked
|
||||
return (T) persistViaSql(id, ha);
|
||||
}
|
||||
return persistViaEM(id, entity);
|
||||
} catch (Exception exc) {
|
||||
errors.add("failed to persist #" + entity.hashCode() + ": " + entity);
|
||||
errors.add(exc.toString());
|
||||
}
|
||||
return entity;
|
||||
}
|
||||
|
||||
final var query = em.createNativeQuery("""
|
||||
insert into hs_booking.project(
|
||||
uuid,
|
||||
version,
|
||||
debitorUuid,
|
||||
caption)
|
||||
values (
|
||||
:uuid,
|
||||
:version,
|
||||
:debitorUuid,
|
||||
:caption)
|
||||
""")
|
||||
.setParameter("uuid", entity.getUuid())
|
||||
.setParameter("version", entity.getVersion())
|
||||
.setParameter("debitorUuid", entity.getDebitor().getUuid())
|
||||
.setParameter("caption", entity.getCaption());
|
||||
|
||||
final var count = query.executeUpdate();
|
||||
logError(() -> {
|
||||
assertThat(count).describedAs("persisting BookingProject #" + id + " failed: " + entity).isEqualTo(1);
|
||||
});
|
||||
public <T extends BaseEntity> T persistViaEM(final Integer id, final T entity) {
|
||||
if (em.contains(entity)) {
|
||||
return entity;
|
||||
}
|
||||
try {
|
||||
em.persist(entity);
|
||||
em.flush(); // makes it a bit slower, but produces better error messages
|
||||
System.out.println("persisted #" + id + " as " + entity.getUuid());
|
||||
return entity;
|
||||
} catch (final Exception exc) {
|
||||
System.err.println("persist failed for #" + id + " as " + entity);
|
||||
throw exc; // for breakpoints
|
||||
}
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public void persistViaSql(final Integer id, final HsBookingItem entity) {
|
||||
if (entity.getUuid() != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
entity.setUuid(UUID.randomUUID());
|
||||
|
||||
final var query = em.createNativeQuery("""
|
||||
insert into hs_booking.item(
|
||||
uuid,
|
||||
version,
|
||||
type,
|
||||
projectUuid,
|
||||
parentItemUuid,
|
||||
validity,
|
||||
caption,
|
||||
resources)
|
||||
values (
|
||||
:uuid,
|
||||
:version,
|
||||
:type,
|
||||
:projectUuid,
|
||||
:parentItemUuid,
|
||||
:validity,
|
||||
:caption,
|
||||
cast(:resources as jsonb))
|
||||
""")
|
||||
.setParameter("uuid", entity.getUuid())
|
||||
.setParameter("version", entity.getVersion())
|
||||
.setParameter("projectUuid", ofNullable(entity.getProject()).map(BaseEntity::getUuid).orElse(null))
|
||||
.setParameter("type", entity.getType().name())
|
||||
.setParameter("parentItemUuid", ofNullable(entity.getParentItem()).map(BaseEntity::getUuid).orElse(null))
|
||||
.setParameter("validity", entity.getValidity())
|
||||
.setParameter("caption", entity.getCaption())
|
||||
.setParameter("resources", entity.getResources().toString().replace("\t", "\\t"));
|
||||
|
||||
final var count = query.executeUpdate();
|
||||
logError(() -> {
|
||||
assertThat(count).describedAs("persisting BookingItem #" + id + " failed: " + entity).isEqualTo(1);
|
||||
});
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public HsHostingAsset persistViaSql(final Integer id, final HsHostingAsset entity) {
|
||||
public BaseEntity<HsHostingAsset> persistViaSql(final Integer id, final HsHostingAsset entity) {
|
||||
if (entity.getUuid() == null) {
|
||||
entity.setUuid(UUID.randomUUID());
|
||||
}
|
||||
@ -282,7 +229,7 @@ public class CsvDataImport extends ContextBasedTest {
|
||||
|
||||
final var count = query.executeUpdate();
|
||||
logError(() -> {
|
||||
assertThat(count).describedAs("persisting HostingAsset #" + id + " failed: " + entity).isEqualTo(1);
|
||||
assertThat(count).isEqualTo(1);
|
||||
});
|
||||
return entity;
|
||||
}
|
||||
@ -301,22 +248,63 @@ public class CsvDataImport extends ContextBasedTest {
|
||||
return json;
|
||||
}
|
||||
|
||||
protected void makeSureThatTheImportAdminUserExists() {
|
||||
protected void deleteTestDataFromHsOfficeTables() {
|
||||
jpaAttempt.transacted(() -> {
|
||||
context(null);
|
||||
em.createNativeQuery("""
|
||||
do language plpgsql $$
|
||||
declare
|
||||
admins uuid;
|
||||
begin
|
||||
if not exists (select 1 from rbac.subject where name = '${rbacSuperuser}') then
|
||||
admins = rbac.findRoleId(rbac.global_ADMIN());
|
||||
call rbac.grantRoleToSubjectUnchecked(admins, admins, rbac.create_subject('${rbacSuperuser}'));
|
||||
end if;
|
||||
end;
|
||||
$$;
|
||||
""".replace("${rbacSuperuser}", rbacSuperuser))
|
||||
context(rbacSuperuser);
|
||||
// TODO.perf: could we instead skip creating test-data based on an env var?
|
||||
em.createNativeQuery("delete from hs_hosting.asset where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_hosting.asset_ex where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_booking.item where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_booking.item_ex where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_booking.project where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_booking.project_ex where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.coopassettx where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.coopassettx_legacy_id where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.coopsharetx where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.coopsharetx_legacy_id where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.membership where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.sepamandate where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.sepamandate_legacy_id where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.debitor where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.bankaccount where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.partner where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.partner_details where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.relation where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.contact where true").executeUpdate();
|
||||
em.createNativeQuery("delete from hs_office.person where true").executeUpdate();
|
||||
}).assertSuccessful();
|
||||
}
|
||||
|
||||
protected void resetHsOfficeSequences() {
|
||||
jpaAttempt.transacted(() -> {
|
||||
context(rbacSuperuser);
|
||||
em.createNativeQuery("alter sequence hs_office.contact_legacy_id_seq restart with 1000000000;").executeUpdate();
|
||||
em.createNativeQuery("alter sequence hs_office.coopassettx_legacy_id_seq restart with 1000000000;")
|
||||
.executeUpdate();
|
||||
em.createNativeQuery("alter sequence public.hs_office.coopsharetx_legacy_id_seq restart with 1000000000;")
|
||||
.executeUpdate();
|
||||
em.createNativeQuery("alter sequence public.hs_office.partner_legacy_id_seq restart with 1000000000;")
|
||||
.executeUpdate();
|
||||
em.createNativeQuery("alter sequence public.hs_office.sepamandate_legacy_id_seq restart with 1000000000;")
|
||||
.executeUpdate();
|
||||
});
|
||||
}
|
||||
|
||||
protected void deleteFromTestTables() {
|
||||
jpaAttempt.transacted(() -> {
|
||||
context(rbacSuperuser);
|
||||
em.createNativeQuery("delete from rbactest.domain where true").executeUpdate();
|
||||
em.createNativeQuery("delete from rbactest.package where true").executeUpdate();
|
||||
em.createNativeQuery("delete from rbactest.customer where true").executeUpdate();
|
||||
}).assertSuccessful();
|
||||
}
|
||||
|
||||
protected void deleteFromCommonTables() {
|
||||
jpaAttempt.transacted(() -> {
|
||||
context(rbacSuperuser);
|
||||
em.createNativeQuery("delete from rbac.subject_rv where name not like 'superuser-%'").executeUpdate();
|
||||
em.createNativeQuery("delete from base.tx_journal where true").executeUpdate();
|
||||
em.createNativeQuery("delete from base.tx_context where true").executeUpdate();
|
||||
}).assertSuccessful();
|
||||
}
|
||||
|
||||
|
@ -7,12 +7,10 @@ import net.hostsharing.hsadminng.context.Context;
|
||||
import net.hostsharing.hsadminng.hash.HashGenerator;
|
||||
import net.hostsharing.hsadminng.hash.HashGenerator.Algorithm;
|
||||
import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorEntity;
|
||||
import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorRepository;
|
||||
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem;
|
||||
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemRealEntity;
|
||||
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemType;
|
||||
import net.hostsharing.hsadminng.hs.booking.item.validators.HsBookingItemEntityValidatorRegistry;
|
||||
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProject;
|
||||
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProjectRealEntity;
|
||||
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetRealEntity;
|
||||
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType;
|
||||
@ -29,19 +27,13 @@ import org.junit.jupiter.api.Tag;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestMethodOrder;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
|
||||
import org.springframework.jdbc.core.JdbcTemplate;
|
||||
import org.springframework.orm.jpa.EntityManagerFactoryInfo;
|
||||
import org.springframework.test.annotation.Commit;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Reader;
|
||||
import java.net.IDN;
|
||||
import java.util.ArrayList;
|
||||
@ -52,12 +44,10 @@ import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static java.util.Arrays.stream;
|
||||
import static java.util.Map.entry;
|
||||
import static java.util.Map.ofEntries;
|
||||
@ -86,22 +76,56 @@ import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.UNIX
|
||||
import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assumptions.assumeThat;
|
||||
import static org.springframework.util.FileCopyUtils.copyToByteArray;
|
||||
|
||||
/*
|
||||
* This 'test' includes the complete legacy 'office' data import.
|
||||
*
|
||||
* There is no code in 'main' because the import is not needed at normal runtime.
|
||||
* There is some test data in Java resources to verify the data conversion.
|
||||
* For a real import a main method will be added later
|
||||
* which reads CSV files from the file system.
|
||||
*
|
||||
* When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
|
||||
*
|
||||
* In a real Hostsharing environment, these are created via (the old) hsadmin:
|
||||
|
||||
CREATE USER hsh99_admin WITH PASSWORD 'password';
|
||||
CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
|
||||
REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
|
||||
ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;
|
||||
|
||||
CREATE USER hsh99_restricted WITH PASSWORD 'password';
|
||||
|
||||
\c hsh99_hsadminng
|
||||
|
||||
GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;
|
||||
|
||||
* Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- maybe something like that is needed for the 2nd user
|
||||
-- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;
|
||||
|
||||
* Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.
|
||||
|
||||
* To finally import the office data, run:
|
||||
*
|
||||
* gw-importHostingAssets # comes from .aliases file and uses .environment
|
||||
*/
|
||||
@Tag("importHostingAssets")
|
||||
@DataJpaTest(properties = {
|
||||
"spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importHostingAssetsTC}",
|
||||
"spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
|
||||
"spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
|
||||
"hsadminng.superuser=${HSADMINNG_SUPERUSER:import-superuser@hostsharing.net}",
|
||||
"spring.liquibase.enabled=false" // @Sql should go first, Liquibase will be initialized programmatically
|
||||
"hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
|
||||
})
|
||||
@DirtiesContext
|
||||
@Import({ Context.class, JpaAttempt.class, LiquibaseConfig.class })
|
||||
@ActiveProfiles({ "without-test-data", "liquibase-migration", "hosting-asset-import" })
|
||||
@Import({ Context.class, JpaAttempt.class })
|
||||
@ActiveProfiles("without-test-data")
|
||||
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||
@ExtendWith(OrderedDependedTestsExtension.class)
|
||||
public class ImportHostingAssets extends CsvDataImport {
|
||||
public class ImportHostingAssets extends BaseOfficeDataImport {
|
||||
|
||||
private static final Set<String> NOBODY_SUBSTITUTES = Set.of("nomail", "bounce");
|
||||
|
||||
@ -132,55 +156,13 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
|
||||
final ObjectMapper jsonMapper = new ObjectMapper();
|
||||
|
||||
@Autowired
|
||||
HsBookingDebitorRepository debitorRepo;
|
||||
|
||||
@Autowired
|
||||
LiquibaseMigration liquibase;
|
||||
|
||||
@Value("${HSADMINNG_OFFICE_DATA_SQL_FILE:/db/released-only-office-schema-with-import-test-data.sql}")
|
||||
String officeSchemaAndDataSqlFile;
|
||||
|
||||
@Test
|
||||
@Order(11000)
|
||||
@SneakyThrows
|
||||
void liquibaseMigrationForBookingAndHosting() {
|
||||
executeSqlScript(officeSchemaAndDataSqlFile);
|
||||
liquibase.assertReferenceStatusAfterRestore(286, "hs-booking-SCHEMA");
|
||||
makeSureThatTheImportAdminUserExists();
|
||||
liquibase.runWithContexts("migration", "without-test-data");
|
||||
liquibase.assertThatCurrentMigrationsGotApplied(331, "hs-booking-SCHEMA");
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(11010)
|
||||
void createBookingProjects() {
|
||||
|
||||
record PartnerLegacyIdMapping(UUID uuid, Integer bp_id) {}
|
||||
record DebitorRecord(UUID uuid, Integer version, String defaultPrefix) {}
|
||||
|
||||
final var partnerLegacyIdMappings = em.createNativeQuery(
|
||||
"""
|
||||
select debitor.uuid, pid.bp_id
|
||||
from hs_office.debitor debitor
|
||||
join hs_office.relation debitorRel on debitor.debitorRelUuid=debitorRel.uuid
|
||||
join hs_office.relation partnerRel on partnerRel.holderUuid=debitorRel.anchorUuid
|
||||
join hs_office.partner partner on partner.partnerRelUuid=partnerRel.uuid
|
||||
join hs_office.partner_legacy_id pid on partner.uuid=pid.uuid
|
||||
""", PartnerLegacyIdMapping.class).getResultList();
|
||||
//noinspection unchecked
|
||||
final var debitorUuidToLegacyBpIdMap = ((List<PartnerLegacyIdMapping>) partnerLegacyIdMappings).stream()
|
||||
.collect(toMap(row -> row.uuid, row -> row.bp_id));
|
||||
final var debitors = em.createNativeQuery(
|
||||
"select debitor.uuid, debitor.version, debitor.defaultPrefix from hs_office.debitor debitor",
|
||||
DebitorRecord.class).getResultList();
|
||||
//noinspection unchecked
|
||||
((List<DebitorRecord>) debitors).forEach(debitor -> {
|
||||
bookingProjects.put(
|
||||
debitorUuidToLegacyBpIdMap.get(debitor.uuid), HsBookingProjectRealEntity.builder()
|
||||
.version(debitor.version)
|
||||
.caption(debitor.defaultPrefix + " default project")
|
||||
.debitor(em.find(HsBookingDebitorEntity.class, debitor.uuid))
|
||||
debitors.forEach((id, debitor) -> {
|
||||
bookingProjects.put(id, HsBookingProjectRealEntity.builder()
|
||||
.caption(debitor.getDefaultPrefix() + " default project")
|
||||
.debitor(em.find(HsBookingDebitorEntity.class, debitor.getUuid()))
|
||||
.build());
|
||||
});
|
||||
}
|
||||
@ -746,12 +728,9 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
if (isImportingControlledTestData()) {
|
||||
expectError("zonedata dom_owner of mellis.de is old00 but expected to be mim00");
|
||||
expectError("\nexpected: \"vm1068\"\n but was: \"vm1093\"");
|
||||
expectError(
|
||||
"['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]");
|
||||
expectError(
|
||||
"['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
|
||||
expectError(
|
||||
"['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
|
||||
expectError("['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]");
|
||||
expectError("['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
|
||||
expectError("['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
|
||||
}
|
||||
this.assertNoErrors();
|
||||
}
|
||||
@ -759,7 +738,7 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
// --------------------------------------------------------------------------------------------
|
||||
|
||||
@Test
|
||||
@Order(19100)
|
||||
@Order(19000)
|
||||
@Commit
|
||||
void persistBookingProjects() {
|
||||
|
||||
@ -767,12 +746,12 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
|
||||
jpaAttempt.transacted(() -> {
|
||||
context(rbacSuperuser);
|
||||
bookingProjects.forEach(this::persistViaSql);
|
||||
bookingProjects.forEach(this::persist);
|
||||
}).assertSuccessful();
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(19110)
|
||||
@Order(19010)
|
||||
@Commit
|
||||
void persistBookingItems() {
|
||||
|
||||
@ -1092,14 +1071,13 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
|
||||
final var haCount = jpaAttempt.transacted(() -> {
|
||||
context(rbacSuperuser, "hs_booking.project#D-1000300-mimdefaultproject:AGENT");
|
||||
return (Integer) em.createNativeQuery(
|
||||
"select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'",
|
||||
Integer.class)
|
||||
return (Integer) em.createNativeQuery("select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'", Integer.class)
|
||||
.getSingleResult();
|
||||
}).assertSuccessful().returnedValue();
|
||||
assertThat(haCount).isEqualTo(68);
|
||||
}
|
||||
|
||||
|
||||
// ============================================================================================
|
||||
|
||||
@Test
|
||||
@ -1127,7 +1105,7 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
if (bi.getParentItem() != null) {
|
||||
persistRecursively(key, HsBookingItemEntityValidatorRegistry.validated(em, bi.getParentItem()));
|
||||
}
|
||||
persistViaSql(key, HsBookingItemEntityValidatorRegistry.validated(em, bi));
|
||||
persist(key, HsBookingItemEntityValidatorRegistry.validated(em, bi));
|
||||
}
|
||||
|
||||
private void persistHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) {
|
||||
@ -1151,7 +1129,7 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
"'EMAIL_ADDRESS:.*\\.config\\.target' .*"
|
||||
)
|
||||
.prepareForSave()
|
||||
.saveUsing(entity -> persistViaSql(entry.getKey(), entity))
|
||||
.saveUsing(entity -> persist(entry.getKey(), entity))
|
||||
.validateContext()
|
||||
));
|
||||
}
|
||||
@ -1243,7 +1221,9 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
bookingItems.put(packet_id, bookingItem);
|
||||
final var haType = determineHaType(basepacket_code);
|
||||
|
||||
logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || defaultPrefix(bookingItem)
|
||||
logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || bookingItem.getRelatedProject()
|
||||
.getDebitor()
|
||||
.getDefaultPrefix()
|
||||
.equals("hsh"))
|
||||
.as("packet.free only supported for Hostsharing-Assets and ManagedWebspace in customer-ManagedServer, but is set for "
|
||||
+ packet_name)
|
||||
@ -1282,14 +1262,14 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
managedWebspace.setParentAsset(parentAsset);
|
||||
|
||||
if (parentAsset.getRelatedProject() != managedWebspace.getRelatedProject()
|
||||
&& managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00) {
|
||||
&& managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00 ) {
|
||||
assertThat(managedWebspace.getIdentifier()).startsWith("xyz");
|
||||
final var hshDebitor = managedWebspace.getBookingItem().getProject().getDebitor();
|
||||
final var newProject = HsBookingProjectRealEntity.builder()
|
||||
.debitor(hshDebitor)
|
||||
.caption(parentAsset.getIdentifier() + " Monitor")
|
||||
.build();
|
||||
bookingProjects.put(Collections.max(bookingProjects.keySet()) + 1, newProject);
|
||||
bookingProjects.put(Collections.max(bookingProjects.keySet())+1, newProject);
|
||||
managedWebspace.getBookingItem().setProject(newProject);
|
||||
} else {
|
||||
managedWebspace.getBookingItem().setParentItem(parentAsset.getBookingItem());
|
||||
@ -1298,13 +1278,6 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
});
|
||||
}
|
||||
|
||||
private String defaultPrefix(final HsBookingItem bookingItem) {
|
||||
return ofNullable(bookingItem.getProject())
|
||||
.map(HsBookingProject::getDebitor)
|
||||
.map(HsBookingDebitorEntity::getDefaultPrefix)
|
||||
.orElse("<no default prefix for BI: " + bookingItem.getCaption() + ">");
|
||||
}
|
||||
|
||||
private void importPacketComponents(final String[] header, final List<String[]> records) {
|
||||
final var columns = new Columns(header);
|
||||
records.stream()
|
||||
@ -1651,23 +1624,18 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
entry("includes", options.contains("includes")),
|
||||
entry("letsencrypt", options.contains("letsencrypt")),
|
||||
entry("multiviews", options.contains("multiviews")),
|
||||
entry(
|
||||
"subdomains", withDefault(rec.getString("valid_subdomain_names"), "*")
|
||||
entry("subdomains", withDefault(rec.getString("valid_subdomain_names"), "*")
|
||||
.split(",")),
|
||||
entry(
|
||||
"fcgi-php-bin", withDefault(
|
||||
entry("fcgi-php-bin", withDefault(
|
||||
rec.getString("fcgi_php_bin"),
|
||||
httpDomainSetupValidator.getProperty("fcgi-php-bin").defaultValue())),
|
||||
entry(
|
||||
"passenger-nodejs", withDefault(
|
||||
entry("passenger-nodejs", withDefault(
|
||||
rec.getString("passenger_nodejs"),
|
||||
httpDomainSetupValidator.getProperty("passenger-nodejs").defaultValue())),
|
||||
entry(
|
||||
"passenger-python", withDefault(
|
||||
entry("passenger-python", withDefault(
|
||||
rec.getString("passenger_python"),
|
||||
httpDomainSetupValidator.getProperty("passenger-python").defaultValue())),
|
||||
entry(
|
||||
"passenger-ruby", withDefault(
|
||||
entry("passenger-ruby", withDefault(
|
||||
rec.getString("passenger_ruby"),
|
||||
httpDomainSetupValidator.getProperty("passenger-ruby").defaultValue()))
|
||||
))
|
||||
@ -1776,8 +1744,7 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
logError(() -> assertThat(vmName).isEqualTo(domUser.getParentAsset().getParentAsset().getIdentifier()));
|
||||
|
||||
//noinspection unchecked
|
||||
zoneData.put(
|
||||
"user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream()
|
||||
zoneData.put("user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream()
|
||||
.map(userRR -> userRR.stream().map(Object::toString).collect(joining(" ")))
|
||||
.toArray(String[]::new)
|
||||
);
|
||||
@ -1931,10 +1898,10 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
//noinspection unchecked
|
||||
return ((List<List<?>>) em.createNativeQuery(
|
||||
"""
|
||||
select li.* from hs_hosting.asset_legacy_id li
|
||||
join hs_hosting.asset ha on ha.uuid=li.uuid
|
||||
where cast(ha.type as text)=:type
|
||||
order by legacy_id
|
||||
SELECT li.* FROM hs_hosting.asset_legacy_id li
|
||||
JOIN hs_hosting.asset ha ON ha.uuid=li.uuid
|
||||
WHERE CAST(ha.type AS text)=:type
|
||||
ORDER BY legacy_id
|
||||
""",
|
||||
List.class)
|
||||
.setParameter("type", type.name())
|
||||
@ -1946,10 +1913,10 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
//noinspection unchecked
|
||||
return ((List<List<?>>) em.createNativeQuery(
|
||||
"""
|
||||
select ha.uuid, ha.type, ha.identifier from hs_hosting.asset ha
|
||||
join hs_hosting.asset_legacy_id li on li.uuid=ha.uuid
|
||||
where li.legacy_id is null and cast(ha.type as text)=:type
|
||||
order by li.legacy_id
|
||||
SELECT ha.uuid, ha.type, ha.identifier FROM hs_hosting.asset ha
|
||||
JOIN hs_hosting.asset_legacy_id li ON li.uuid=ha.uuid
|
||||
WHERE li.legacy_id is null AND CAST(ha.type AS text)=:type
|
||||
ORDER BY li.legacy_id
|
||||
""",
|
||||
List.class)
|
||||
.setParameter("type", type.name())
|
||||
@ -1957,17 +1924,4 @@ public class ImportHostingAssets extends CsvDataImport {
|
||||
.map(row -> row.stream().map(Object::toString).collect(joining(", ")))
|
||||
.collect(joining("\n"));
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
private void executeSqlScript(final String sqlFile) {
|
||||
jpaAttempt.transacted(() -> {
|
||||
try (InputStream resourceStream = resourceOf(sqlFile).getInputStream()) {
|
||||
final var sqlScript = new String(copyToByteArray(resourceStream), UTF_8);
|
||||
final var emf = (EntityManagerFactoryInfo) em.getEntityManagerFactory();
|
||||
new JdbcTemplate(emf.getDataSource()).execute(sqlScript);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}).assertSuccessful();
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,61 @@
|
||||
package net.hostsharing.hsadminng.hs.migration;
|
||||
|
||||
import net.hostsharing.hsadminng.context.Context;
|
||||
import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
|
||||
import org.junit.jupiter.api.*;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
|
||||
/*
|
||||
* This 'test' includes the complete legacy 'office' data import.
|
||||
*
|
||||
* There is no code in 'main' because the import is not needed at normal runtime.
|
||||
* There is some test data in Java resources to verify the data conversion.
|
||||
* For a real import a main method will be added later
|
||||
* which reads CSV files from the file system.
|
||||
*
|
||||
* When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
|
||||
*
|
||||
* In a real Hostsharing environment, these are created via (the old) hsadmin:
|
||||
|
||||
CREATE USER hsh99_admin WITH PASSWORD 'password';
|
||||
CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
|
||||
REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
|
||||
ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;
|
||||
|
||||
CREATE USER hsh99_restricted WITH PASSWORD 'password';
|
||||
|
||||
\c hsh99_hsadminng
|
||||
|
||||
GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;
|
||||
|
||||
* Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- maybe something like that is needed for the 2nd user
|
||||
-- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;
|
||||
|
||||
* Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.
|
||||
|
||||
* To finally import the office data, run:
|
||||
*
|
||||
* gw-importOfficeData # comes from .aliases file and uses .environment
|
||||
*/
|
||||
@Tag("importOfficeData")
|
||||
@DataJpaTest(properties = {
|
||||
"spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importOfficeDataTC}",
|
||||
"spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
|
||||
"spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
|
||||
"hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
|
||||
})
|
||||
@ActiveProfiles("without-test-data")
|
||||
@DirtiesContext
|
||||
@Import({ Context.class, JpaAttempt.class })
|
||||
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||
@ExtendWith(OrderedDependedTestsExtension.class)
|
||||
public class ImportOfficeData extends BaseOfficeDataImport {
|
||||
}
|
@ -1,17 +1,33 @@
|
||||
package net.hostsharing.hsadminng.hs.migration;
|
||||
|
||||
import liquibase.Liquibase;
|
||||
import lombok.SneakyThrows;
|
||||
import org.junit.jupiter.api.Tag;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
import org.springframework.test.context.jdbc.Sql;
|
||||
import org.testcontainers.containers.JdbcDatabaseContainer;
|
||||
import org.testcontainers.jdbc.ContainerDatabaseDriver;
|
||||
|
||||
import jakarta.persistence.EntityManager;
|
||||
import jakarta.persistence.PersistenceContext;
|
||||
import javax.sql.DataSource;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.apache.commons.io.FileUtils.readFileToString;
|
||||
import static org.apache.commons.io.FileUtils.write;
|
||||
import static org.apache.commons.io.FileUtils.writeStringToFile;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS;
|
||||
|
||||
// BLOG: Liquibase-migration-test (not before the reference-SQL-dump-generation is simplified)
|
||||
@ -24,9 +40,9 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE
|
||||
* <p>The test works as follows:</p>
|
||||
*
|
||||
* <ol>
|
||||
* <li>the database is initialized by `db/released-only-office-schema-with-test-data.sql` from the test-resources</li>
|
||||
* <li>the database is initialized by `db/prod-only-office-schema-with-test-data.sql` from the test-resources</li>
|
||||
* <li>the current Liquibase-migrations (only-office but with-test-data) are performed</li>
|
||||
* <li>a new dump is written to `db/released-only-office-schema-with-test-data.sql` in the build-directory</li>
|
||||
* <li>a new dump is written to `db/prod-only-office-schema-with-test-data.sql` in the build-directory</li>
|
||||
* <li>an extra Liquibase-changeset (liquibase-migration-test) is applied</li>
|
||||
* <li>it's asserted that the extra changeset got applied</li>
|
||||
* </ol>
|
||||
@ -42,31 +58,123 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE
|
||||
@DirtiesContext
|
||||
@ActiveProfiles("liquibase-migration-test")
|
||||
@Import(LiquibaseConfig.class)
|
||||
@Sql(value = "/db/released-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS) // release-schema
|
||||
@Sql(value = "/db/prod-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS)
|
||||
public class LiquibaseCompatibilityIntegrationTest {
|
||||
|
||||
private static final String EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION = "hs-global-liquibase-migration-test";
|
||||
private static final int EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP = 287;
|
||||
|
||||
@Value("${spring.datasource.url}")
|
||||
private String jdbcUrl;
|
||||
|
||||
@Autowired
|
||||
private LiquibaseMigration liquibase;
|
||||
private DataSource dataSource;
|
||||
|
||||
@Autowired
|
||||
private Liquibase liquibase;
|
||||
|
||||
@PersistenceContext
|
||||
private EntityManager em;
|
||||
|
||||
@Test
|
||||
void migrationWorksBasedOnAPreviouslyPopulatedSchema() {
|
||||
// check the initial status from the @Sql-annotation
|
||||
final var initialChangeSetCount = liquibase.assertReferenceStatusAfterRestore(
|
||||
EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
|
||||
final var initialChangeSetCount = assertProdReferenceStatusAfterRestore();
|
||||
|
||||
// run the current migrations and dump the result to the build-directory
|
||||
liquibase.runWithContexts("only-office", "with-test-data");
|
||||
PostgresTestcontainer.dump(jdbcUrl, new File("build/db/released-only-office-schema-with-test-data.sql"));
|
||||
runLiquibaseMigrationsWithContexts("only-office", "with-test-data");
|
||||
dumpTo(new File("build/db/prod-only-office-schema-with-test-data.sql"));
|
||||
|
||||
// then add another migration and assert if it was applied
|
||||
liquibase.runWithContexts("liquibase-migration-test");
|
||||
liquibase.assertThatCurrentMigrationsGotApplied(
|
||||
initialChangeSetCount, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
|
||||
runLiquibaseMigrationsWithContexts("liquibase-migration-test");
|
||||
assertThatCurrentMigrationsGotApplied(initialChangeSetCount);
|
||||
}
|
||||
|
||||
private int assertProdReferenceStatusAfterRestore() {
|
||||
final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'");
|
||||
assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock");
|
||||
|
||||
final var liquibaseScripts1 = singleColumnSqlQuery("SELECT * FROM public.databasechangelog");
|
||||
assertThat(liquibaseScripts1).hasSizeGreaterThan(285);
|
||||
assertThat(liquibaseScripts1).doesNotContain(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
|
||||
final var initialChangeSetCount = liquibaseScripts1.size();
|
||||
return initialChangeSetCount;
|
||||
}
|
||||
|
||||
private void assertThatCurrentMigrationsGotApplied(final int initialChangeSetCount) {
|
||||
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
|
||||
assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount);
|
||||
assertThat(liquibaseScripts).contains(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
|
||||
}
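For reference, these assertions only inspect Liquibase's own bookkeeping table; checked manually (a hypothetical query outside the test), the same information can be obtained like this:

-- count the applied changesets and look for the marker changeset
select count(*) from public.databasechangelog;
select id from public.databasechangelog where id = 'hs-global-liquibase-migration-test';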
|
||||
|
||||
@SneakyThrows
|
||||
private void dumpTo(final File targetFileName) {
|
||||
makeDir(targetFileName.getParentFile());
|
||||
|
||||
final var jdbcDatabaseContainer = getJdbcDatabaseContainer();
|
||||
|
||||
final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName());
|
||||
final var pb = new ProcessBuilder(
|
||||
"pg_dump", "--column-inserts", "--disable-dollar-quoting",
|
||||
"--host=" + jdbcDatabaseContainer.getHost(),
|
||||
"--port=" + jdbcDatabaseContainer.getFirstMappedPort(),
|
||||
"--username=" + jdbcDatabaseContainer.getUsername() ,
|
||||
"--dbname=" + jdbcDatabaseContainer.getDatabaseName(),
|
||||
"--file=" + sqlDumpFile.getCanonicalPath()
|
||||
);
|
||||
pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword());
|
||||
|
||||
final var process = pb.start();
|
||||
int exitCode = process.waitFor();
|
||||
final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()))
|
||||
.lines().collect(Collectors.joining("\n"));
|
||||
assertThat(exitCode).describedAs(stderr).isEqualTo(0);
|
||||
|
||||
final var header = """
|
||||
-- =================================================================================
|
||||
-- Generated reference-SQL-dump (hopefully of latest prod-release).
|
||||
-- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest
|
||||
-- ---------------------------------------------------------------------------------
|
||||
|
||||
--
|
||||
-- Explicit pre-initialization because we cannot use `pg_dump --create ...`
|
||||
-- because the database is already created by Testcontainers.
|
||||
--
|
||||
|
||||
CREATE ROLE postgres;
|
||||
CREATE ROLE admin;
|
||||
CREATE ROLE restricted;
|
||||
|
||||
""";
|
||||
writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite
|
||||
|
||||
write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true);
|
||||
|
||||
assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted").isTrue();
|
||||
}
|
||||
|
||||
private void makeDir(final File dir) {
|
||||
assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue();
|
||||
assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue();
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
private void runLiquibaseMigrationsWithContexts(final String... contexts) {
|
||||
liquibase.update(
|
||||
new liquibase.Contexts(contexts),
|
||||
new liquibase.LabelExpression());
|
||||
}
|
||||
|
||||
private List<String> singleColumnSqlQuery(final String sql) {
|
||||
//noinspection unchecked
|
||||
final var rows = (List<Object>) em.createNativeQuery(sql).getResultList();
|
||||
return rows.stream().map(Objects::toString).toList();
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer() {
|
||||
final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class);
|
||||
getContainerMethod.setAccessible(true);
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null,
|
||||
"jdbc:tc:postgresql:15.5-bookworm:///liquibaseMigrationTestTC");
|
||||
return container;
|
||||
}
|
||||
}
|
||||
|
@ -1,27 +1,28 @@
|
||||
package net.hostsharing.hsadminng.hs.migration;
|
||||
|
||||
import liquibase.Liquibase;
|
||||
import liquibase.database.DatabaseFactory;
|
||||
import liquibase.database.jvm.JdbcConnection;
|
||||
import liquibase.resource.ClassLoaderResourceAccessor;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Profile;
|
||||
|
||||
import jakarta.persistence.EntityManager;
|
||||
import jakarta.persistence.PersistenceContext;
|
||||
import javax.sql.DataSource;
|
||||
|
||||
@Configuration
|
||||
@Profile({"liquibase-migration", "liquibase-migration-test"})
|
||||
@Profile("liquibase-migration-test")
|
||||
public class LiquibaseConfig {
|
||||
|
||||
@PersistenceContext
|
||||
private EntityManager em;
|
||||
|
||||
@Bean
|
||||
public LiquibaseMigration liquibase(DataSource dataSource) throws Exception {
|
||||
public Liquibase liquibase(DataSource dataSource) throws Exception {
|
||||
final var connection = dataSource.getConnection();
|
||||
final var database = DatabaseFactory.getInstance()
|
||||
.findCorrectDatabaseImplementation(new JdbcConnection(connection));
|
||||
return new LiquibaseMigration(em, "db/changelog/db.changelog-master.yaml", database);
|
||||
return new Liquibase(
|
||||
"db/changelog/db.changelog-master.yaml", // Path to your Liquibase changelog
|
||||
new ClassLoaderResourceAccessor(),
|
||||
database
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1,55 +0,0 @@
|
||||
package net.hostsharing.hsadminng.hs.migration;
|
||||
|
||||
import liquibase.Liquibase;
|
||||
import liquibase.database.Database;
|
||||
import liquibase.resource.ClassLoaderResourceAccessor;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
import jakarta.persistence.EntityManager;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class LiquibaseMigration extends Liquibase {
|
||||
|
||||
private final EntityManager em;
|
||||
|
||||
public LiquibaseMigration(final EntityManager em, final String changeLogFile, final Database db) {
|
||||
super(changeLogFile, new ClassLoaderResourceAccessor(), db);
|
||||
this.em = em;
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public void runWithContexts(final String... contexts) {
|
||||
update(
|
||||
new liquibase.Contexts(contexts),
|
||||
new liquibase.LabelExpression());
|
||||
}
|
||||
|
||||
public int assertReferenceStatusAfterRestore(
|
||||
final int minExpectedLiquibaseChangelogs,
|
||||
final String expectedChangesetOnlyAfterNewMigration) {
|
||||
final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'");
|
||||
assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock");
|
||||
|
||||
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
|
||||
assertThat(liquibaseScripts).hasSize(minExpectedLiquibaseChangelogs);
|
||||
assertThat(liquibaseScripts).doesNotContain(expectedChangesetOnlyAfterNewMigration);
|
||||
return liquibaseScripts.size();
|
||||
}
|
||||
|
||||
public void assertThatCurrentMigrationsGotApplied(
|
||||
final int initialChangeSetCount,
|
||||
final String expectedChangesetOnlyAfterNewMigration) {
|
||||
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
|
||||
assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount);
|
||||
assertThat(liquibaseScripts).contains(expectedChangesetOnlyAfterNewMigration);
|
||||
}
|
||||
|
||||
private List<String> singleColumnSqlQuery(final String sql) {
|
||||
//noinspection unchecked
|
||||
final var rows = (List<Object>) em.createNativeQuery(sql).getResultList();
|
||||
return rows.stream().map(Objects::toString).toList();
|
||||
}
|
||||
}
|
@ -1,81 +0,0 @@
|
||||
package net.hostsharing.hsadminng.hs.migration;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
import org.testcontainers.containers.JdbcDatabaseContainer;
|
||||
import org.testcontainers.jdbc.ContainerDatabaseDriver;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.apache.commons.io.FileUtils.readFileToString;
|
||||
import static org.apache.commons.io.FileUtils.write;
|
||||
import static org.apache.commons.io.FileUtils.writeStringToFile;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class PostgresTestcontainer {
|
||||
|
||||
@SneakyThrows
|
||||
public static void dump(final String jdbcUrl, final File targetFileName) {
|
||||
makeDir(targetFileName.getParentFile());
|
||||
|
||||
final var jdbcDatabaseContainer = getJdbcDatabaseContainer(jdbcUrl);
|
||||
|
||||
final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName());
|
||||
final var pb = new ProcessBuilder(
|
||||
"pg_dump", "--column-inserts", "--disable-dollar-quoting",
|
||||
"--host=" + jdbcDatabaseContainer.getHost(),
|
||||
"--port=" + jdbcDatabaseContainer.getFirstMappedPort(),
|
||||
"--username=" + jdbcDatabaseContainer.getUsername() ,
|
||||
"--dbname=" + jdbcDatabaseContainer.getDatabaseName(),
|
||||
"--file=" + sqlDumpFile.getCanonicalPath()
|
||||
);
|
||||
pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword());
|
||||
|
||||
final var process = pb.start();
|
||||
int exitCode = process.waitFor();
|
||||
final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()))
|
||||
.lines().collect(Collectors.joining("\n"));
|
||||
assertThat(exitCode).describedAs(stderr).isEqualTo(0);
|
||||
|
||||
final var header = """
|
||||
-- =================================================================================
|
||||
-- Generated reference-SQL-dump (hopefully of latest prod-release).
|
||||
-- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest
|
||||
-- ---------------------------------------------------------------------------------
|
||||
|
||||
--
|
||||
-- Explicit pre-initialization because we cannot use `pg_dump --create ...`
|
||||
-- because the database is already created by Testcontainers.
|
||||
--
|
||||
|
||||
CREATE ROLE postgres;
|
||||
CREATE ROLE admin;
|
||||
CREATE ROLE restricted;
|
||||
|
||||
""";
|
||||
writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite
|
||||
|
||||
write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true);
|
||||
|
||||
assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted").isTrue();
|
||||
}
|
||||
|
||||
private static void makeDir(final File dir) {
|
||||
assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue();
|
||||
assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue();
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer(final String jdbcUrl) {
// TODO.test: check if, in the future, there is a better way to access auto-created Testcontainers
final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class);
getContainerMethod.setAccessible(true);

@SuppressWarnings("rawtypes")
final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null, jdbcUrl);
return container;
}
}
@ -86,7 +86,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101",
"memberNumberSuffix": "01",
"validFrom": "2022-10-01",
"validTo": "2024-12-30",
"validTo": null,
"status": "ACTIVE"
},
{
@ -94,7 +94,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000202",
"memberNumberSuffix": "02",
"validFrom": "2022-10-01",
"validTo": "2025-12-31",
"validTo": null,
"status": "ACTIVE"
},
{
@ -133,7 +133,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101",
"memberNumberSuffix": "01",
"validFrom": "2022-10-01",
"validTo": "2024-12-30",
"validTo": null,
"status": "ACTIVE"
}
]
@ -161,7 +161,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000202",
"memberNumberSuffix": "02",
"validFrom": "2022-10-01",
"validTo": "2025-12-31",
"validTo": null,
"status": "ACTIVE"
}
]
@ -177,7 +177,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
void globalAdmin_canAddMembership() {

context.define("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").getFirst();
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("Third").get(0);
final var givenMemberSuffix = TEMP_MEMBER_NUMBER_SUFFIX;
final var expectedMemberNumber = Integer.parseInt(givenPartner.getPartnerNumber() + TEMP_MEMBER_NUMBER_SUFFIX);

@ -189,7 +189,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
{
"partner.uuid": "%s",
"memberNumberSuffix": "%s",
"validFrom": "2025-02-13",
"validFrom": "2022-10-13",
"membershipFeeBillable": "true"
}
""".formatted(givenPartner.getUuid(), givenMemberSuffix))
@ -200,10 +200,10 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.statusCode(201)
.contentType(ContentType.JSON)
.body("uuid", isUuidValid())
.body("partner.partnerNumber", is("P-10001"))
.body("partner.partnerNumber", is("P-10003"))
.body("memberNumber", is("M-" + expectedMemberNumber))
.body("memberNumberSuffix", is(givenMemberSuffix))
.body("validFrom", is("2025-02-13"))
.body("validFrom", is("2022-10-13"))
.body("validTo", equalTo(null))
.header("Location", startsWith("http://localhost"))
.extract().header("Location"); // @formatter:on
@ -239,7 +239,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101",
"memberNumberSuffix": "01",
"validFrom": "2022-10-01",
"validTo": "2024-12-30",
"validTo": null,
"status": "ACTIVE"
}
""")); // @formatter:on
@ -297,13 +297,13 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
context.define("superuser-alex@hostsharing.net");
final var givenMembership = givenSomeTemporaryMembershipBessler("First");

RestAssured // @formatter:off
final var location = RestAssured // @formatter:off
.given()
.header("current-subject", "superuser-alex@hostsharing.net")
.contentType(ContentType.JSON)
.body("""
{
"validTo": "2025-12-31",
"validTo": "2023-12-31",
"status": "CANCELLED"
}
""")
@ -316,8 +316,8 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.body("uuid", isUuidValid())
.body("partner.partnerNumber", is("P-" + givenMembership.getPartner().getPartnerNumber()))
.body("memberNumberSuffix", is(givenMembership.getMemberNumberSuffix()))
.body("validFrom", is("2025-02-01"))
.body("validTo", is("2025-12-31"))
.body("validFrom", is("2022-11-01"))
.body("validTo", is("2023-12-31"))
.body("status", is("CANCELLED"));
// @formatter:on

@ -326,7 +326,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.matches(mandate -> {
assertThat(mandate.getPartner().toShortString()).isEqualTo("P-10001");
assertThat(mandate.getMemberNumberSuffix()).isEqualTo(givenMembership.getMemberNumberSuffix());
assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2026-01-01)");
assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2024-01-01)");
assertThat(mandate.getStatus()).isEqualTo(CANCELLED);
return true;
});
@ -348,7 +348,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.contentType(ContentType.JSON)
.body("""
{
"validTo": "2025-12-31",
"validTo": "2024-01-01",
"status": "CANCELLED"
}
""")
@ -361,7 +361,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
// finally, the Membership is actually updated
assertThat(membershipRepo.findByUuid(givenMembership.getUuid())).isPresent().get()
.matches(mandate -> {
assertThat(mandate.getValidity().asString()).isEqualTo("[2025-02-01,2026-01-01)");
assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2024-01-02)");
assertThat(mandate.getStatus()).isEqualTo(CANCELLED);
return true;
});
@ -434,7 +434,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
final var newMembership = HsOfficeMembershipEntity.builder()
.partner(givenPartner)
.memberNumberSuffix(TEMP_MEMBER_NUMBER_SUFFIX)
.validity(Range.closedInfinite(LocalDate.parse("2025-02-01")))
.validity(Range.closedInfinite(LocalDate.parse("2022-11-01")))
.status(ACTIVE)
.membershipFeeBillable(true)
.build();
@ -4,20 +4,19 @@ import io.hypersistence.utils.hibernate.type.range.Range;
import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.hs.office.debitor.HsOfficeDebitorRepository;
import net.hostsharing.hsadminng.hs.office.partner.HsOfficePartnerRealRepository;
import net.hostsharing.hsadminng.mapper.Array;
import net.hostsharing.hsadminng.rbac.test.ContextBasedTestWithCleanup;
import net.hostsharing.hsadminng.rbac.grant.RawRbacGrantRepository;
import net.hostsharing.hsadminng.rbac.role.RawRbacRoleRepository;
import net.hostsharing.hsadminng.rbac.test.ContextBasedTestWithCleanup;
import net.hostsharing.hsadminng.mapper.Array;
import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.postgresql.util.PSQLException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import org.springframework.context.annotation.Import;
import org.springframework.orm.jpa.JpaSystemException;
import org.springframework.test.context.bean.override.mockito.MockitoBean;

import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
@ -32,7 +31,7 @@ import static net.hostsharing.hsadminng.rbac.test.JpaAttempt.attempt;
import static org.assertj.core.api.Assertions.assertThat;

@DataJpaTest
@Import({ Context.class, JpaAttempt.class })
@Import( { Context.class, JpaAttempt.class })
@Tag("officeIntegrationTest")
class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCleanup {

@ -71,12 +70,11 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0);

// when
final var result = attempt(
em, () -> {
final var result = attempt(em, () -> {
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix("11")
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2025-01-01")))
.validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.membershipFeeBillable(true)
.build();
return toCleanup(membershipRepo.save(newMembership).load());
@ -89,31 +87,6 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(membershipRepo.count()).isEqualTo(count + 1);
}

@Test
public void creatingMembershipForSamePartnerIsDisallowedIfAnotherOneIsStillActive() {
// given
context("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").getFirst();

// when
final var result = attempt(
em, () -> {
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix("11")
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2024-01-01")))
.membershipFeeBillable(true)
.build();
return toCleanup(membershipRepo.save(newMembership).load());
});

// then
result.assertExceptionWithRootCauseMessage(
PSQLException.class,
"Membership validity ranges overlap for partnerUuid " + givenPartner.getUuid() +
", partnerNumber " + givenPartner.getPartnerNumber());
}

@Test
public void createsAndGrantsRoles() {
// given
@ -124,13 +97,12 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
.toList();

// when
attempt(
em, () -> {
attempt(em, () -> {
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0);
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix("17")
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2025-01-01")))
.validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.membershipFeeBillable(true)
.build();
return toCleanup(membershipRepo.save(newMembership));
@ -173,7 +145,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl

private void assertThatMembershipIsPersisted(final HsOfficeMembershipEntity saved) {
final var found = membershipRepo.findByUuid(saved.getUuid());
assertThat(found).isNotEmpty().get().extracting(Object::toString).isEqualTo(saved.toString());
assertThat(found).isNotEmpty().get().extracting(Object::toString).isEqualTo(saved.toString()) ;
}
}

@ -191,8 +163,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
// then
exactlyTheseMembershipsAreReturned(
result,
"Membership(M-1000101, P-10001, [2022-10-01,2024-12-31), ACTIVE)",
"Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)",
"Membership(M-1000101, P-10001, [2022-10-01,), ACTIVE)",
"Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)",
"Membership(M-1000303, P-10003, [2022-10-01,), ACTIVE)");
}

@ -206,9 +178,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var result = membershipRepo.findMembershipsByPartnerUuid(givenPartner.getUuid());

// then
exactlyTheseMembershipsAreReturned(
result,
"Membership(M-1000101, P-10001, [2022-10-01,2024-12-31), ACTIVE)");
exactlyTheseMembershipsAreReturned(result,
"Membership(M-1000101, P-10001, [2022-10-01,), ACTIVE)");
}

@Test
@ -223,7 +194,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(result)
.isNotNull()
.extracting(Object::toString)
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)");
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
}

@Test
@ -238,7 +209,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(result)
.isNotNull()
.extracting(Object::toString)
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)");
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
}

@Test
@ -250,9 +221,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var result = membershipRepo.findMembershipsByPartnerNumber(10002);

// then
exactlyTheseMembershipsAreReturned(
result,
"Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)");
exactlyTheseMembershipsAreReturned(result,
"Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
}
}

@ -303,8 +273,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
});

// then
result.assertExceptionWithRootCauseMessage(
JpaSystemException.class,
result.assertExceptionWithRootCauseMessage(JpaSystemException.class,
"[403] Subject ", " is not allowed to update hs_office.membership uuid");
}

@ -412,16 +381,14 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
"[creating Membership test-data, hs_office.membership, INSERT, 03]");
}

private HsOfficeMembershipEntity givenSomeTemporaryMembership(
final String partnerTradeName,
final String memberNumberSuffix) {
private HsOfficeMembershipEntity givenSomeTemporaryMembership(final String partnerTradeName, final String memberNumberSuffix) {
return jpaAttempt.transacted(() -> {
context("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike(partnerTradeName).get(0);
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix(memberNumberSuffix)
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2025-02-01")))
.validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.membershipFeeBillable(true)
.build();
@ -287,12 +287,12 @@ class HsOfficeScenarioTests extends ScenarioTest {

@Test
@Order(2011)
@Requires("Debitor: D-3101000 - Test AG - main debitor")
@Produces("Debitor: D-3101001 - Test AG - additional debitor")
void shouldCreateAdditionDebitorForPartner() {
new CreateSelfDebitorForPartner(scenarioTest)
@Requires("Person: Test AG")
@Produces("Debitor: D-3101001 - Test AG - main debitor")
void shouldCreateExternalDebitorForPartner() {
new CreateExternalDebitorForPartner(scenarioTest)
.given("partnerPersonTradeName", "Test AG")
.given("billingContactCaption", "Test AG - billing department")
.given("billingContactCaption", "Billing GmbH - billing department")
.given("billingContactEmailAddress", "billing@test-ag.example.org")
.given("debitorNumberSuffix", "01")
.given("billable", true)
@ -305,30 +305,10 @@ class HsOfficeScenarioTests extends ScenarioTest {
.keep();
}

@Test
@Order(2012)
@Requires("Person: Test AG")
@Produces("Debitor: D-3101002 - Test AG - external debitor")
void shouldCreateExternalDebitorForPartner() {
new CreateExternalDebitorForPartner(scenarioTest)
.given("partnerPersonTradeName", "Test AG")
.given("billingContactCaption", "Billing GmbH - billing department")
.given("billingContactEmailAddress", "billing@test-ag.example.org")
.given("debitorNumberSuffix", "02")
.given("billable", true)
.given("vatId", "VAT123456")
.given("vatCountryCode", "DE")
.given("vatBusiness", true)
.given("vatReverseCharge", false)
.given("defaultPrefix", "tsy")
.doRun()
.keep();
}

@Test
@Order(2020)
@Requires("Person: Test AG")
@Produces(explicitly = "Debitor: D-3101002 - Test AG - delete debitor", permanent = false)
@Produces(explicitly = "Debitor: D-3101000 - Test AG - delete debitor", permanent = false)
void shouldDeleteDebitor() {
new DeleteDebitor(scenarioTest)
.given("partnerNumber", "P-31020")
@ -337,7 +317,7 @@ class HsOfficeScenarioTests extends ScenarioTest {
}

@Test
@Order(2021)
@Order(2020)
@Requires("Debitor: D-3101000 - Test AG - main debitor")
@Disabled("see TODO.spec in DontDeleteDefaultDebitor")
void shouldNotDeleteDefaultDebitor() {
@ -407,39 +387,22 @@ class HsOfficeScenarioTests extends ScenarioTest {
void shouldCreateMembershipForPartner() {
new CreateMembership(scenarioTest)
.given("partnerName", "Test AG")
.given("validFrom", "2020-10-15")
.given("validFrom", "2024-10-15")
.given("newStatus", "ACTIVE")
.given("membershipFeeBillable", "true")
.doRun()
.keep();
}

@Test
@Order(4080)
@Requires("Membership: M-3101000 - Test AG")
@Produces("Membership: M-3101000 - Test AG - cancelled")
void shouldCancelMembershipOfPartner() {
new CancelMembership(scenarioTest)
.given("memberNumber", "M-3101000")
.given("validTo", "2023-12-31")
.given("newStatus", "CANCELLED")
.doRun()
.keep();
}

@Test
@Order(4090)
@Requires("Membership: M-3101000 - Test AG - cancelled")
@Produces("Membership: M-3101001 - Test AG")
void shouldCreateSubsequentMembershipOfPartner() {
new CreateMembership(scenarioTest)
.given("partnerName", "Test AG")
.given("memberNumberSuffix", "01")
.given("validFrom", "2025-02-24")
.given("newStatus", "ACTIVE")
.given("membershipFeeBillable", "true")
.doRun()
.keep();
@Requires("Membership: M-3101000 - Test AG")
void shouldCancelMembershipOfPartner() {
new CancelMembership(scenarioTest)
.given("memberNumber", "M-3101000")
.given("validTo", "2025-12-30")
.given("newStatus", "CANCELLED")
.doRun();
}
}
@ -19,7 +19,7 @@ public class DeleteDebitor extends UseCase<DeleteDebitor> {
.given("vatCountryCode", "DE")
.given("vatBusiness", true)
.given("vatReverseCharge", false)
.given("defaultPrefix", "tsz"));
.given("defaultPrefix", "tsy"));
}

@Override
@ -1,6 +1,6 @@
package net.hostsharing.hsadminng.rbac.test;

import lombok.SneakyThrows;
import org.assertj.core.api.ObjectAssert;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.NestedExceptionUtils;
import org.springframework.stereotype.Service;
@ -78,9 +78,9 @@ public class JpaAttempt {
public static class JpaResult<T> {

private final T value;
private final Throwable exception;
private final RuntimeException exception;

private JpaResult(final T value, final Throwable exception) {
private JpaResult(final T value, final RuntimeException exception) {
this.value = value;
this.exception = exception;
}
@ -93,7 +93,7 @@ public class JpaAttempt {
return new JpaResult<>(value, null);
}

public static <T> JpaResult<T> forException(final Throwable exception) {
public static <T> JpaResult<T> forException(final RuntimeException exception) {
return new JpaResult<>(null, exception);
}

@ -105,23 +105,20 @@ public class JpaAttempt {
return value;
}

public Throwable caughtException() {
public ObjectAssert<T> assertThatResult() {
assertSuccessful();
return assertThat(returnedValue());
}

public RuntimeException caughtException() {
return exception;
}

public <E extends Throwable> E caughtException(final Class<E> expectedExceptionClass) {
//noinspection unchecked
return caughtException((E) exception, expectedExceptionClass);
}

public static <E extends Throwable> E caughtException(final Throwable exception, final Class<E> expectedExceptionClass) {
@SuppressWarnings("unchecked")
public <E extends RuntimeException> E caughtException(final Class<E> expectedExceptionClass) {
if (expectedExceptionClass.isAssignableFrom(exception.getClass())) {
//noinspection unchecked
return (E) exception;
}
if(exception.getCause() != null && exception.getCause() != exception ) {
return caughtException(exception.getCause(), expectedExceptionClass);
}
throw new AssertionError("expected " + expectedExceptionClass + " but got " + exception);
}

@ -130,7 +127,7 @@ public class JpaAttempt {
}

public void assertExceptionWithRootCauseMessage(
final Class<? extends Throwable> expectedExceptionClass,
final Class<? extends RuntimeException> expectedExceptionClass,
final String... expectedRootCauseMessages) {
assertThat(wasSuccessful()).as("wasSuccessful").isFalse();
final String firstRootCauseMessageLine = firstRootCauseMessageLineOf(caughtException(expectedExceptionClass));
@ -139,11 +136,11 @@ public class JpaAttempt {
}
}

@SneakyThrows
public void reThrowException() {
public JpaResult<T> reThrowException() {
if (exception != null) {
throw exception;
}
return this;
}

public JpaResult<T> assumeSuccessful() {
@ -161,9 +158,9 @@ public class JpaAttempt {
return this;
}

private String firstRootCauseMessageLineOf(final Throwable exception) {
private String firstRootCauseMessageLineOf(final RuntimeException exception) {
final var rootCause = NestedExceptionUtils.getRootCause(exception);
return Optional.ofNullable(rootCause != null ? rootCause : exception)
return Optional.ofNullable(rootCause)
.map(Throwable::getMessage)
.map(message -> message.split("\\r|\\n|\\r\\n", 0)[0])
.orElse(null);
File diff suppressed because it is too large