Compare commits
5 Commits: maintenanc...master

| Author | SHA1 | Date |
|---|---|---|
|  | f8fda06beb |  |
|  | a0635960a5 |  |
|  | ddd96654ef |  |
|  | abafd64813 |  |
|  | a1a753e00a |  |

.aliases (17 changed lines)
@@ -1,4 +1,4 @@
# For using the alias gw-importOfficeData or gw-importHostingAssets,
# For using the alias gw-importHostingAssets,
# copy the file .tc-environment to .environment (ignored by git)
# and amend them according to your external DB.

@@ -71,7 +71,6 @@ function importLegacyData() {
./gradlew $target --rerun
fi
}
alias gw-importOfficeData='importLegacyData importOfficeData'
alias gw-importHostingAssets='importLegacyData importHostingAssets'

alias podman-start='systemctl --user enable --now podman.socket && systemctl --user status podman.socket && ls -la /run/user/$UID/podman/podman.sock'

@@ -92,8 +91,8 @@ alias fp='grep -r '@Accepts' src | sed -e 's/^.*@/@/g' | sort -u | wc -l'
alias gw-spotless='./gradlew spotlessApply -x pitest -x test -x :processResources'
alias gw-check='. .aliases; . .tc-environment; gw test check -x pitest'

# HOWTO: run all 'normal' tests (no scenario+import-tests): `gw-test`
# You can also mention specific targets: `gw-test importOfficeData`.
# HOWTO: run all 'normal' tests (by default without scenario+import-tests): `gw-test`
# You can also mention specific targets: `gw-test importHostingAssets`, in that case only these tests are executed.
# This will always use the environment from `.tc-environment`.
#
# HOWTO: re-run tests even if no changed can be detected: `gw-test --rerun`

@@ -114,16 +113,16 @@ function _gwTest1() {
echo "DONE gw $@"
}
function _gwTest() {
. .aliases;
. .tc-environment;
rm /tmp/gwTest.tmp
. .aliases
. .tc-environment
rm -f /tmp/gwTest.tmp
if [ "$1" == "--all" ]; then
shift # to remove the --all from $@
# delierately in separate gradlew-calls to avoid Testcontains-PostgreSQL problem spillover
time (_gwTest1 unitTest "$@" &&
_gwTest1 officeIntegrationTest bookingIntegrationTest hostingIntegrationTest "$@" &&
_gwTest1 scenarioTest "$@" &&
_gwTest1 importOfficeData importHostingAssets "$@");
_gwTest1 importHostingAssets "$@");
elif [ $# -eq 0 ] || [[ $1 == -* ]]; then
time _gwTest1 test "$@";
else

@@ -137,7 +136,7 @@ alias howto=bin/howto
alias cas-curl=bin/cas-curl

# etc/docker-compose.yml limits CPUs+MEM and includes a PostgreSQL config for analysing slow queries
alias gw-importOfficeData-in-docker-compose='
alias gw-importHostingAssets-in-docker-compose='
docker-compose -f etc/docker-compose.yml down &&
docker-compose -f etc/docker-compose.yml up -d && sleep 10 &&
time gw-importHostingAssets'
@@ -7,6 +7,7 @@
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="postgres" />
<entry key="HSADMINNG_POSTGRES_JDBC_URL" value="jdbc:postgresql://localhost:5432/postgres" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
</map>
</option>
<option name="executionName" />

@@ -3,9 +3,9 @@
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
<entry key="HSADMINNG_SUPERUSER" value="import-superuser@hostsharing.net" />
</map>
</option>
<option name="executionName" />
@@ -1,103 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
</map>
</option>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value=":importOfficeData" />
<option value="--tests" />
<option value="&quot;net.hostsharing.hsadminng.hs.migration.ImportOfficeData&quot;" />
</list>
</option>
<option name="vmOptions" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
<extension name="coverage" sample_coverage="false" />
</EXTENSION>
<DebugAllEnabled>false</DebugAllEnabled>
<RunAsTest>true</RunAsTest>
<method v="2" />
</configuration>
<configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
</map>
</option>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value=":importOfficeData" />
<option value="--tests" />
<option value="&quot;net.hostsharing.hsadminng.hs.office.migration.ImportOfficeData&quot;" />
</list>
</option>
<option name="vmOptions" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
<extension name="coverage" sample_coverage="false" />
</EXTENSION>
<DebugAllEnabled>false</DebugAllEnabled>
<RunAsTest>true</RunAsTest>
<method v="2" />
</configuration>
<configuration default="false" name="ImportOfficeData" type="GradleRunConfiguration" factoryName="Gradle">
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
<entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
</map>
</option>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value=":importOfficeData" />
<option value="--tests" />
<option value="&quot;net.hostsharing.hsadminng.hs.migration.ImportOfficeData&quot;" />
</list>
</option>
<option name="vmOptions" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>false</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
<extension name="coverage" sample_coverage="false" />
</EXTENSION>
<DebugAllEnabled>false</DebugAllEnabled>
<RunAsTest>true</RunAsTest>
<method v="2" />
</configuration>
</component>
@@ -1,7 +1,7 @@
unset HSADMINNG_POSTGRES_JDBC_URL # dynamically set, different for normal tests and imports
export HSADMINNG_POSTGRES_ADMIN_USERNAME=admin
export HSADMINNG_POSTGRES_ADMIN_PASSWORD=
source .unset-environment

export HSADMINNG_POSTGRES_RESTRICTED_USERNAME=restricted
export HSADMINNG_SUPERUSER=superuser-alex@hostsharing.net
export HSADMINNG_MIGRATION_DATA_PATH=migration
export HSADMINNG_POSTGRES_ADMIN_USERNAME=admin
export HSADMINNG_SUPERUSER=import-superuser@hostsharing.net

export LANG=en_US.UTF-8

@@ -4,4 +4,5 @@ unset HSADMINNG_POSTGRES_ADMIN_PASSWORD
unset HSADMINNG_POSTGRES_RESTRICTED_USERNAME
unset HSADMINNG_SUPERUSER
unset HSADMINNG_MIGRATION_DATA_PATH
unset HSADMINNG_OFFICE_DATA_SQL_FILE
Jenkinsfile (vendored, 4 changed lines)

@@ -55,9 +55,9 @@ pipeline {
sh './gradlew bookingIntegrationTest hostingIntegrationTest --no-daemon'
}
}
stage('Import-Tests') {
stage('Test-Imports') {
steps {
sh './gradlew importOfficeData importHostingAssets --no-daemon'
sh './gradlew importHostingAssets --no-daemon'
}
}
stage ('Scenario-Tests') {
build.gradle (17 changed lines)

@@ -263,7 +263,7 @@ test {
'net.hostsharing.hsadminng.**.generated.**',
]
useJUnitPlatform {
excludeTags 'importOfficeData', 'importHostingAssets', 'scenarioTest'
excludeTags 'importHostingAssets', 'scenarioTest'
}
}

@@ -338,7 +338,7 @@ jacocoTestCoverageVerification {
// HOWTO: run all unit-tests which don't need a database: gw-test unitTest
tasks.register('unitTest', Test) {
useJUnitPlatform {
excludeTags 'importOfficeData', 'importHostingAssets', 'scenarioTest', 'generalIntegrationTest',
excludeTags 'importHostingAssets', 'scenarioTest', 'generalIntegrationTest',
'officeIntegrationTest', 'bookingIntegrationTest', 'hostingIntegrationTest'
}

@@ -396,17 +396,6 @@ tasks.register('hostingIntegrationTest', Test) {
mustRunAfter spotlessJava
}

tasks.register('importOfficeData', Test) {
useJUnitPlatform {
includeTags 'importOfficeData'
}

group 'verification'
description 'run the import jobs as tests'

mustRunAfter spotlessJava
}

tasks.register('importHostingAssets', Test) {
useJUnitPlatform {
includeTags 'importHostingAssets'

@@ -439,7 +428,7 @@ pitest {
]

targetTests = ['net.hostsharing.hsadminng.**.*UnitTest', 'net.hostsharing.hsadminng.**.*RestTest']
excludedTestClasses = ['**AcceptanceTest*', '**IntegrationTest*', '**ImportOfficeData', '**ImportHostingAssets']
excludedTestClasses = ['**AcceptanceTest*', '**IntegrationTest*', '**ImportHostingAssets']

pitestVersion = '1.17.0'
junit5PluginVersion = '1.1.0'
@@ -108,6 +108,40 @@ der Person des _Subscriber-Contact_ (_Holder_) zur repräsentierten Person (_Anc
Additionally, this relation is marked with the short name of the subscribed mailing list.


### Coop-Asset-Transactions (cooperative asset transactions)

- positive value => cooperative assets increase
- negative value => cooperative assets decrease

**REVERSAL**: correction of an erroneous booking; a positive or negative value is possible

**DEPOSIT**: incoming payment from the member after subscribing to cooperative shares, always a positive value

**DISBURSAL**: outgoing payment to the member after cancelling cooperative shares, always a negative value

**TRANSFER**: transfer of cooperative assets to another member, always a negative value

**ADOPTION**: takeover of cooperative assets from another member, always a positive value

**CLEARING**: offsetting of cooperative assets against debts of the member, always a negative value

**LOSS**: loss of cooperative assets when an equity loss is allocated after cancellation of cooperative shares, always a negative value

**LIMITATION**: expiry (statute of limitations) of cooperative assets if disbursal was not possible within the deadline.


### Coop-Share-Transactions (cooperative share transactions)

- positive value => cooperative shares increase
- negative value => cooperative shares decrease

**REVERSAL**: correction of an erroneous booking; a positive or negative value is possible

**SUBSCRIPTION**: subscription to cooperative shares, e.g. via a membership declaration, always a positive value

**CANCELLATION**: cancellation of cooperative shares, e.g. by leaving the cooperative, always a negative value


#### Anchor / Relation-Anchor

see [Relation](#Relation)
@@ -116,7 +116,7 @@ classDiagram
+BankAccount refundBankAccount
+String defaultPrefix: mei
}
debitor-MeierGmbH o-- partner-MeierGmbH
debitor-MeierGmbH o.. partner-MeierGmbH
debitor-MeierGmbH *-- rel-MeierGmbH-Buha

class contactData-MeierGmbH-Buha {
@@ -1,6 +1,6 @@
--liquibase formatted sql

-- FIXME: check if we really need the restricted user
-- TODO.impl: check if we really need the restricted user

-- ============================================================================
-- NUMERIC-HASH-FUNCTIONS
@@ -22,13 +22,12 @@ select (objectTable || '#' || objectIdName || ':' || roleType) as roleIdName, *
--//

-- ============================================================================
--changeset michael.hoennig:rbac-views-ROLE-RESTRICTED-VIEW endDelimiter:--//
--changeset michael.hoennig:rbac-views-ROLE-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
Creates a view to the role table with row-level limitation
based on the grants of the current user or assumed roles.
*/
drop view if exists rbac.role_rv;
create or replace view rbac.role_rv as
select *
-- @formatter:off

@@ -106,7 +105,7 @@ create or replace view rbac.grant_ev as


-- ============================================================================
--changeset michael.hoennig:rbac-views-GRANT-RESTRICTED-VIEW endDelimiter:--//
--changeset michael.hoennig:rbac-views-GRANT-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
Creates a view to the grants table with row-level limitation

@@ -222,13 +221,12 @@ select distinct *


-- ============================================================================
--changeset michael.hoennig:rbac-views-USER-RESTRICTED-VIEW endDelimiter:--//
--changeset michael.hoennig:rbac-views-USER-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
Creates a view to the users table with row-level limitation
based on the grants of the current user or assumed roles.
*/
drop view if exists rbac.subject_rv;
create or replace view rbac.subject_rv as
select distinct *
-- @formatter:off

@@ -316,14 +314,13 @@ execute function rbac.delete_subject_tf();
--/

-- ============================================================================
--changeset michael.hoennig:rbac-views-OWN-GRANTED-PERMISSIONS-VIEW endDelimiter:--//
--changeset michael.hoennig:rbac-views-OWN-GRANTED-PERMISSIONS-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
Creates a view to all permissions granted to the current user or
based on the grants of the current user or assumed roles.
*/
-- @formatter:off
drop view if exists rbac.own_granted_permissions_rv;
create or replace view rbac.own_granted_permissions_rv as
select r.uuid as roleuuid, p.uuid as permissionUuid,
(r.objecttable || ':' || r.objectidname || ':' || r.roletype) as roleName, p.op,
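The changesets above now carry `runOnChange:true validCheckSum:ANY` and start with `drop view if exists ...`, so Liquibase re-applies them whenever their body changes instead of failing on a checksum mismatch. A minimal sketch of that re-runnable pattern, with purely illustrative schema and view names (not part of the diff):

    --liquibase formatted sql

    --changeset some.author:demo-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
    -- re-runnable: the drop+create pair keeps the view in sync with the latest definition
    drop view if exists demo.example_rv;
    create or replace view demo.example_rv as
        select * from demo.example;  -- hypothetical base table
    --//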
@@ -111,7 +111,7 @@ end; $$;


-- ============================================================================
--changeset michael.hoennig:rbac-generators-IDENTITY-VIEW endDelimiter:--//
--changeset michael.hoennig:rbac-generators-IDENTITY-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------

create or replace procedure rbac.generateRbacIdentityViewFromQuery(targetTable text, sqlQuery text)

@@ -171,7 +171,7 @@ end; $$;


-- ============================================================================
--changeset michael.hoennig:rbac-generators-RESTRICTED-VIEW endDelimiter:--//
--changeset michael.hoennig:rbac-generators-RESTRICTED-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------

create or replace procedure rbac.generateRbacRestrictedView(targetTable text, orderBy text, columnUpdates text = null, columnNames text = '*')
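Both generator procedures take the target table plus view-specific arguments (`sqlQuery` for the identity view; `orderBy`, optional `columnUpdates` and `columnNames` for the restricted view). A hedged sketch of how such generators might be invoked from a table-specific changeset; the table, columns and update list here are made up for illustration and are not taken from the actual callers:

    call rbac.generateRbacIdentityViewFromQuery(
        'hs_office.contact',
        $$ select uuid, caption as idName from hs_office.contact $$);

    call rbac.generateRbacRestrictedView(
        'hs_office.contact',           -- targetTable
        'caption',                     -- orderBy
        $$ caption = new.caption $$);  -- columnUpdates; columnNames defaults to '*'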
@@ -1,7 +1,7 @@
--liquibase formatted sql

-- ============================================================================
--changeset michael.hoennig:rbac-global-OBJECT endDelimiter:--//
--changeset michael.hoennig:rbac-global-OBJECT runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
The purpose of this table is provide root business objects

@@ -11,12 +11,12 @@
In production databases, there is only a single row in this table,
in test stages, there can be one row for each test data realm.
*/
create table rbac.global
create table if not exists rbac.global
(
uuid uuid primary key references rbac.object (uuid) on delete cascade,
name varchar(63) unique
);
create unique index Global_Singleton on rbac.global ((0));
create unique index if not exists Global_Singleton on rbac.global ((0));

grant select on rbac.global to ${HSADMINNG_POSTGRES_RESTRICTED_USERNAME};
--//

@@ -75,13 +75,12 @@ $$;


-- ============================================================================
--changeset michael.hoennig:rbac-global-IDENTITY-VIEW endDelimiter:--//
--changeset michael.hoennig:rbac-global-IDENTITY-VIEW runOnChange:true validCheckSum:ANY endDelimiter:--//
-- ----------------------------------------------------------------------------

/*
Creates a view to the rbac.global object table which maps the identifying name to the objectUuid.
*/
drop view if exists rbac.global_iv;
create or replace view rbac.global_iv as
select target.uuid, target.name as idName
from rbac.global as target;
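The `((0))` expression index is what turns `rbac.global` into a singleton: every row produces the same index key `0`, so a second insert violates the unique index. A self-contained sketch of the same trick on a throw-away table (schema and table name are illustrative, not part of the repository):

    create schema if not exists demo;
    create table if not exists demo.singleton (name varchar(63));
    create unique index if not exists demo_singleton on demo.singleton ((0));

    insert into demo.singleton (name) values ('first');   -- succeeds
    insert into demo.singleton (name) values ('second');  -- fails: duplicate key value violates unique index "demo_singleton"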
@@ -32,6 +32,41 @@ create table if not exists hs_office.membership
--//


-- ============================================================================
--changeset michael.hoennig:hs-office-membership-SINGLE-MEMBERSHIP-CHECK endDelimiter:--//
-- ----------------------------------------------------------------------------

CREATE OR REPLACE FUNCTION hs_office.validate_membership_validity()
RETURNS trigger AS $$
DECLARE
    partnerNumber int;
BEGIN
    IF EXISTS (
        SELECT 1
        FROM hs_office.membership
        WHERE partnerUuid = NEW.partnerUuid
        AND uuid <> NEW.uuid
        AND NEW.validity && validity
    ) THEN
        SELECT p.partnerNumber INTO partnerNumber
        FROM hs_office.partner AS p
        WHERE p.uuid = NEW.partnerUuid;
        RAISE EXCEPTION 'Membership validity ranges overlap for partnerUuid %, partnerNumber %', NEW.partnerUuid, partnerNumber;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trg_validate_membership_validity
    BEFORE INSERT OR UPDATE ON hs_office.membership
    FOR EACH ROW
EXECUTE FUNCTION hs_office.validate_membership_validity();


--//


-- ============================================================================
--changeset michael.hoennig:hs-office-membership-MAIN-TABLE-JOURNAL endDelimiter:--//
-- ----------------------------------------------------------------------------
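The new trigger rejects any membership whose `validity` daterange overlaps (`&&`) an existing membership of the same partner. A hedged sketch of the effect, assuming an existing partner row whose uuid is bound to the placeholder `:partnerUuid` and the uuid-ossp extension used elsewhere in this repository:

    insert into hs_office.membership (uuid, partnerUuid, memberNumberSuffix, validity, status)
        values (uuid_generate_v4(), :partnerUuid, '01', daterange('20221001', '20241231', '[)'), 'ACTIVE');  -- succeeds

    -- overlaps the range above, so the BEFORE INSERT trigger raises:
    --   Membership validity ranges overlap for partnerUuid ..., partnerNumber ...
    insert into hs_office.membership (uuid, partnerUuid, memberNumberSuffix, validity, status)
        values (uuid_generate_v4(), :partnerUuid, '02', daterange('20240101', null, '[]'), 'ACTIVE');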
@@ -10,7 +10,8 @@
*/
create or replace procedure hs_office.membership_create_test_data(
forPartnerNumber numeric(5),
newMemberNumberSuffix char(2) )
newMemberNumberSuffix char(2),
validity daterange)
language plpgsql as $$
declare
relatedPartner hs_office.partner;

@@ -22,7 +23,7 @@ begin
raise notice '- using partner (%): %', relatedPartner.uuid, relatedPartner;
insert
into hs_office.membership (uuid, partneruuid, memberNumberSuffix, validity, status)
values (uuid_generate_v4(), relatedPartner.uuid, newMemberNumberSuffix, daterange('20221001' , null, '[]'), 'ACTIVE');
values (uuid_generate_v4(), relatedPartner.uuid, newMemberNumberSuffix, validity, 'ACTIVE');
end; $$;
--//

@@ -35,9 +36,9 @@ do language plpgsql $$
begin
call base.defineContext('creating Membership test-data', null, 'superuser-alex@hostsharing.net', 'rbac.global#global:ADMIN');

call hs_office.membership_create_test_data(10001, '01');
call hs_office.membership_create_test_data(10002, '02');
call hs_office.membership_create_test_data(10003, '03');
call hs_office.membership_create_test_data(10001, '01', daterange('20221001' , '20241231', '[)'));
call hs_office.membership_create_test_data(10002, '02', daterange('20221001' , '20251231', '[]'));
call hs_office.membership_create_test_data(10003, '03', daterange('20221001' , null, '[]'));
end;
$$;
--//
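The test data now passes an explicit `validity` range per membership; the third argument controls bound inclusion: `'[)'` includes the lower and excludes the upper bound, `'[]'` includes both, and a null upper bound means open-ended. A quick illustration of those PostgreSQL daterange semantics (not part of the diff):

    select daterange('20221001', '20241231', '[)') @> date '2024-12-31';  -- false, upper bound excluded
    select daterange('20221001', '20251231', '[]') @> date '2025-12-31';  -- true, upper bound included
    select daterange('20221001', null, '[]') @> date '2099-01-01';        -- true, open ended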
@@ -25,7 +25,7 @@ create table if not exists hs_booking.item
caption varchar(80) not null,
resources jsonb not null,

constraint booking_item_has_project_or_parent_asset
constraint booking_item_has_project_or_parent_item
check (projectUuid is not null or parentItemUuid is not null)
);
--//
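The renamed constraint still enforces that a booking item is attached to either a project or a parent item. A minimal stand-alone sketch of the same CHECK (table and column set are illustrative, not the real `hs_booking.item` definition; uuid_generate_v4 assumes the uuid-ossp extension):

    create table if not exists demo_booking_item
    (
        uuid           uuid primary key,
        projectUuid    uuid,
        parentItemUuid uuid,
        caption        varchar(80) not null,

        constraint demo_item_has_project_or_parent_item
            check (projectUuid is not null or parentItemUuid is not null)
    );

    insert into demo_booking_item (uuid, projectUuid, caption)
        values (uuid_generate_v4(), uuid_generate_v4(), 'ok: has a project');       -- succeeds
    insert into demo_booking_item (uuid, caption)
        values (uuid_generate_v4(), 'rejected: neither project nor parent item');   -- violates the CHECK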
src/main/resources/db/changelog/9-hs-global/9800-cleanup.sql (new file, 38 lines)

@@ -0,0 +1,38 @@
--liquibase formatted sql

-- ============================================================================
--changeset michael.hoennig:hs-global-office-test-ddl-cleanup context:hosting-asset-import endDelimiter:--//
-- ----------------------------------------------------------------------------

DROP PROCEDURE IF EXISTS hs_office.bankaccount_create_test_data(IN givenholder character varying, IN giveniban character varying, IN givenbic character varying);
DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN contcaption character varying);
DROP PROCEDURE IF EXISTS hs_office.contact_create_test_data(IN startcount integer, IN endcount integer);
DROP PROCEDURE IF EXISTS hs_office.coopassettx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character);
DROP PROCEDURE IF EXISTS hs_office.coopsharetx_create_test_data(IN givenpartnernumber numeric, IN givenmembernumbersuffix character);
DROP PROCEDURE IF EXISTS hs_office.debitor_create_test_data(IN withdebitornumbersuffix numeric, IN forpartnerpersonname character varying, IN forbillingcontactcaption character varying, IN withdefaultprefix character varying);
DROP PROCEDURE IF EXISTS hs_office.membership_create_test_data(IN forpartnernumber numeric, IN newmembernumbersuffix character);
DROP PROCEDURE IF EXISTS hs_office.partner_create_test_data(IN mandanttradename character varying, IN newpartnernumber numeric, IN partnerpersonname character varying, IN contactcaption character varying);
DROP PROCEDURE IF EXISTS hs_office.person_create_test_data(IN newpersontype hs_office.persontype, IN newtradename character varying, IN newfamilyname character varying, IN newgivenname character varying);
DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN startcount integer, IN endcount integer);
DROP PROCEDURE IF EXISTS hs_office.relation_create_test_data(IN holderpersonname character varying, IN relationtype hs_office.relationtype, IN anchorpersonname character varying, IN contactcaption character varying, IN mark character varying);
DROP PROCEDURE IF EXISTS hs_office.sepamandate_create_test_data(IN forpartnernumber numeric, IN fordebitorsuffix character, IN foriban character varying, IN withreference character varying);
--//


-- ============================================================================
--changeset michael.hoennig:hs-global-rbac-test-ddl-cleanup context:hosting-asset-import endDelimiter:--//
-- ----------------------------------------------------------------------------

DROP SCHEMA IF EXISTS rbactest CASCADE;
--//


-- ============================================================================
--changeset michael.hoennig:hs-global-rbac-test-dml-cleanup context:hosting-asset-import endDelimiter:--//
-- ----------------------------------------------------------------------------

call base.defineContext('9800-cleanup', null, '${HSADMINNG_SUPERUSER}', null);

DELETE FROM rbac.subject WHERE name='superuser-alex@hostsharing.net';
DELETE FROM rbac.subject WHERE name='superuser-fran@hostsharing.net';
--//
@@ -212,6 +212,10 @@ databaseChangeLog:
      file: db/changelog/9-hs-global/9000-statistics.sql
      context: "!only-office"

  - include:
      file: db/changelog/9-hs-global/9800-cleanup.sql
      context: "without-test-data"

  - include:
      file: db/changelog/9-hs-global/9100-hs-integration-schema.sql
  - include:
(File diff suppressed because it is too large.)
@@ -4,6 +4,8 @@ import com.opencsv.CSVParserBuilder;
import com.opencsv.CSVReader;
import com.opencsv.CSVReaderBuilder;
import lombok.SneakyThrows;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem;
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProject;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAsset;
import net.hostsharing.hsadminng.rbac.context.ContextBasedTest;
import net.hostsharing.hsadminng.persistence.BaseEntity;

@@ -14,6 +16,9 @@ import org.junit.jupiter.api.extension.TestWatcher;
import org.opentest4j.AssertionFailedError;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.AbstractResource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileSystemResource;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import org.springframework.core.io.Resource;
import org.springframework.transaction.support.TransactionTemplate;

@@ -24,6 +29,7 @@ import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.ValidationException;
import jakarta.validation.constraints.NotNull;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;

@@ -116,10 +122,16 @@ public class CsvDataImport extends ContextBasedTest {
return stream(lines.getFirst()).map(String::trim).toArray(String[]::new);
}

public static @NotNull AbstractResource resourceOf(final String sqlFile) {
return new File(sqlFile).exists()
? new FileSystemResource(sqlFile)
: new ClassPathResource(sqlFile);
}

protected Reader resourceReader(@NotNull final String resourcePath) {
try {
return new InputStreamReader(requireNonNull(getClass().getClassLoader().getResourceAsStream(resourcePath)));
} catch (Exception exc) {
return new InputStreamReader(requireNonNull(resourceOf(resourcePath).getInputStream()));
} catch (final Exception exc) {
throw new AssertionFailedError("cannot open '" + resourcePath + "'");
}
}

@@ -155,37 +167,78 @@ public class CsvDataImport extends ContextBasedTest {
return record;
}

public <T extends BaseEntity> T persist(final Integer id, final T entity) {
try {
if (entity instanceof HsHostingAsset ha) {
//noinspection unchecked
return (T) persistViaSql(id, ha);
}
return persistViaEM(id, entity);
} catch (Exception exc) {
errors.add("failed to persist #" + entity.hashCode() + ": " + entity);
errors.add(exc.toString());
}
return entity;
}
@SneakyThrows
public void persistViaSql(final Integer id, final HsBookingProject entity) {
entity.setUuid(UUID.randomUUID());

public <T extends BaseEntity> T persistViaEM(final Integer id, final T entity) {
if (em.contains(entity)) {
return entity;
}
try {
em.persist(entity);
em.flush(); // makes it a bit slower, but produces better error messages
System.out.println("persisted #" + id + " as " + entity.getUuid());
return entity;
} catch (final Exception exc) {
System.err.println("persist failed for #" + id + " as " + entity);
throw exc; // for breakpoints
}
final var query = em.createNativeQuery("""
insert into hs_booking.project(
uuid,
version,
debitorUuid,
caption)
values (
:uuid,
:version,
:debitorUuid,
:caption)
""")
.setParameter("uuid", entity.getUuid())
.setParameter("version", entity.getVersion())
.setParameter("debitorUuid", entity.getDebitor().getUuid())
.setParameter("caption", entity.getCaption());

final var count = query.executeUpdate();
logError(() -> {
assertThat(count).describedAs("persisting BookingProject #" + id + " failed: " + entity).isEqualTo(1);
});
}

@SneakyThrows
public BaseEntity<HsHostingAsset> persistViaSql(final Integer id, final HsHostingAsset entity) {
public void persistViaSql(final Integer id, final HsBookingItem entity) {
if (entity.getUuid() != null) {
return;
}

entity.setUuid(UUID.randomUUID());

final var query = em.createNativeQuery("""
insert into hs_booking.item(
uuid,
version,
type,
projectUuid,
parentItemUuid,
validity,
caption,
resources)
values (
:uuid,
:version,
:type,
:projectUuid,
:parentItemUuid,
:validity,
:caption,
cast(:resources as jsonb))
""")
.setParameter("uuid", entity.getUuid())
.setParameter("version", entity.getVersion())
.setParameter("projectUuid", ofNullable(entity.getProject()).map(BaseEntity::getUuid).orElse(null))
.setParameter("type", entity.getType().name())
.setParameter("parentItemUuid", ofNullable(entity.getParentItem()).map(BaseEntity::getUuid).orElse(null))
.setParameter("validity", entity.getValidity())
.setParameter("caption", entity.getCaption())
.setParameter("resources", entity.getResources().toString().replace("\t", "\\t"));

final var count = query.executeUpdate();
logError(() -> {
assertThat(count).describedAs("persisting BookingItem #" + id + " failed: " + entity).isEqualTo(1);
});
}

@SneakyThrows
public HsHostingAsset persistViaSql(final Integer id, final HsHostingAsset entity) {
if (entity.getUuid() == null) {
entity.setUuid(UUID.randomUUID());
}

@@ -229,7 +282,7 @@ public class CsvDataImport extends ContextBasedTest {

final var count = query.executeUpdate();
logError(() -> {
assertThat(count).isEqualTo(1);
assertThat(count).describedAs("persisting HostingAsset #" + id + " failed: " + entity).isEqualTo(1);
});
return entity;
}

@@ -248,63 +301,22 @@ public class CsvDataImport extends ContextBasedTest {
return json;
}

protected void deleteTestDataFromHsOfficeTables() {
protected void makeSureThatTheImportAdminUserExists() {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
// TODO.perf: could we instead skip creating test-data based on an env var?
em.createNativeQuery("delete from hs_hosting.asset where true").executeUpdate();
em.createNativeQuery("delete from hs_hosting.asset_ex where true").executeUpdate();
em.createNativeQuery("delete from hs_booking.item where true").executeUpdate();
em.createNativeQuery("delete from hs_booking.item_ex where true").executeUpdate();
em.createNativeQuery("delete from hs_booking.project where true").executeUpdate();
em.createNativeQuery("delete from hs_booking.project_ex where true").executeUpdate();
em.createNativeQuery("delete from hs_office.coopassettx where true").executeUpdate();
em.createNativeQuery("delete from hs_office.coopassettx_legacy_id where true").executeUpdate();
em.createNativeQuery("delete from hs_office.coopsharetx where true").executeUpdate();
em.createNativeQuery("delete from hs_office.coopsharetx_legacy_id where true").executeUpdate();
em.createNativeQuery("delete from hs_office.membership where true").executeUpdate();
em.createNativeQuery("delete from hs_office.sepamandate where true").executeUpdate();
em.createNativeQuery("delete from hs_office.sepamandate_legacy_id where true").executeUpdate();
em.createNativeQuery("delete from hs_office.debitor where true").executeUpdate();
em.createNativeQuery("delete from hs_office.bankaccount where true").executeUpdate();
em.createNativeQuery("delete from hs_office.partner where true").executeUpdate();
em.createNativeQuery("delete from hs_office.partner_details where true").executeUpdate();
em.createNativeQuery("delete from hs_office.relation where true").executeUpdate();
em.createNativeQuery("delete from hs_office.contact where true").executeUpdate();
em.createNativeQuery("delete from hs_office.person where true").executeUpdate();
}).assertSuccessful();
}

protected void resetHsOfficeSequences() {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
em.createNativeQuery("alter sequence hs_office.contact_legacy_id_seq restart with 1000000000;").executeUpdate();
em.createNativeQuery("alter sequence hs_office.coopassettx_legacy_id_seq restart with 1000000000;")
context(null);
em.createNativeQuery("""
do language plpgsql $$
declare
admins uuid;
begin
if not exists (select 1 from rbac.subject where name = '${rbacSuperuser}') then
admins = rbac.findRoleId(rbac.global_ADMIN());
call rbac.grantRoleToSubjectUnchecked(admins, admins, rbac.create_subject('${rbacSuperuser}'));
end if;
end;
$$;
""".replace("${rbacSuperuser}", rbacSuperuser))
.executeUpdate();
em.createNativeQuery("alter sequence public.hs_office.coopsharetx_legacy_id_seq restart with 1000000000;")
.executeUpdate();
em.createNativeQuery("alter sequence public.hs_office.partner_legacy_id_seq restart with 1000000000;")
.executeUpdate();
em.createNativeQuery("alter sequence public.hs_office.sepamandate_legacy_id_seq restart with 1000000000;")
.executeUpdate();
});
}

protected void deleteFromTestTables() {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
em.createNativeQuery("delete from rbactest.domain where true").executeUpdate();
em.createNativeQuery("delete from rbactest.package where true").executeUpdate();
em.createNativeQuery("delete from rbactest.customer where true").executeUpdate();
}).assertSuccessful();
}

protected void deleteFromCommonTables() {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
em.createNativeQuery("delete from rbac.subject_rv where name not like 'superuser-%'").executeUpdate();
em.createNativeQuery("delete from base.tx_journal where true").executeUpdate();
em.createNativeQuery("delete from base.tx_context where true").executeUpdate();
}).assertSuccessful();
}
@@ -7,10 +7,12 @@ import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.hash.HashGenerator;
import net.hostsharing.hsadminng.hash.HashGenerator.Algorithm;
import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorEntity;
import net.hostsharing.hsadminng.hs.booking.debitor.HsBookingDebitorRepository;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemRealEntity;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemType;
import net.hostsharing.hsadminng.hs.booking.item.validators.HsBookingItemEntityValidatorRegistry;
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProject;
import net.hostsharing.hsadminng.hs.booking.project.HsBookingProjectRealEntity;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetRealEntity;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType;

@@ -27,13 +29,19 @@ import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.context.annotation.Import;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.orm.jpa.EntityManagerFactoryInfo;
import org.springframework.test.annotation.Commit;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.IDN;
import java.util.ArrayList;

@@ -44,10 +52,12 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;

import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.stream;
import static java.util.Map.entry;
import static java.util.Map.ofEntries;

@@ -76,56 +86,22 @@ import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.UNIX
import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assumptions.assumeThat;
import static org.springframework.util.FileCopyUtils.copyToByteArray;

/*
* This 'test' includes the complete legacy 'office' data import.
*
* There is no code in 'main' because the import is not needed a normal runtime.
* There is some test data in Java resources to verify the data conversion.
* For a real import a main method will be added later
* which reads CSV files from the file system.
*
* When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
*
* In a real Hostsharing environment, these are created via (the old) hsadmin:

CREATE USER hsh99_admin WITH PASSWORD 'password';
CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;

CREATE USER hsh99_restricted WITH PASSWORD 'password';

\c hsh99_hsadminng

GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;

* Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):

CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- maybe something like that is needed for the 2nd user
-- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;

* Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.

* To finally import the office data, run:
*
* gw-importHostingAssets # comes from .aliases file and uses .environment
*/
@Tag("importHostingAssets")
@DataJpaTest(properties = {
"spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importHostingAssetsTC}",
"spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
"spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
"hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
"hsadminng.superuser=${HSADMINNG_SUPERUSER:import-superuser@hostsharing.net}",
"spring.liquibase.enabled=false" // @Sql should go first, Liquibase will be initialized programmatically
})
@DirtiesContext
@Import({ Context.class, JpaAttempt.class })
@ActiveProfiles("without-test-data")
@Import({ Context.class, JpaAttempt.class, LiquibaseConfig.class })
@ActiveProfiles({ "without-test-data", "liquibase-migration", "hosting-asset-import" })
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@ExtendWith(OrderedDependedTestsExtension.class)
public class ImportHostingAssets extends BaseOfficeDataImport {
public class ImportHostingAssets extends CsvDataImport {

private static final Set<String> NOBODY_SUBSTITUTES = Set.of("nomail", "bounce");

@@ -156,13 +132,55 @@ public class ImportHostingAssets extends BaseOfficeDataImport {

final ObjectMapper jsonMapper = new ObjectMapper();

@Autowired
HsBookingDebitorRepository debitorRepo;

@Autowired
LiquibaseMigration liquibase;

@Value("${HSADMINNG_OFFICE_DATA_SQL_FILE:/db/released-only-office-schema-with-import-test-data.sql}")
String officeSchemaAndDataSqlFile;

@Test
@Order(11000)
@SneakyThrows
void liquibaseMigrationForBookingAndHosting() {
executeSqlScript(officeSchemaAndDataSqlFile);
liquibase.assertReferenceStatusAfterRestore(286, "hs-booking-SCHEMA");
makeSureThatTheImportAdminUserExists();
liquibase.runWithContexts("migration", "without-test-data");
liquibase.assertThatCurrentMigrationsGotApplied(331, "hs-booking-SCHEMA");
}

@Test
@Order(11010)
void createBookingProjects() {
debitors.forEach((id, debitor) -> {
bookingProjects.put(id, HsBookingProjectRealEntity.builder()
.caption(debitor.getDefaultPrefix() + " default project")
.debitor(em.find(HsBookingDebitorEntity.class, debitor.getUuid()))

record PartnerLegacyIdMapping(UUID uuid, Integer bp_id) {}
record DebitorRecord(UUID uuid, Integer version, String defaultPrefix) {}

final var partnerLegacyIdMappings = em.createNativeQuery(
"""
select debitor.uuid, pid.bp_id
from hs_office.debitor debitor
join hs_office.relation debitorRel on debitor.debitorReluUid=debitorRel.uuid
join hs_office.relation partnerRel on partnerRel.holderUuid=debitorRel.anchorUuid
join hs_office.partner partner on partner.partnerReluUid=partnerRel.uuid
join hs_office.partner_legacy_id pid on partner.uuid=pid.uuid
""", PartnerLegacyIdMapping.class).getResultList();
//noinspection unchecked
final var debitorUuidToLegacyBpIdMap = ((List<PartnerLegacyIdMapping>) partnerLegacyIdMappings).stream()
.collect(toMap(row -> row.uuid, row -> row.bp_id));
final var debitors = em.createNativeQuery(
"select debitor.uuid, debitor.version, debitor.defaultPrefix from hs_office.debitor debitor",
DebitorRecord.class).getResultList();
//noinspection unchecked
((List<DebitorRecord>) debitors).forEach(debitor -> {
bookingProjects.put(
debitorUuidToLegacyBpIdMap.get(debitor.uuid), HsBookingProjectRealEntity.builder()
.version(debitor.version)
.caption(debitor.defaultPrefix + " default project")
.debitor(em.find(HsBookingDebitorEntity.class, debitor.uuid))
.build());
});
}

@@ -728,9 +746,12 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
if (isImportingControlledTestData()) {
expectError("zonedata dom_owner of mellis.de is old00 but expected to be mim00");
expectError("\nexpected: \"vm1068\"\n but was: \"vm1093\"");
expectError("['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]");
expectError("['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
expectError("['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
expectError(
"['EMAIL_ADDRESS:webmaster@hamburg-west.l-u-g.org.config.target' is expected to match any of [^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$] but 'raoul.lottmann@example.com peter.lottmann@example.com' does not match any]");
expectError(
"['EMAIL_ADDRESS:abuse@mellis.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
expectError(
"['EMAIL_ADDRESS:abuse@ist-im-netz.de.config.target' length is expected to be at min 1 but length of [[]] is 0]");
}
this.assertNoErrors();
}

@@ -738,7 +759,7 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
// --------------------------------------------------------------------------------------------

@Test
@Order(19000)
@Order(19100)
@Commit
void persistBookingProjects() {

@@ -746,12 +767,12 @@ public class ImportHostingAssets extends BaseOfficeDataImport {

jpaAttempt.transacted(() -> {
context(rbacSuperuser);
bookingProjects.forEach(this::persist);
bookingProjects.forEach(this::persistViaSql);
}).assertSuccessful();
}

@Test
@Order(19010)
@Order(19110)
@Commit
void persistBookingItems() {

@@ -1071,13 +1092,14 @@ public class ImportHostingAssets extends BaseOfficeDataImport {

final var haCount = jpaAttempt.transacted(() -> {
context(rbacSuperuser, "hs_booking.project#D-1000300-mimdefaultproject:AGENT");
return (Integer) em.createNativeQuery("select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'", Integer.class)
return (Integer) em.createNativeQuery(
"select count(*) from hs_hosting.asset_rv where type='EMAIL_ADDRESS'",
Integer.class)
.getSingleResult();
}).assertSuccessful().returnedValue();
assertThat(haCount).isEqualTo(68);
}


// ============================================================================================

@Test

@@ -1105,7 +1127,7 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
if (bi.getParentItem() != null) {
persistRecursively(key, HsBookingItemEntityValidatorRegistry.validated(em, bi.getParentItem()));
}
persist(key, HsBookingItemEntityValidatorRegistry.validated(em, bi));
persistViaSql(key, HsBookingItemEntityValidatorRegistry.validated(em, bi));
}

private void persistHostingAssets(final Map<Integer, HsHostingAssetRealEntity> assets) {

@@ -1129,7 +1151,7 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
"'EMAIL_ADDRESS:.*\\.config\\.target' .*"
)
.prepareForSave()
.saveUsing(entity -> persist(entry.getKey(), entity))
.saveUsing(entity -> persistViaSql(entry.getKey(), entity))
.validateContext()
));
}

@@ -1221,9 +1243,7 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
bookingItems.put(packet_id, bookingItem);
final var haType = determineHaType(basepacket_code);

logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || bookingItem.getRelatedProject()
.getDebitor()
.getDefaultPrefix()
logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || defaultPrefix(bookingItem)
.equals("hsh"))
.as("packet.free only supported for Hostsharing-Assets and ManagedWebspace in customer-ManagedServer, but is set for "
+ packet_name)

@@ -1262,14 +1282,14 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
managedWebspace.setParentAsset(parentAsset);

if (parentAsset.getRelatedProject() != managedWebspace.getRelatedProject()
&& managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00 ) {
&& managedWebspace.getRelatedProject().getDebitor().getDebitorNumber() == 10000_00) {
assertThat(managedWebspace.getIdentifier()).startsWith("xyz");
final var hshDebitor = managedWebspace.getBookingItem().getProject().getDebitor();
final var newProject = HsBookingProjectRealEntity.builder()
.debitor(hshDebitor)
.caption(parentAsset.getIdentifier() + " Monitor")
.build();
bookingProjects.put(Collections.max(bookingProjects.keySet())+1, newProject);
bookingProjects.put(Collections.max(bookingProjects.keySet()) + 1, newProject);
managedWebspace.getBookingItem().setProject(newProject);
} else {
managedWebspace.getBookingItem().setParentItem(parentAsset.getBookingItem());

@@ -1278,6 +1298,13 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
});
}

private String defaultPrefix(final HsBookingItem bookingItem) {
return ofNullable(bookingItem.getProject())
.map(HsBookingProject::getDebitor)
.map(HsBookingDebitorEntity::getDefaultPrefix)
.orElse("<no default prefix for BI: " + bookingItem.getCaption() + ">");
}

private void importPacketComponents(final String[] header, final List<String[]> records) {
final var columns = new Columns(header);
records.stream()

@@ -1624,18 +1651,23 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
entry("includes", options.contains("includes")),
entry("letsencrypt", options.contains("letsencrypt")),
entry("multiviews", options.contains("multiviews")),
entry("subdomains", withDefault(rec.getString("valid_subdomain_names"), "*")
entry(
"subdomains", withDefault(rec.getString("valid_subdomain_names"), "*")
.split(",")),
entry("fcgi-php-bin", withDefault(
entry(
"fcgi-php-bin", withDefault(
rec.getString("fcgi_php_bin"),
httpDomainSetupValidator.getProperty("fcgi-php-bin").defaultValue())),
entry("passenger-nodejs", withDefault(
entry(
"passenger-nodejs", withDefault(
rec.getString("passenger_nodejs"),
httpDomainSetupValidator.getProperty("passenger-nodejs").defaultValue())),
entry("passenger-python", withDefault(
entry(
"passenger-python", withDefault(
rec.getString("passenger_python"),
httpDomainSetupValidator.getProperty("passenger-python").defaultValue())),
entry("passenger-ruby", withDefault(
entry(
"passenger-ruby", withDefault(
rec.getString("passenger_ruby"),
httpDomainSetupValidator.getProperty("passenger-ruby").defaultValue()))
))

@@ -1744,7 +1776,8 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
logError(() -> assertThat(vmName).isEqualTo(domUser.getParentAsset().getParentAsset().getIdentifier()));

//noinspection unchecked
zoneData.put("user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream()
zoneData.put(
"user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream()
.map(userRR -> userRR.stream().map(Object::toString).collect(joining(" ")))
.toArray(String[]::new)
);

@@ -1898,10 +1931,10 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
//noinspection unchecked
return ((List<List<?>>) em.createNativeQuery(
"""
SELECT li.* FROM hs_hosting.asset_legacy_id li
JOIN hs_hosting.asset ha ON ha.uuid=li.uuid
WHERE CAST(ha.type AS text)=:type
ORDER BY legacy_id
select li.* from hs_hosting.asset_legacy_id li
join hs_hosting.asset ha on ha.uuid=li.uuid
where cast(ha.type as text)=:type
order by legacy_id
""",
List.class)
.setParameter("type", type.name())

@@ -1913,10 +1946,10 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
//noinspection unchecked
return ((List<List<?>>) em.createNativeQuery(
"""
SELECT ha.uuid, ha.type, ha.identifier FROM hs_hosting.asset ha
JOIN hs_hosting.asset_legacy_id li ON li.uuid=ha.uuid
WHERE li.legacy_id is null AND CAST(ha.type AS text)=:type
ORDER BY li.legacy_id
select ha.uuid, ha.type, ha.identifier from hs_hosting.asset ha
join hs_hosting.asset_legacy_id li on li.uuid=ha.uuid
where li.legacy_id is null and cast(ha.type as text)=:type
order by li.legacy_id
""",
List.class)
.setParameter("type", type.name())

@@ -1924,4 +1957,17 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
.map(row -> row.stream().map(Object::toString).collect(joining(", ")))
.collect(joining("\n"));
}

@SneakyThrows
private void executeSqlScript(final String sqlFile) {
jpaAttempt.transacted(() -> {
try (InputStream resourceStream = resourceOf(sqlFile).getInputStream()) {
final var sqlScript = new String(copyToByteArray(resourceStream), UTF_8);
final var emf = (EntityManagerFactoryInfo) em.getEntityManagerFactory();
new JdbcTemplate(emf.getDataSource()).execute(sqlScript);
} catch (IOException e) {
throw new RuntimeException(e);
}
}).assertSuccessful();
}
}
@ -1,61 +0,0 @@
package net.hostsharing.hsadminng.hs.migration;

import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.context.annotation.Import;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;

/*
* This 'test' includes the complete legacy 'office' data import.
*
* There is no code in 'main' because the import is not needed a normal runtime.
* There is some test data in Java resources to verify the data conversion.
* For a real import a main method will be added later
* which reads CSV files from the file system.
*
* When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
*
* In a real Hostsharing environment, these are created via (the old) hsadmin:

CREATE USER hsh99_admin WITH PASSWORD 'password';
CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;

CREATE USER hsh99_restricted WITH PASSWORD 'password';

\c hsh99_hsadminng

GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;

* Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):

CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- maybe something like that is needed for the 2nd user
-- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;

* Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.

* To finally import the office data, run:
*
* gw-importOfficeTables # comes from .aliases file and uses .environment
*/
@Tag("importOfficeData")
@DataJpaTest(properties = {
"spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///importOfficeDataTC}",
"spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
"spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
"hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
})
@ActiveProfiles("without-test-data")
@DirtiesContext
@Import({ Context.class, JpaAttempt.class })
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@ExtendWith(OrderedDependedTestsExtension.class)
public class ImportOfficeData extends BaseOfficeDataImport {
}

@ -1,33 +1,17 @@
package net.hostsharing.hsadminng.hs.migration;

import liquibase.Liquibase;
import lombok.SneakyThrows;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.context.annotation.Import;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.jdbc.Sql;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.jdbc.ContainerDatabaseDriver;

import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import javax.sql.DataSource;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.commons.io.FileUtils.readFileToString;
import static org.apache.commons.io.FileUtils.write;
import static org.apache.commons.io.FileUtils.writeStringToFile;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS;

// BLOG: Liquibase-migration-test (not before the reference-SQL-dump-generation is simplified)
@ -40,9 +24,9 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE
* <p>The test works as follows:</p>
*
* <ol>
* <li>the database is initialized by `db/prod-only-office-schema-with-test-data.sql` from the test-resources</li>
* <li>the database is initialized by `db/released-only-office-schema-with-test-data.sql` from the test-resources</li>
* <li>the current Liquibase-migrations (only-office but with-test-data) are performed</li>
* <li>a new dump is written to `db/prod-only-office-schema-with-test-data.sql` in the build-directory</li>
* <li>a new dump is written to `db/released-only-office-schema-with-test-data.sql` in the build-directory</li>
* <li>an extra Liquibase-changeset (liquibase-migration-test) is applied</li>
* <li>it's asserted that the extra changeset got applied</li>
* </ol>
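Condensed, the workflow in the list above boils down to the following sketch. It uses the helpers introduced further down in this compare (LiquibaseMigration, PostgresTestcontainer) and constant values copied from the diff; the exact wiring in the test itself may differ slightly:

    // A condensed sketch, not the verbatim test code; assumes the LiquibaseMigration bean and
    // the ${spring.datasource.url} value are injected as in LiquibaseCompatibilityIntegrationTest below.
    void migrationCompatibilitySketch(LiquibaseMigration liquibase, String jdbcUrl) throws Exception {
        // the released reference dump was already restored via the @Sql annotation before the test class
        final var initialCount = liquibase.assertReferenceStatusAfterRestore(
                287, "hs-global-liquibase-migration-test");
        liquibase.runWithContexts("only-office", "with-test-data");   // apply current changelogs
        PostgresTestcontainer.dump(jdbcUrl, new java.io.File(
                "build/db/released-only-office-schema-with-test-data.sql"));
        liquibase.runWithContexts("liquibase-migration-test");        // apply the extra test changeset
        liquibase.assertThatCurrentMigrationsGotApplied(
                initialCount, "hs-global-liquibase-migration-test");
    }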
@ -58,123 +42,31 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TE
@DirtiesContext
@ActiveProfiles("liquibase-migration-test")
@Import(LiquibaseConfig.class)
@Sql(value = "/db/prod-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS)
@Sql(value = "/db/released-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS) // release-schema
public class LiquibaseCompatibilityIntegrationTest {

private static final String EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION = "hs-global-liquibase-migration-test";
private static final int EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP = 287;

@Value("${spring.datasource.url}")
private String jdbcUrl;

@Autowired
private DataSource dataSource;

@Autowired
private Liquibase liquibase;

@PersistenceContext
private EntityManager em;
private LiquibaseMigration liquibase;

@Test
void migrationWorksBasedOnAPreviouslyPopulatedSchema() {
// check the initial status from the @Sql-annotation
final var initialChangeSetCount = assertProdReferenceStatusAfterRestore();
final var initialChangeSetCount = liquibase.assertReferenceStatusAfterRestore(
EXPECTED_LIQUIBASE_CHANGELOGS_IN_PROD_SCHEMA_DUMP, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);

// run the current migrations and dump the result to the build-directory
runLiquibaseMigrationsWithContexts("only-office", "with-test-data");
dumpTo(new File("build/db/prod-only-office-schema-with-test-data.sql"));
liquibase.runWithContexts("only-office", "with-test-data");
PostgresTestcontainer.dump(jdbcUrl, new File("build/db/released-only-office-schema-with-test-data.sql"));

// then add another migration and assert if it was applied
runLiquibaseMigrationsWithContexts("liquibase-migration-test");
assertThatCurrentMigrationsGotApplied(initialChangeSetCount);
}

private int assertProdReferenceStatusAfterRestore() {
final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'");
assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock");

final var liquibaseScripts1 = singleColumnSqlQuery("SELECT * FROM public.databasechangelog");
assertThat(liquibaseScripts1).hasSizeGreaterThan(285);
assertThat(liquibaseScripts1).doesNotContain(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
final var initialChangeSetCount = liquibaseScripts1.size();
return initialChangeSetCount;
}

private void assertThatCurrentMigrationsGotApplied(final int initialChangeSetCount) {
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount);
assertThat(liquibaseScripts).contains(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
}

@SneakyThrows
private void dumpTo(final File targetFileName) {
makeDir(targetFileName.getParentFile());

final var jdbcDatabaseContainer = getJdbcDatabaseContainer();

final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName());
final var pb = new ProcessBuilder(
"pg_dump", "--column-inserts", "--disable-dollar-quoting",
"--host=" + jdbcDatabaseContainer.getHost(),
"--port=" + jdbcDatabaseContainer.getFirstMappedPort(),
"--username=" + jdbcDatabaseContainer.getUsername() ,
"--dbname=" + jdbcDatabaseContainer.getDatabaseName(),
"--file=" + sqlDumpFile.getCanonicalPath()
);
pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword());

final var process = pb.start();
int exitCode = process.waitFor();
final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()))
.lines().collect(Collectors.joining("\n"));
assertThat(exitCode).describedAs(stderr).isEqualTo(0);

final var header = """
-- =================================================================================
-- Generated reference-SQL-dump (hopefully of latest prod-release).
-- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest
-- ---------------------------------------------------------------------------------

--
-- Explicit pre-initialization because we cannot use `pg_dump --create ...`
-- because the database is already created by Testcontainers.
--

CREATE ROLE postgres;
CREATE ROLE admin;
CREATE ROLE restricted;

""";
writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite

write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true);

assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted");
}

private void makeDir(final File dir) {
assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue();
assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue();
}

@SneakyThrows
private void runLiquibaseMigrationsWithContexts(final String... contexts) {
liquibase.update(
new liquibase.Contexts(contexts),
new liquibase.LabelExpression());
}

private List<String> singleColumnSqlQuery(final String sql) {
//noinspection unchecked
final var rows = (List<Object>) em.createNativeQuery(sql).getResultList();
return rows.stream().map(Objects::toString).toList();
}

@SneakyThrows
private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer() {
final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class);
getContainerMethod.setAccessible(true);

@SuppressWarnings("rawtypes")
final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null,
"jdbc:tc:postgresql:15.5-bookworm:///liquibaseMigrationTestTC");
return container;
liquibase.runWithContexts("liquibase-migration-test");
liquibase.assertThatCurrentMigrationsGotApplied(
initialChangeSetCount, EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
}
}

@ -1,28 +1,27 @@
package net.hostsharing.hsadminng.hs.migration;

import liquibase.Liquibase;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.ClassLoaderResourceAccessor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;

import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import javax.sql.DataSource;

@Configuration
@Profile("liquibase-migration-test")
@Profile({"liquibase-migration", "liquibase-migration-test"})
public class LiquibaseConfig {

@PersistenceContext
private EntityManager em;

@Bean
public Liquibase liquibase(DataSource dataSource) throws Exception {
public LiquibaseMigration liquibase(DataSource dataSource) throws Exception {
final var connection = dataSource.getConnection();
final var database = DatabaseFactory.getInstance()
.findCorrectDatabaseImplementation(new JdbcConnection(connection));
return new Liquibase(
"db/changelog/db.changelog-master.yaml", // Path to your Liquibase changelog
new ClassLoaderResourceAccessor(),
database
);
return new LiquibaseMigration(em, "db/changelog/db.changelog-master.yaml", database);
}
}

@ -0,0 +1,55 @@
package net.hostsharing.hsadminng.hs.migration;

import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.resource.ClassLoaderResourceAccessor;
import lombok.SneakyThrows;

import jakarta.persistence.EntityManager;
import java.util.List;
import java.util.Objects;

import static org.assertj.core.api.Assertions.assertThat;

public class LiquibaseMigration extends Liquibase {

private final EntityManager em;

public LiquibaseMigration(final EntityManager em, final String changeLogFile, final Database db) {
super(changeLogFile, new ClassLoaderResourceAccessor(), db);
this.em = em;
}

@SneakyThrows
public void runWithContexts(final String... contexts) {
update(
new liquibase.Contexts(contexts),
new liquibase.LabelExpression());
}

public int assertReferenceStatusAfterRestore(
final int minExpectedLiquibaseChangelogs,
final String expectedChangesetOnlyAfterNewMigration) {
final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'");
assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock");

final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
assertThat(liquibaseScripts).hasSize(minExpectedLiquibaseChangelogs);
assertThat(liquibaseScripts).doesNotContain(expectedChangesetOnlyAfterNewMigration);
return liquibaseScripts.size();
}

public void assertThatCurrentMigrationsGotApplied(
final int initialChangeSetCount,
final String expectedChangesetOnlyAfterNewMigration) {
final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount);
assertThat(liquibaseScripts).contains(expectedChangesetOnlyAfterNewMigration);
}

private List<String> singleColumnSqlQuery(final String sql) {
//noinspection unchecked
final var rows = (List<Object>) em.createNativeQuery(sql).getResultList();
return rows.stream().map(Objects::toString).toList();
}
}

@ -0,0 +1,81 @@
package net.hostsharing.hsadminng.hs.migration;

import lombok.SneakyThrows;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.jdbc.ContainerDatabaseDriver;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.stream.Collectors;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.commons.io.FileUtils.readFileToString;
import static org.apache.commons.io.FileUtils.write;
import static org.apache.commons.io.FileUtils.writeStringToFile;
import static org.assertj.core.api.Assertions.assertThat;

public class PostgresTestcontainer {

@SneakyThrows
public static void dump(final String jdbcUrl, final File targetFileName) {
makeDir(targetFileName.getParentFile());

final var jdbcDatabaseContainer = getJdbcDatabaseContainer(jdbcUrl);

final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName());
final var pb = new ProcessBuilder(
"pg_dump", "--column-inserts", "--disable-dollar-quoting",
"--host=" + jdbcDatabaseContainer.getHost(),
"--port=" + jdbcDatabaseContainer.getFirstMappedPort(),
"--username=" + jdbcDatabaseContainer.getUsername() ,
"--dbname=" + jdbcDatabaseContainer.getDatabaseName(),
"--file=" + sqlDumpFile.getCanonicalPath()
);
pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword());

final var process = pb.start();
int exitCode = process.waitFor();
final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()))
.lines().collect(Collectors.joining("\n"));
assertThat(exitCode).describedAs(stderr).isEqualTo(0);

final var header = """
-- =================================================================================
-- Generated reference-SQL-dump (hopefully of latest prod-release).
-- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest
-- ---------------------------------------------------------------------------------

--
-- Explicit pre-initialization because we cannot use `pg_dump --create ...`
-- because the database is already created by Testcontainers.
--

CREATE ROLE postgres;
CREATE ROLE admin;
CREATE ROLE restricted;

""";
writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite

write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true);

assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted");
}

private static void makeDir(final File dir) {
assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue();
assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue();
}

@SneakyThrows
private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer(final String jdbcUrl) {
// TODO.test: check if, in the future, there is a better way to access auto-created Testcontainers
final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class);
getContainerMethod.setAccessible(true);

@SuppressWarnings("rawtypes")
final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null, jdbcUrl);
return container;
}
}

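The helper above locates the running Testcontainers instance solely from a jdbc:tc: URL (via the reflective ContainerDatabaseDriver lookup), so a caller only needs the datasource URL that Spring is already using. A minimal usage sketch, with an example URL and target path taken from this compare; note that pg_dump has to be on the PATH of the test process, since the helper shells out to it:

    // Minimal usage sketch; the URL is only an example value, in the test it comes from ${spring.datasource.url}.
    final var jdbcUrl = "jdbc:tc:postgresql:15.5-bookworm:///liquibaseMigrationTestTC";
    PostgresTestcontainer.dump(jdbcUrl, new java.io.File("build/db/released-only-office-schema-with-test-data.sql"));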
@ -86,7 +86,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101",
"memberNumberSuffix": "01",
"validFrom": "2022-10-01",
"validTo": null,
"validTo": "2024-12-30",
"status": "ACTIVE"
},
{
@ -94,7 +94,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000202",
"memberNumberSuffix": "02",
"validFrom": "2022-10-01",
"validTo": null,
"validTo": "2025-12-31",
"status": "ACTIVE"
},
{
@ -133,7 +133,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101",
"memberNumberSuffix": "01",
"validFrom": "2022-10-01",
"validTo": null,
"validTo": "2024-12-30",
"status": "ACTIVE"
}
]
@ -161,7 +161,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000202",
"memberNumberSuffix": "02",
"validFrom": "2022-10-01",
"validTo": null,
"validTo": "2025-12-31",
"status": "ACTIVE"
}
]
@ -177,7 +177,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
void globalAdmin_canAddMembership() {

context.define("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("Third").get(0);
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").getFirst();
final var givenMemberSuffix = TEMP_MEMBER_NUMBER_SUFFIX;
final var expectedMemberNumber = Integer.parseInt(givenPartner.getPartnerNumber() + TEMP_MEMBER_NUMBER_SUFFIX);

@ -189,7 +189,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
{
"partner.uuid": "%s",
"memberNumberSuffix": "%s",
"validFrom": "2022-10-13",
"validFrom": "2025-02-13",
"membershipFeeBillable": "true"
}
""".formatted(givenPartner.getUuid(), givenMemberSuffix))
@ -200,10 +200,10 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.statusCode(201)
.contentType(ContentType.JSON)
.body("uuid", isUuidValid())
.body("partner.partnerNumber", is("P-10003"))
.body("partner.partnerNumber", is("P-10001"))
.body("memberNumber", is("M-" + expectedMemberNumber))
.body("memberNumberSuffix", is(givenMemberSuffix))
.body("validFrom", is("2022-10-13"))
.body("validFrom", is("2025-02-13"))
.body("validTo", equalTo(null))
.header("Location", startsWith("http://localhost"))
.extract().header("Location"); // @formatter:on
@ -239,7 +239,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
"memberNumber": "M-1000101",
"memberNumberSuffix": "01",
"validFrom": "2022-10-01",
"validTo": null,
"validTo": "2024-12-30",
"status": "ACTIVE"
}
""")); // @formatter:on
@ -297,13 +297,13 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
context.define("superuser-alex@hostsharing.net");
final var givenMembership = givenSomeTemporaryMembershipBessler("First");

final var location = RestAssured // @formatter:off
RestAssured // @formatter:off
.given()
.header("current-subject", "superuser-alex@hostsharing.net")
.contentType(ContentType.JSON)
.body("""
{
"validTo": "2023-12-31",
"validTo": "2025-12-31",
"status": "CANCELLED"
}
""")
@ -316,8 +316,8 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.body("uuid", isUuidValid())
.body("partner.partnerNumber", is("P-" + givenMembership.getPartner().getPartnerNumber()))
.body("memberNumberSuffix", is(givenMembership.getMemberNumberSuffix()))
.body("validFrom", is("2022-11-01"))
.body("validTo", is("2023-12-31"))
.body("validFrom", is("2025-02-01"))
.body("validTo", is("2025-12-31"))
.body("status", is("CANCELLED"));
// @formatter:on

@ -326,7 +326,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.matches(mandate -> {
assertThat(mandate.getPartner().toShortString()).isEqualTo("P-10001");
assertThat(mandate.getMemberNumberSuffix()).isEqualTo(givenMembership.getMemberNumberSuffix());
assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2024-01-01)");
assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2026-01-01)");
assertThat(mandate.getStatus()).isEqualTo(CANCELLED);
return true;
});
@ -348,7 +348,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
.contentType(ContentType.JSON)
.body("""
{
"validTo": "2024-01-01",
"validTo": "2025-12-31",
"status": "CANCELLED"
}
""")
@ -361,7 +361,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
// finally, the Membership is actually updated
assertThat(membershipRepo.findByUuid(givenMembership.getUuid())).isPresent().get()
.matches(mandate -> {
assertThat(mandate.getValidity().asString()).isEqualTo("[2022-11-01,2024-01-02)");
assertThat(mandate.getValidity().asString()).isEqualTo("[2025-02-01,2026-01-01)");
assertThat(mandate.getStatus()).isEqualTo(CANCELLED);
return true;
});
@ -434,7 +434,7 @@ class HsOfficeMembershipControllerAcceptanceTest extends ContextBasedTestWithCle
final var newMembership = HsOfficeMembershipEntity.builder()
.partner(givenPartner)
.memberNumberSuffix(TEMP_MEMBER_NUMBER_SUFFIX)
.validity(Range.closedInfinite(LocalDate.parse("2022-11-01")))
.validity(Range.closedInfinite(LocalDate.parse("2025-02-01")))
.status(ACTIVE)
.membershipFeeBillable(true)
.build();
||||
|
@ -4,19 +4,20 @@ import io.hypersistence.utils.hibernate.type.range.Range;
import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.hs.office.debitor.HsOfficeDebitorRepository;
import net.hostsharing.hsadminng.hs.office.partner.HsOfficePartnerRealRepository;
import net.hostsharing.hsadminng.rbac.test.ContextBasedTestWithCleanup;
import net.hostsharing.hsadminng.mapper.Array;
import net.hostsharing.hsadminng.rbac.grant.RawRbacGrantRepository;
import net.hostsharing.hsadminng.rbac.role.RawRbacRoleRepository;
import net.hostsharing.hsadminng.mapper.Array;
import net.hostsharing.hsadminng.rbac.test.ContextBasedTestWithCleanup;
import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.postgresql.util.PSQLException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import org.springframework.context.annotation.Import;
import org.springframework.orm.jpa.JpaSystemException;
import org.springframework.test.context.bean.override.mockito.MockitoBean;

import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
@ -31,7 +32,7 @@ import static net.hostsharing.hsadminng.rbac.test.JpaAttempt.attempt;
import static org.assertj.core.api.Assertions.assertThat;

@DataJpaTest
@Import( { Context.class, JpaAttempt.class })
@Import({ Context.class, JpaAttempt.class })
@Tag("officeIntegrationTest")
class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCleanup {

@ -70,11 +71,12 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0);

// when
final var result = attempt(em, () -> {
final var result = attempt(
em, () -> {
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix("11")
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.validity(Range.closedInfinite(LocalDate.parse("2025-01-01")))
.membershipFeeBillable(true)
.build();
return toCleanup(membershipRepo.save(newMembership).load());
@ -87,6 +89,31 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(membershipRepo.count()).isEqualTo(count + 1);
}

@Test
public void creatingMembershipForSamePartnerIsDisallowedIfAnotherOneIsStillActive() {
// given
context("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").getFirst();

// when
final var result = attempt(
em, () -> {
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix("11")
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2024-01-01")))
.membershipFeeBillable(true)
.build();
return toCleanup(membershipRepo.save(newMembership).load());
});

// then
result.assertExceptionWithRootCauseMessage(
PSQLException.class,
"Membership validity ranges overlap for partnerUuid " + givenPartner.getUuid() +
", partnerNumber " + givenPartner.getPartnerNumber());
}

@Test
public void createsAndGrantsRoles() {
// given
@ -97,12 +124,13 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
.toList();

// when
attempt(em, () -> {
attempt(
em, () -> {
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike("First").get(0);
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix("17")
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.validity(Range.closedInfinite(LocalDate.parse("2025-01-01")))
.membershipFeeBillable(true)
.build();
return toCleanup(membershipRepo.save(newMembership));
@ -145,7 +173,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl

private void assertThatMembershipIsPersisted(final HsOfficeMembershipEntity saved) {
final var found = membershipRepo.findByUuid(saved.getUuid());
assertThat(found).isNotEmpty().get().extracting(Object::toString).isEqualTo(saved.toString()) ;
assertThat(found).isNotEmpty().get().extracting(Object::toString).isEqualTo(saved.toString());
}
}

@ -163,8 +191,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
// then
exactlyTheseMembershipsAreReturned(
result,
"Membership(M-1000101, P-10001, [2022-10-01,), ACTIVE)",
"Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)",
"Membership(M-1000101, P-10001, [2022-10-01,2024-12-31), ACTIVE)",
"Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)",
"Membership(M-1000303, P-10003, [2022-10-01,), ACTIVE)");
}

@ -178,8 +206,9 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var result = membershipRepo.findMembershipsByPartnerUuid(givenPartner.getUuid());

// then
exactlyTheseMembershipsAreReturned(result,
"Membership(M-1000101, P-10001, [2022-10-01,), ACTIVE)");
exactlyTheseMembershipsAreReturned(
result,
"Membership(M-1000101, P-10001, [2022-10-01,2024-12-31), ACTIVE)");
}

@Test
@ -194,7 +223,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(result)
.isNotNull()
.extracting(Object::toString)
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)");
}

@Test
@ -209,7 +238,7 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
assertThat(result)
.isNotNull()
.extracting(Object::toString)
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
.isEqualTo("Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)");
}

@Test
@ -221,8 +250,9 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
final var result = membershipRepo.findMembershipsByPartnerNumber(10002);

// then
exactlyTheseMembershipsAreReturned(result,
"Membership(M-1000202, P-10002, [2022-10-01,), ACTIVE)");
exactlyTheseMembershipsAreReturned(
result,
"Membership(M-1000202, P-10002, [2022-10-01,2026-01-01), ACTIVE)");
}
}

@ -273,7 +303,8 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
});

// then
result.assertExceptionWithRootCauseMessage(JpaSystemException.class,
result.assertExceptionWithRootCauseMessage(
JpaSystemException.class,
"[403] Subject ", " is not allowed to update hs_office.membership uuid");
}

@ -381,14 +412,16 @@ class HsOfficeMembershipRepositoryIntegrationTest extends ContextBasedTestWithCl
"[creating Membership test-data, hs_office.membership, INSERT, 03]");
}

private HsOfficeMembershipEntity givenSomeTemporaryMembership(final String partnerTradeName, final String memberNumberSuffix) {
private HsOfficeMembershipEntity givenSomeTemporaryMembership(
final String partnerTradeName,
final String memberNumberSuffix) {
return jpaAttempt.transacted(() -> {
context("superuser-alex@hostsharing.net");
final var givenPartner = partnerRepo.findPartnerByOptionalNameLike(partnerTradeName).get(0);
final var newMembership = HsOfficeMembershipEntity.builder()
.memberNumberSuffix(memberNumberSuffix)
.partner(givenPartner)
.validity(Range.closedInfinite(LocalDate.parse("2020-01-01")))
.validity(Range.closedInfinite(LocalDate.parse("2025-02-01")))
.membershipFeeBillable(true)
.build();

@ -287,12 +287,12 @@ class HsOfficeScenarioTests extends ScenarioTest {

@Test
@Order(2011)
@Requires("Person: Test AG")
@Produces("Debitor: D-3101001 - Test AG - main debitor")
void shouldCreateExternalDebitorForPartner() {
new CreateExternalDebitorForPartner(scenarioTest)
@Requires("Debitor: D-3101000 - Test AG - main debitor")
@Produces("Debitor: D-3101001 - Test AG - additional debitor")
void shouldCreateAdditionDebitorForPartner() {
new CreateSelfDebitorForPartner(scenarioTest)
.given("partnerPersonTradeName", "Test AG")
.given("billingContactCaption", "Billing GmbH - billing department")
.given("billingContactCaption", "Test AG - billing department")
.given("billingContactEmailAddress", "billing@test-ag.example.org")
.given("debitorNumberSuffix", "01")
.given("billable", true)
@ -305,10 +305,30 @@ class HsOfficeScenarioTests extends ScenarioTest {
.keep();
}

@Test
@Order(2012)
@Requires("Person: Test AG")
@Produces("Debitor: D-3101002 - Test AG - external debitor")
void shouldCreateExternalDebitorForPartner() {
new CreateExternalDebitorForPartner(scenarioTest)
.given("partnerPersonTradeName", "Test AG")
.given("billingContactCaption", "Billing GmbH - billing department")
.given("billingContactEmailAddress", "billing@test-ag.example.org")
.given("debitorNumberSuffix", "02")
.given("billable", true)
.given("vatId", "VAT123456")
.given("vatCountryCode", "DE")
.given("vatBusiness", true)
.given("vatReverseCharge", false)
.given("defaultPrefix", "tsy")
.doRun()
.keep();
}

@Test
@Order(2020)
@Requires("Person: Test AG")
@Produces(explicitly = "Debitor: D-3101000 - Test AG - delete debitor", permanent = false)
@Produces(explicitly = "Debitor: D-3101002 - Test AG - delete debitor", permanent = false)
void shouldDeleteDebitor() {
new DeleteDebitor(scenarioTest)
.given("partnerNumber", "P-31020")
@ -317,7 +337,7 @@ class HsOfficeScenarioTests extends ScenarioTest {
}

@Test
@Order(2020)
@Order(2021)
@Requires("Debitor: D-3101000 - Test AG - main debitor")
@Disabled("see TODO.spec in DontDeleteDefaultDebitor")
void shouldNotDeleteDefaultDebitor() {
@ -387,7 +407,7 @@ class HsOfficeScenarioTests extends ScenarioTest {
void shouldCreateMembershipForPartner() {
new CreateMembership(scenarioTest)
.given("partnerName", "Test AG")
.given("validFrom", "2024-10-15")
.given("validFrom", "2020-10-15")
.given("newStatus", "ACTIVE")
.given("membershipFeeBillable", "true")
.doRun()
@ -395,14 +415,31 @@ class HsOfficeScenarioTests extends ScenarioTest {
}

@Test
@Order(4090)
@Order(4080)
@Requires("Membership: M-3101000 - Test AG")
@Produces("Membership: M-3101000 - Test AG - cancelled")
void shouldCancelMembershipOfPartner() {
new CancelMembership(scenarioTest)
.given("memberNumber", "M-3101000")
.given("validTo", "2025-12-30")
.given("validTo", "2023-12-31")
.given("newStatus", "CANCELLED")
.doRun();
.doRun()
.keep();
}

@Test
@Order(4090)
@Requires("Membership: M-3101000 - Test AG - cancelled")
@Produces("Membership: M-3101001 - Test AG")
void shouldCreateSubsequentMembershipOfPartner() {
new CreateMembership(scenarioTest)
.given("partnerName", "Test AG")
.given("memberNumberSuffix", "01")
.given("validFrom", "2025-02-24")
.given("newStatus", "ACTIVE")
.given("membershipFeeBillable", "true")
.doRun()
.keep();
}
}

@ -19,7 +19,7 @@ public class DeleteDebitor extends UseCase<DeleteDebitor> {
.given("vatCountryCode", "DE")
.given("vatBusiness", true)
.given("vatReverseCharge", false)
.given("defaultPrefix", "tsy"));
.given("defaultPrefix", "tsz"));
}

@Override

@ -1,6 +1,6 @@
package net.hostsharing.hsadminng.rbac.test;

import org.assertj.core.api.ObjectAssert;
import lombok.SneakyThrows;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.NestedExceptionUtils;
import org.springframework.stereotype.Service;
@ -78,9 +78,9 @@ public class JpaAttempt {
public static class JpaResult<T> {

private final T value;
private final RuntimeException exception;
private final Throwable exception;

private JpaResult(final T value, final RuntimeException exception) {
private JpaResult(final T value, final Throwable exception) {
this.value = value;
this.exception = exception;
}
@ -93,7 +93,7 @@ public class JpaAttempt {
return new JpaResult<>(value, null);
}

public static <T> JpaResult<T> forException(final RuntimeException exception) {
public static <T> JpaResult<T> forException(final Throwable exception) {
return new JpaResult<>(null, exception);
}

@ -105,20 +105,23 @@ public class JpaAttempt {
return value;
}

public ObjectAssert<T> assertThatResult() {
assertSuccessful();
return assertThat(returnedValue());
}

public RuntimeException caughtException() {
public Throwable caughtException() {
return exception;
}

@SuppressWarnings("unchecked")
public <E extends RuntimeException> E caughtException(final Class<E> expectedExceptionClass) {
public <E extends Throwable> E caughtException(final Class<E> expectedExceptionClass) {
//noinspection unchecked
return caughtException((E) exception, expectedExceptionClass);
}

public static <E extends Throwable> E caughtException(final Throwable exception, final Class<E> expectedExceptionClass) {
if (expectedExceptionClass.isAssignableFrom(exception.getClass())) {
//noinspection unchecked
return (E) exception;
}
if(exception.getCause() != null && exception.getCause() != exception ) {
return caughtException(exception.getCause(), expectedExceptionClass);
}
throw new AssertionError("expected " + expectedExceptionClass + " but got " + exception);
}

@ -127,7 +130,7 @@ public class JpaAttempt {
}

public void assertExceptionWithRootCauseMessage(
final Class<? extends RuntimeException> expectedExceptionClass,
final Class<? extends Throwable> expectedExceptionClass,
final String... expectedRootCauseMessages) {
assertThat(wasSuccessful()).as("wasSuccessful").isFalse();
final String firstRootCauseMessageLine = firstRootCauseMessageLineOf(caughtException(expectedExceptionClass));
@ -136,11 +139,11 @@ public class JpaAttempt {
}
}

public JpaResult<T> reThrowException() {
@SneakyThrows
public void reThrowException() {
if (exception != null) {
throw exception;
}
return this;
}

public JpaResult<T> assumeSuccessful() {
@ -158,9 +161,9 @@ public class JpaAttempt {
return this;
}

private String firstRootCauseMessageLineOf(final RuntimeException exception) {
private String firstRootCauseMessageLineOf(final Throwable exception) {
final var rootCause = NestedExceptionUtils.getRootCause(exception);
return Optional.ofNullable(rootCause)
return Optional.ofNullable(rootCause != null ? rootCause : exception)
.map(Throwable::getMessage)
.map(message -> message.split("\\r|\\n|\\r\\n", 0)[0])
.orElse(null);
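With the JpaAttempt signatures above broadened from RuntimeException to Throwable, root-cause assertions now also work for checked exceptions such as PSQLException, which is exactly what the new overlap test in HsOfficeMembershipRepositoryIntegrationTest relies on. A minimal usage sketch (names as in this compare, the message value is illustrative):

    // assumes a JpaAttempt.JpaResult obtained from a failed attempt(...) call, as in the repository test above
    result.assertExceptionWithRootCauseMessage(
            PSQLException.class,                       // a checked exception, not a RuntimeException
            "Membership validity ranges overlap for partnerUuid ");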
File diff suppressed because it is too large