Compare commits

..

2 Commits

Author             SHA1        Message                                    Date
Timotheus Pokorra  e8e3b24fb9  fix name of changeset for mlmmj            2025-01-29 09:24:35 +01:00
Timotheus Pokorra  0d0a913466  create database only with office tables   2025-01-23 09:39:02 +01:00
14 changed files with 44 additions and 17535 deletions


@@ -523,7 +523,7 @@ Classes to be scanned, tests to be executed and thresholds are configured in [bu
 A report is generated under [build/reports/pitest/index.html](./build/reports/pitest/index.html).
 A link to the report is also printed after the `pitest` run.
-<!-- TODO.test: This task is also executed as part of `gw check`. -->
+This task is also executed as part of `gw check`.
 #### Remark
@@ -562,7 +562,7 @@ In case of suppression, a note must be added to explain why it does not apply to
 See also: https://jeremylong.github.io/DependencyCheck/dependency-check-gradle/index.html.
-### How to Check Dependency-License-Compatibility
+### Dependency-License-Compatibility
 The `gw check` phase depends on a dependency-license-compatibility check.
 If any dependency violates the configured [list of allowed licenses](etc/allowed-licenses.json), the build will fail.
@@ -592,7 +592,7 @@ The generated license can be found here: [index.html](build/reports/dependency-l
 More information can be found on the [project's website](https://github.com/jk1/Gradle-License-Report).
-### How to Upgrade Versions of Dependencies
+### Dependency Version Upgrade
 Dependency versions can be automatically upgraded to the latest available version:
@@ -620,8 +620,8 @@ This way we would get rid of all explicit grants within the same DB-row
 and would not need the `rbac.role` table anymore.
 We would also reduce the depth of the expensive recursive CTE-query.
-This has to be explored further. For now, we just keep it in mind and avoid roles+grants
-which would not fit into a simplified system with a fixed role-type-system.
+This has to be explored further.
+For now, we just keep it in mind and FIXME
 ### The Mapper is Error-Prone
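
The RBAC remark in the hunk above (README line 620) is easier to picture with the query shape in mind. A minimal sketch of such a grant-resolution query as a JPA native query; the table and column names (rbac.grant, ascendantuuid, descendantuuid) are illustrative assumptions, not necessarily this repo's actual schema:

    import jakarta.persistence.EntityManager;
    import java.util.List;
    import java.util.UUID;

    // Hypothetical sketch: collect every grant reachable from a role by following
    // role-to-role grants; assumes a table rbac.grant(ascendantuuid, descendantuuid).
    List<?> grantedDescendants(final EntityManager em, final UUID roleUuid) {
        return em.createNativeQuery("""
                WITH RECURSIVE reachable AS (
                    SELECT descendantuuid FROM rbac.grant WHERE ascendantuuid = ?1
                    UNION
                    SELECT g.descendantuuid
                      FROM rbac.grant g
                      JOIN reachable r ON g.ascendantuuid = r.descendantuuid
                )
                SELECT descendantuuid FROM reachable
                """)
                .setParameter(1, roleUuid)
                .getResultList();
    }

Each level of role-to-role grants adds one iteration to this recursion; flattening roles into direct grants, as the README suggests, would shorten exactly that.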


@@ -1,15 +1,15 @@
 plugins {
     id 'java'
     id 'org.springframework.boot' version '3.4.1'
-    id 'io.spring.dependency-management' version '1.1.7' // manages implicit dependencies
-    id 'io.openapiprocessor.openapi-processor' version '2023.2' // generates Controller-interface and resources from API-spec
-    id 'com.github.jk1.dependency-license-report' version '2.9' // checks dependency-license compatibility
-    id "org.owasp.dependencycheck" version "12.0.1" // checks dependencies for known vulnerabilities
-    id "com.diffplug.spotless" version "7.0.2" // formats + checks formatting for source-code
-    id 'jacoco' // determines code-coverage of tests
-    id 'info.solidsoft.pitest' version '1.15.0' // performs mutation testing
-    id 'se.patrikerdes.use-latest-versions' version '0.2.18' // updates module and plugin versions
-    id 'com.github.ben-manes.versions' version '0.52.0' // determines which dependencies have updates
+    id 'io.spring.dependency-management' version '1.1.7'
+    id 'io.openapiprocessor.openapi-processor' version '2023.2'
+    id 'com.github.jk1.dependency-license-report' version '2.9'
+    id "org.owasp.dependencycheck" version "12.0.0"
+    id "com.diffplug.spotless" version "7.0.2"
+    id 'jacoco'
+    id 'info.solidsoft.pitest' version '1.15.0'
+    id 'se.patrikerdes.use-latest-versions' version '0.2.18'
+    id 'com.github.ben-manes.versions' version '0.51.0'
 }
 // HOWTO: find out which dependency versions are managed by Spring Boot:
@@ -227,7 +227,7 @@ project.tasks.check.dependsOn(spotlessCheck)
 // HACK: no idea why spotless uses the output of these tasks, but we get warnings without those
 project.tasks.spotlessJava.dependsOn(
         tasks.generateLicenseReport,
-        // tasks.pitest, TODO.test: PiTest currently does not work, needs to be fixed
+        tasks.pitest,
         tasks.jacocoTestReport,
         tasks.processResources,
         tasks.processTestResources)
@@ -335,7 +335,7 @@ jacocoTestCoverageVerification {
     }
 }
-// HOWTO: run all unit-tests which don't need a database: gw-test unitTest
+// HOWTO: run all unit-tests which don't need a database: gw unitTest
 tasks.register('unitTest', Test) {
     useJUnitPlatform {
         excludeTags 'importOfficeData', 'importHostingAssets', 'scenarioTest', 'generalIntegrationTest',
@@ -360,7 +360,7 @@ tasks.register('generalIntegrationTest', Test) {
     mustRunAfter spotlessJava
 }
-// HOWTO: run all integration tests of the office module: gw-test officeIntegrationTest
+// HOWTO: run all integration tests of the office module: gw officeIntegrationTest
 tasks.register('officeIntegrationTest', Test) {
     useJUnitPlatform {
         includeTags 'officeIntegrationTest'
@@ -372,26 +372,26 @@ tasks.register('officeIntegrationTest', Test) {
     mustRunAfter spotlessJava
 }
-// HOWTO: run all integration tests of the booking module: gw-test bookingIntegrationTest
+// HOWTO: run all integration tests of the booking module: gw bookingIntegrationTest
 tasks.register('bookingIntegrationTest', Test) {
     useJUnitPlatform {
         includeTags 'bookingIntegrationTest'
     }
     group 'verification'
-    description 'runs integration tests of the booking module'
+    description 'runs integration tests of the office module'
     mustRunAfter spotlessJava
 }
-// HOWTO: run all integration tests of the hosting module: gw-test hostingIntegrationTest
+// HOWTO: run all integration tests of the hosting module: gw hostingIntegrationTest
 tasks.register('hostingIntegrationTest', Test) {
     useJUnitPlatform {
         includeTags 'hostingIntegrationTest'
     }
     group 'verification'
-    description 'runs integration tests of the hosting module'
+    description 'runs integration tests of the office module'
     mustRunAfter spotlessJava
 }
@@ -454,7 +454,7 @@ pitest {
     outputFormats = ['XML', 'HTML']
     timestampedReports = false
 }
-// project.tasks.check.dependsOn(project.tasks.pitest) TODO.test: PiTest currently does not work, needs to be fixed
+project.tasks.check.dependsOn(project.tasks.pitest)
 project.tasks.pitest.doFirst { // Why not doLast? See README.md!
     println "PiTest Mutation Report: file:///${project.rootDir}/build/reports/pitest/index.html"
 }
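
On both sides of this diff the suites are selected through JUnit 5 tags: the includeTags/excludeTags filters above match @Tag annotations on the test classes. A minimal sketch of a test that the officeIntegrationTest task would pick up (the class and assertion are made up for illustration):

    import org.junit.jupiter.api.Tag;
    import org.junit.jupiter.api.Test;

    import static org.assertj.core.api.Assertions.assertThat;

    // Selected by `gw officeIntegrationTest` via includeTags 'officeIntegrationTest'
    // and skipped by `gw unitTest` via its excludeTags list.
    @Tag("officeIntegrationTest")
    class SomeOfficeIntegrationTest {

        @Test
        void someOfficeRoundTrip() {
            assertThat("partner").isNotEmpty(); // placeholder for a real office-module test
        }
    }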


@@ -1,10 +1,9 @@
 --liquibase formatted sql
--- FIXME: check if we really need the restricted user
 -- ============================================================================
 -- NUMERIC-HASH-FUNCTIONS
---changeset michael.hoennig:hash runOnChange:true validCheckSum:ANY endDelimiter:--//
+--changeset michael.hoennig:hash endDelimiter:--//
 -- ----------------------------------------------------------------------------
 do $$


@@ -870,23 +870,18 @@ $$;
 -- ============================================================================
---changeset michael.hoennig:rbac-base-PGSQL-ROLES runOnChange:true validCheckSum:ANY context:!external-db endDelimiter:--//
+--changeset michael.hoennig:rbac-base-PGSQL-ROLES context:!external-db endDelimiter:--//
 -- ----------------------------------------------------------------------------
 do $$
 begin
     if '${HSADMINNG_POSTGRES_ADMIN_USERNAME}'='admin' then
-        if not exists (select from pg_catalog.pg_roles where rolname = 'admin') then
-            create role admin;
-        end if;
+        create role admin;
         grant all privileges on all tables in schema public to admin;
     end if;
     if '${HSADMINNG_POSTGRES_RESTRICTED_USERNAME}'='restricted' then
-        if not exists (select from pg_catalog.pg_roles where rolname = 'restricted') then
-            create role restricted;
-        end if;
+        create role restricted;
         grant all privileges on all tables in schema public to restricted;
     end if;
 end $$;


@@ -1,14 +0,0 @@
---liquibase formatted sql
--- ============================================================================
----changeset michael.hoennig:hs-global-liquibase-migration-test endDelimiter:--//
-CREATE OR REPLACE VIEW hs_integration.subscription AS
-    SELECT DISTINCT
-        relation.mark as subscription,
-        contact.emailaddresses->>'main' as email
-    FROM hs_office.contact AS contact
-    JOIN hs_office.relation AS relation ON relation.contactuuid = contact.uuid AND relation.type = 'SUBSCRIBER'
-    ORDER BY subscription, email;
---//


@@ -51,37 +51,6 @@ databaseChangeLog:
   - include:
       file: db/changelog/1-rbac/1080-rbac-global.sql
-  - include:
-      file: db/changelog/2-rbactest/200-rbactest-schema.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/201-rbactest-customer/2010-rbactest-customer.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/201-rbactest-customer/2013-rbactest-customer-rbac.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/201-rbactest-customer/2018-rbactest-customer-test-data.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/202-rbactest-package/2020-rbactest-package.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/202-rbactest-package/2023-rbactest-package-rbac.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/202-rbactest-package/2028-rbactest-package-test-data.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/203-rbactest-domain/2030-rbactest-domain.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/203-rbactest-domain/2033-rbactest-domain-rbac.sql
-      context: "!without-test-data"
-  - include:
-      file: db/changelog/2-rbactest/203-rbactest-domain/2038-rbactest-domain-test-data.sql
-      context: "!without-test-data"
   - include:
       file: db/changelog/5-hs-office/500-hs-office-schema.sql
   - include:
@@ -167,51 +136,6 @@ databaseChangeLog:
       file: db/changelog/5-hs-office/512-coopassets/5128-hs-office-coopassets-test-data.sql
       context: "!without-test-data"
-  - include:
-      file: db/changelog/6-hs-booking/600-hs-booking-schema.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/6-hs-booking/610-booking-debitor/6100-hs-booking-debitor.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/6-hs-booking/620-booking-project/6200-hs-booking-project.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/6-hs-booking/620-booking-project/6203-hs-booking-project-rbac.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/6-hs-booking/620-booking-project/6208-hs-booking-project-test-data.sql
-      context: "!only-office and !without-test-data"
-  - include:
-      file: db/changelog/6-hs-booking/630-booking-item/6300-hs-booking-item.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/6-hs-booking/630-booking-item/6303-hs-booking-item-rbac.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/6-hs-booking/630-booking-item/6308-hs-booking-item-test-data.sql
-      context: "!only-office and !without-test-data"
-  - include:
-      file: db/changelog/7-hs-hosting/700-hs-hosting-schema.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/7-hs-hosting/701-hosting-asset/7010-hs-hosting-asset.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/7-hs-hosting/701-hosting-asset/7013-hs-hosting-asset-rbac.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/7-hs-hosting/701-hosting-asset/7016-hs-hosting-asset-migration.sql
-      context: "!only-office"
-  - include:
-      file: db/changelog/7-hs-hosting/701-hosting-asset/7018-hs-hosting-asset-test-data.sql
-      context: "!only-office and !without-test-data"
-  - include:
-      file: db/changelog/9-hs-global/9000-statistics.sql
-      context: "!only-office"
   - include:
       file: db/changelog/9-hs-global/9100-hs-integration-schema.sql
   - include:
@@ -220,7 +144,3 @@ databaseChangeLog:
       file: db/changelog/9-hs-global/9120-integration-znuny.sql
   - include:
       file: db/changelog/9-hs-global/9130-integration-mlmmj.sql
-  - include:
-      file: db/changelog/9-hs-global/9999-liquibase-migration-test.sql
-      context: liquibase-migration-test
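
The context: expressions in this changelog are evaluated against the contexts passed to Liquibase at update time: an include marked "!only-office" is skipped while the only-office context is active, and "!only-office and !without-test-data" requires that neither context is active. A minimal sketch of activating such a context programmatically, using the same Liquibase API as the (deleted) migration test further down:

    import liquibase.Contexts;
    import liquibase.LabelExpression;
    import liquibase.Liquibase;

    // With "only-office" active, every "!only-office" include (booking, hosting,
    // statistics) is skipped, which matches the intent of the second commit.
    void migrateOfficeOnly(final Liquibase liquibase) throws Exception {
        liquibase.update(new Contexts("only-office"), new LabelExpression());
    }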


@@ -17,7 +17,6 @@ import static net.hostsharing.hsadminng.config.HttpHeadersBuilder.headers;
 import static org.apache.commons.lang3.RandomStringUtils.randomAlphanumeric;
 import static org.assertj.core.api.Assertions.assertThat;
-import static com.github.tomakehurst.wiremock.client.WireMock.*;
 @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
 @TestPropertySource(properties = {"server.port=0", "hsadminng.cas.server=http://localhost:8088/cas"})
 @ActiveProfiles("wiremock") // IMPORTANT: To test prod config, do not use test profile!


@@ -72,8 +72,8 @@ class HashGeneratorUnitTest {
     @Test
     void generatesMySqlNativePasswordHash() {
-        final var hash = HashGenerator.using(MYSQL_NATIVE).hash("t8L7FULt"); // results in line+branch-coverage
-        assertThat(hash).isEqualTo("*F1E107E5C47E0939C7BC941DDE59EDBBDA1F7E39");
+        final var hash = HashGenerator.using(MYSQL_NATIVE).hash("Test1234");
+        assertThat(hash).isEqualTo("*14F1A8C42F8B6D4662BB3ED290FD37BF135FE45C");
     }
     @Test
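
Both variants of this test exercise MySQL's mysql_native_password scheme. Assuming HashGenerator implements the standard algorithm (the implementation itself is not part of this diff), the expected value is a '*' followed by the uppercase hex of a double SHA-1:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.util.HexFormat;

    // Sketch of the standard mysql_native_password hash: "*" + HEX(SHA1(SHA1(password))).
    static String mySqlNativePasswordHash(final String password) throws Exception {
        final var sha1 = MessageDigest.getInstance("SHA-1");
        final var once = sha1.digest(password.getBytes(StandardCharsets.UTF_8));
        final var twice = sha1.digest(once); // digest() resets the instance after each call
        return "*" + HexFormat.of().withUpperCase().formatHex(twice);
    }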


@@ -56,35 +56,6 @@ class HsEMailAddressHostingAssetValidatorUnitTest {
                 "{type=string[], propertyName=target, elementsOf={type=string, propertyName=target, matchesRegEx=[^[a-z][a-z0-9]{2}[0-9]{2}(-[a-z0-9][a-z0-9\.+_-]*)?$, ^([a-zA-Z0-9_!#$%&'*+/=?`{|}~^.-]+)?@[a-zA-Z0-9.-]+$, ^nobody$, ^/dev/null$], maxLength=320}, required=true, minLength=1}");
     }
-    @Test
-    void preprocessEntityWithInitializedIdentifier() {
-        // given
-        final var givenEntity = validEntityBuilder().identifier("some-local-part@example.org").build();
-        assertThat(givenEntity.getParentAsset().getIdentifier()).as("preconditon failed").isEqualTo("example.org|MBOX");
-        final var validator = HostingAssetEntityValidatorRegistry.forType(givenEntity.getType());
-        // when
-        validator.preprocessEntity(givenEntity);
-        // then
-        assertThat(givenEntity.getIdentifier()).isEqualTo("some-local-part@example.org");
-    }
-    @Test
-    void preprocessEntityWithUninitializedIdentifier() {
-        // given
-        final var givenEntity = validEntityBuilder().identifier(null).build();
-        assertThat(givenEntity.getParentAsset().getIdentifier()).as("preconditon failed").isEqualTo("example.org|MBOX");
-        final var validator = HostingAssetEntityValidatorRegistry.forType(givenEntity.getType());
-        // when
-        validator.preprocessEntity(givenEntity);
-        // then
-        assertThat(givenEntity.getIdentifier())
-                .isEqualTo(givenEntity.getDirectValue("local-part", String.class) + "@example.org");
-    }
     @Test
     void acceptsValidEntity() {
         // given


@@ -9,17 +9,18 @@ import org.junit.jupiter.api.Test;
 import java.util.Map;
 import static java.util.Map.entry;
+import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.DOMAIN_SMTP_SETUP;
 import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MANAGED_WEBSPACE;
+import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MARIADB_INSTANCE;
 import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetTestEntities.MANAGED_SERVER_HOSTING_ASSET_REAL_TEST_ENTITY;
-import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.PGSQL_INSTANCE;
-import static net.hostsharing.hsadminng.hs.hosting.asset.validators.HsPostgreSqlDbInstanceHostingAssetValidator.DEFAULT_INSTANCE_IDENTIFIER_SUFFIX;
+import static net.hostsharing.hsadminng.hs.hosting.asset.validators.HsMariaDbInstanceHostingAssetValidator.DEFAULT_INSTANCE_IDENTIFIER_SUFFIX;
 import static org.assertj.core.api.Assertions.assertThat;
 class HsPostgreSqlInstanceHostingAssetValidatorUnitTest {
     static HsHostingAssetRbacEntity.HsHostingAssetRbacEntityBuilder<?, ?> validEntityBuilder() {
         return HsHostingAssetRbacEntity.builder()
-                .type(PGSQL_INSTANCE)
+                .type(MARIADB_INSTANCE)
                 .parentAsset(MANAGED_SERVER_HOSTING_ASSET_REAL_TEST_ENTITY)
                 .identifier(MANAGED_SERVER_HOSTING_ASSET_REAL_TEST_ENTITY.getIdentifier() + DEFAULT_INSTANCE_IDENTIFIER_SUFFIX);
     }
@@ -27,7 +28,7 @@ class HsPostgreSqlInstanceHostingAssetValidatorUnitTest {
     @Test
     void containsExpectedProperties() {
         // when
-        final var validator = HostingAssetEntityValidatorRegistry.forType(PGSQL_INSTANCE);
+        final var validator = HostingAssetEntityValidatorRegistry.forType(DOMAIN_SMTP_SETUP);
         // then
         assertThat(validator.properties()).map(Map::toString).isEmpty();
@@ -44,7 +45,7 @@ class HsPostgreSqlInstanceHostingAssetValidatorUnitTest {
         validator.preprocessEntity(givenEntity);
         // then
-        assertThat(givenEntity.getIdentifier()).isEqualTo("vm1234|PgSql.default");
+        assertThat(givenEntity.getIdentifier()).isEqualTo("vm1234|MariaDB.default");
     }
     @Test
@@ -63,7 +64,7 @@ class HsPostgreSqlInstanceHostingAssetValidatorUnitTest {
     @Test
     void rejectsInvalidIdentifier() {
         // given
-        final var givenEntity = validEntityBuilder().identifier("PostgreSQL").build();
+        final var givenEntity = validEntityBuilder().identifier("example.org").build();
         final var validator = HostingAssetEntityValidatorRegistry.forType(givenEntity.getType());
         // when
@@ -71,7 +72,7 @@ class HsPostgreSqlInstanceHostingAssetValidatorUnitTest {
         // then
         assertThat(result).containsExactly(
-                "'identifier' expected to match '^\\Qvm1234|PgSql.default\\E$', but is 'PostgreSQL'"
+                "'identifier' expected to match '^\\Qvm1234|MariaDB.default\\E$', but is 'example.org'"
         );
     }
@@ -90,9 +91,9 @@ class HsPostgreSqlInstanceHostingAssetValidatorUnitTest {
         // then
         assertThat(result).containsExactlyInAnyOrder(
-                "'PGSQL_INSTANCE:vm1234|PgSql.default.bookingItem' must be null but is of type CLOUD_SERVER",
-                "'PGSQL_INSTANCE:vm1234|PgSql.default.parentAsset' must be of type MANAGED_SERVER but is of type MANAGED_WEBSPACE",
-                "'PGSQL_INSTANCE:vm1234|PgSql.default.assignedToAsset' must be null but is of type MANAGED_WEBSPACE");
+                "'MARIADB_INSTANCE:vm1234|MariaDB.default.bookingItem' must be null but is of type CLOUD_SERVER",
+                "'MARIADB_INSTANCE:vm1234|MariaDB.default.parentAsset' must be of type MANAGED_SERVER but is of type MANAGED_WEBSPACE",
+                "'MARIADB_INSTANCE:vm1234|MariaDB.default.assignedToAsset' must be null but is of type MANAGED_WEBSPACE");
     }
     @Test
@@ -110,6 +111,6 @@ class HsPostgreSqlInstanceHostingAssetValidatorUnitTest {
         // then
         assertThat(result).containsExactlyInAnyOrder(
-                "'PGSQL_INSTANCE:vm1234|PgSql.default.config.any' is not expected but is set to 'false'");
+                "'MARIADB_INSTANCE:vm1234|MariaDB.default.config.any' is not expected but is set to 'false'");
     }
 }


@@ -252,12 +252,14 @@ public class CsvDataImport extends ContextBasedTest {
         jpaAttempt.transacted(() -> {
             context(rbacSuperuser);
             // TODO.perf: could we instead skip creating test-data based on an env var?
+            /*
             em.createNativeQuery("delete from hs_hosting.asset where true").executeUpdate();
             em.createNativeQuery("delete from hs_hosting.asset_ex where true").executeUpdate();
             em.createNativeQuery("delete from hs_booking.item where true").executeUpdate();
             em.createNativeQuery("delete from hs_booking.item_ex where true").executeUpdate();
             em.createNativeQuery("delete from hs_booking.project where true").executeUpdate();
             em.createNativeQuery("delete from hs_booking.project_ex where true").executeUpdate();
+            */
             em.createNativeQuery("delete from hs_office.coopassettx where true").executeUpdate();
             em.createNativeQuery("delete from hs_office.coopassettx_legacy_id where true").executeUpdate();
             em.createNativeQuery("delete from hs_office.coopsharetx where true").executeUpdate();
@@ -293,9 +295,11 @@ public class CsvDataImport extends ContextBasedTest {
     protected void deleteFromTestTables() {
         jpaAttempt.transacted(() -> {
             context(rbacSuperuser);
+            /*
             em.createNativeQuery("delete from rbactest.domain where true").executeUpdate();
             em.createNativeQuery("delete from rbactest.package where true").executeUpdate();
             em.createNativeQuery("delete from rbactest.customer where true").executeUpdate();
+            */
         }).assertSuccessful();
     }
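
The TODO.perf comment above already points at a cleaner alternative to commenting these cleanups in and out per branch: gating them on an environment variable. A minimal sketch of that idea; the variable name HSADMINNG_SKIP_TEST_DATA_CLEANUP is made up:

    // Hypothetical replacement for the commented-out block in deleteFromTestTables():
    if (System.getenv("HSADMINNG_SKIP_TEST_DATA_CLEANUP") == null) {
        em.createNativeQuery("delete from rbactest.domain where true").executeUpdate();
        em.createNativeQuery("delete from rbactest.package where true").executeUpdate();
        em.createNativeQuery("delete from rbactest.customer where true").executeUpdate();
    }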


@@ -1,180 +0,0 @@
-package net.hostsharing.hsadminng.hs.migration;
-import liquibase.Liquibase;
-import lombok.SneakyThrows;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
-import org.springframework.context.annotation.Import;
-import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ActiveProfiles;
-import org.springframework.test.context.jdbc.Sql;
-import org.testcontainers.containers.JdbcDatabaseContainer;
-import org.testcontainers.jdbc.ContainerDatabaseDriver;
-import jakarta.persistence.EntityManager;
-import jakarta.persistence.PersistenceContext;
-import javax.sql.DataSource;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.InputStreamReader;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.apache.commons.io.FileUtils.readFileToString;
-import static org.apache.commons.io.FileUtils.write;
-import static org.apache.commons.io.FileUtils.writeStringToFile;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS;
-// BLOG: Liquibase-migration-test (not before the reference-SQL-dump-generation is simplified)
-// HOWTO: generate the prod-reference-SQL-dump during a prod-release
-/**
- * Tests, if the Liquibase scripts can be applied to a database which is already populated with schemas
- * and test-data from a previous version.
- *
- * <p>The test works as follows:</p>
- *
- * <ol>
- *     <li>the database is initialized by `db/prod-only-office-schema-with-test-data.sql` from the test-resources</li>
- *     <li>the current Liquibase-migrations (only-office but with-test-data) are performed</li>
- *     <li>a new dump is written to `db/prod-only-office-schema-with-test-data.sql` in the build-directory</li>
- *     <li>an extra Liquibase-changeset (liquibase-migration-test) is applied</li>
- *     <li>it's asserted that the extra changeset got applied</li>
- * </ol>
- *
- * <p>During a release, the generated dump has to be committed to git and will be used in future test-runs
- * until it gets replaced with a new dump at the next release.</p>
- */
-@Tag("officeIntegrationTest")
-@DataJpaTest(properties = {
-        "spring.datasource.url=jdbc:tc:postgresql:15.5-bookworm:///liquibaseMigrationTestTC",
-        "spring.liquibase.enabled=false" // @Sql should go first, Liquibase will be initialized programmatically
-})
-@DirtiesContext
-@ActiveProfiles("liquibase-migration-test")
-@Import(LiquibaseConfig.class)
-@Sql(value = "/db/prod-only-office-schema-with-test-data.sql", executionPhase = BEFORE_TEST_CLASS)
-public class LiquibaseCompatibilityIntegrationTest {
-    private static final String EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION = "hs-global-liquibase-migration-test";
-    @Autowired
-    private DataSource dataSource;
-    @Autowired
-    private Liquibase liquibase;
-    @PersistenceContext
-    private EntityManager em;
-    @Test
-    void migrationWorksBasedOnAPreviouslyPopulatedSchema() {
-        // check the initial status from the @Sql-annotation
-        final var initialChangeSetCount = assertProdReferenceStatusAfterRestore();
-        // run the current migrations and dump the result to the build-directory
-        runLiquibaseMigrationsWithContexts("only-office", "with-test-data");
-        dumpTo(new File("build/db/prod-only-office-schema-with-test-data.sql"));
-        // then add another migration and assert if it was applied
-        runLiquibaseMigrationsWithContexts("liquibase-migration-test");
-        assertThatCurrentMigrationsGotApplied(initialChangeSetCount);
-    }
-    private int assertProdReferenceStatusAfterRestore() {
-        final var schemas = singleColumnSqlQuery("SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname='public'");
-        assertThat(schemas).containsExactly("databasechangelog", "databasechangeloglock");
-        final var liquibaseScripts1 = singleColumnSqlQuery("SELECT * FROM public.databasechangelog");
-        assertThat(liquibaseScripts1).hasSizeGreaterThan(285);
-        assertThat(liquibaseScripts1).doesNotContain(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
-        final var initialChangeSetCount = liquibaseScripts1.size();
-        return initialChangeSetCount;
-    }
-    private void assertThatCurrentMigrationsGotApplied(final int initialChangeSetCount) {
-        final var liquibaseScripts = singleColumnSqlQuery("SELECT id FROM public.databasechangelog");
-        assertThat(liquibaseScripts).hasSizeGreaterThan(initialChangeSetCount);
-        assertThat(liquibaseScripts).contains(EXPECTED_CHANGESET_ONLY_AFTER_NEW_MIGRATION);
-    }
-    @SneakyThrows
-    private void dumpTo(final File targetFileName) {
-        makeDir(targetFileName.getParentFile());
-        final var jdbcDatabaseContainer = getJdbcDatabaseContainer();
-        final var sqlDumpFile = new File(targetFileName.getParent(), "." + targetFileName.getName());
-        final var pb = new ProcessBuilder(
-                "pg_dump", "--column-inserts", "--disable-dollar-quoting",
-                "--host=" + jdbcDatabaseContainer.getHost(),
-                "--port=" + jdbcDatabaseContainer.getFirstMappedPort(),
-                "--username=" + jdbcDatabaseContainer.getUsername(),
-                "--dbname=" + jdbcDatabaseContainer.getDatabaseName(),
-                "--file=" + sqlDumpFile.getCanonicalPath()
-        );
-        pb.environment().put("PGPASSWORD", jdbcDatabaseContainer.getPassword());
-        final var process = pb.start();
-        int exitCode = process.waitFor();
-        final var stderr = new BufferedReader(new InputStreamReader(process.getErrorStream()))
-                .lines().collect(Collectors.joining("\n"));
-        assertThat(exitCode).describedAs(stderr).isEqualTo(0);
-        final var header = """
-                -- =================================================================================
-                -- Generated reference-SQL-dump (hopefully of latest prod-release).
-                -- See: net.hostsharing.hsadminng.hs.migration.LiquibaseCompatibilityIntegrationTest
-                -- ---------------------------------------------------------------------------------
-                --
-                -- Explicit pre-initialization because we cannot use `pg_dump --create ...`
-                -- because the database is already created by Testcontainers.
-                --
-                CREATE ROLE postgres;
-                CREATE ROLE admin;
-                CREATE ROLE restricted;
-                """;
-        writeStringToFile(targetFileName, header, UTF_8, false); // false = overwrite
-        write(targetFileName, readFileToString(sqlDumpFile, UTF_8), UTF_8, true);
-        assertThat(sqlDumpFile.delete()).describedAs(sqlDumpFile + " cannot be deleted");
-    }
-    private void makeDir(final File dir) {
-        assertThat(!dir.exists() || dir.isDirectory()).describedAs(dir + " does exist, but is not a directory").isTrue();
-        assertThat(dir.isDirectory() || dir.mkdirs()).describedAs(dir + " cannot be created").isTrue();
-    }
-    @SneakyThrows
-    private void runLiquibaseMigrationsWithContexts(final String... contexts) {
-        liquibase.update(
-                new liquibase.Contexts(contexts),
-                new liquibase.LabelExpression());
-    }
-    private List<String> singleColumnSqlQuery(final String sql) {
-        //noinspection unchecked
-        final var rows = (List<Object>) em.createNativeQuery(sql).getResultList();
-        return rows.stream().map(Objects::toString).toList();
-    }
-    @SneakyThrows
-    private static JdbcDatabaseContainer<?> getJdbcDatabaseContainer() {
-        final var getContainerMethod = ContainerDatabaseDriver.class.getDeclaredMethod("getContainer", String.class);
-        getContainerMethod.setAccessible(true);
-        @SuppressWarnings("rawtypes")
-        final var container = (JdbcDatabaseContainer) getContainerMethod.invoke(null,
-                "jdbc:tc:postgresql:15.5-bookworm:///liquibaseMigrationTestTC");
-        return container;
-    }
-}


@@ -1,28 +0,0 @@
-package net.hostsharing.hsadminng.hs.migration;
-import liquibase.Liquibase;
-import liquibase.database.DatabaseFactory;
-import liquibase.database.jvm.JdbcConnection;
-import liquibase.resource.ClassLoaderResourceAccessor;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Profile;
-import javax.sql.DataSource;
-@Configuration
-@Profile("liquibase-migration-test")
-public class LiquibaseConfig {
-    @Bean
-    public Liquibase liquibase(DataSource dataSource) throws Exception {
-        final var connection = dataSource.getConnection();
-        final var database = DatabaseFactory.getInstance()
-                .findCorrectDatabaseImplementation(new JdbcConnection(connection));
-        return new Liquibase(
-                "db/changelog/db.changelog-master.yaml", // Path to your Liquibase changelog
-                new ClassLoaderResourceAccessor(),
-                database
-        );
-    }
-}

File diff suppressed because it is too large.