Updatable Liquibase changesets with env vars #157

Closed
hsh-michaelhoennig wants to merge 4 commits from bugfix/updatable-liquibase-changesets-with-env-vars into master
9 changed files with 536 additions and 483 deletions
Showing only changes of commit 2cbc333657

View File

@@ -113,9 +113,9 @@ function _gwTest1() {
     echo "DONE gw $@"
 }
 function _gwTest() {
-    . .aliases;
-    . .tc-environment;
-    rm /tmp/gwTest.tmp
+    . .aliases
+    . .tc-environment
+    rm -f /tmp/gwTest.tmp
     if [ "$1" == "--all" ]; then
         shift # to remove the --all from $@
         # deliberately in separate gradlew calls to avoid Testcontainers-PostgreSQL problem spillover

View File

@@ -7,6 +7,7 @@
         <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="postgres" />
         <entry key="HSADMINNG_POSTGRES_JDBC_URL" value="jdbc:postgresql://localhost:5432/postgres" />
         <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
+        <entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
       </map>
     </option>
     <option name="executionName" />
@@ -34,4 +35,4 @@
     <RunAsTest>true</RunAsTest>
     <method v="2" />
   </configuration>
 </component>

View File

@@ -3,7 +3,6 @@
   <ExternalSystemSettings>
     <option name="env">
       <map>
-        <entry key="HSADMINNG_MIGRATION_DATA_PATH" value="migration" />
         <entry key="HSADMINNG_POSTGRES_ADMIN_USERNAME" value="admin" />
         <entry key="HSADMINNG_POSTGRES_RESTRICTED_USERNAME" value="restricted" />
         <entry key="HSADMINNG_SUPERUSER" value="import-superuser@hostsharing.net" />

View File

@@ -1,7 +1,7 @@
-unset HSADMINNG_POSTGRES_JDBC_URL # dynamically set, different for normal tests and imports
-export HSADMINNG_POSTGRES_ADMIN_USERNAME=admin
-export HSADMINNG_POSTGRES_ADMIN_PASSWORD=
+source .unset-environment
 export HSADMINNG_POSTGRES_RESTRICTED_USERNAME=restricted
-export HSADMINNG_SUPERUSER=superuser-alex@hostsharing.net
-export HSADMINNG_MIGRATION_DATA_PATH=migration
+export HSADMINNG_POSTGRES_ADMIN_USERNAME=admin
+export HSADMINNG_SUPERUSER=import-superuser@hostsharing.net
 export LANG=en_US.UTF-8

View File

@@ -4,4 +4,5 @@ unset HSADMINNG_POSTGRES_ADMIN_PASSWORD
 unset HSADMINNG_POSTGRES_RESTRICTED_USERNAME
 unset HSADMINNG_SUPERUSER
 unset HSADMINNG_MIGRATION_DATA_PATH
+unset HSADMINNG_OFFICE_DATA_SQL_FILE

View File

@@ -16,7 +16,7 @@ create table if not exists rbac.global
     uuid uuid primary key references rbac.object (uuid) on delete cascade,
     name varchar(63) unique
 );
-create unique index Global_Singleton on rbac.global ((0));
+create unique index if not exists Global_Singleton on rbac.global ((0));
 grant select on rbac.global to ${HSADMINNG_POSTGRES_RESTRICTED_USERNAME};
 --//

View File

@@ -16,6 +16,9 @@ import org.junit.jupiter.api.extension.TestWatcher;
 import org.opentest4j.AssertionFailedError;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
+import org.springframework.core.io.AbstractResource;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.FileSystemResource;
 import org.springframework.test.context.bean.override.mockito.MockitoBean;
 import org.springframework.core.io.Resource;
 import org.springframework.transaction.support.TransactionTemplate;
@@ -26,6 +29,7 @@ import jakarta.servlet.http.HttpServletRequest;
 import jakarta.validation.ValidationException;
 import jakarta.validation.constraints.NotNull;
 import java.io.BufferedReader;
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
@@ -118,10 +122,16 @@ public class CsvDataImport extends ContextBasedTest {
         return stream(lines.getFirst()).map(String::trim).toArray(String[]::new);
     }
 
+    public static @NotNull AbstractResource resourceOf(final String sqlFile) {
+        return new File(sqlFile).exists()
+                ? new FileSystemResource(sqlFile)
+                : new ClassPathResource(sqlFile);
+    }
+
     protected Reader resourceReader(@NotNull final String resourcePath) {
         try {
-            return new InputStreamReader(requireNonNull(getClass().getClassLoader().getResourceAsStream(resourcePath)));
-        } catch (Exception exc) {
+            return new InputStreamReader(requireNonNull(resourceOf(resourcePath).getInputStream()));
+        } catch (final Exception exc) {
             throw new AssertionFailedError("cannot open '" + resourcePath + "'");
         }
     }
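
Why this helps: resourceReader() previously resolved paths only via the classloader, whereas resourceOf() lets the same string point either at a file restored on disk or at a bundled classpath resource. A minimal sketch of that fallback, assuming only spring-core on the classpath (the file names below are hypothetical examples, not files from this PR):

```java
import java.io.File;

import org.springframework.core.io.AbstractResource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileSystemResource;

public class ResourceOfSketch {

    // mirrors the fallback logic of the new resourceOf(...) helper above
    static AbstractResource resourceOf(final String sqlFile) {
        return new File(sqlFile).exists()
                ? new FileSystemResource(sqlFile)   // e.g. a dump an operator restored to /tmp
                : new ClassPathResource(sqlFile);   // e.g. a script bundled in src/test/resources
    }

    public static void main(String[] args) {
        System.out.println(resourceOf("/tmp/office-dump.sql"));                                      // FileSystemResource if that file exists
        System.out.println(resourceOf("db/released-only-office-schema-with-import-test-data.sql"));  // ClassPathResource otherwise
    }
}
```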

View File

@@ -12,6 +12,7 @@ import net.hostsharing.hsadminng.hs.booking.item.HsBookingItem;
 import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemRealEntity;
 import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemType;
 import net.hostsharing.hsadminng.hs.booking.item.validators.HsBookingItemEntityValidatorRegistry;
+import net.hostsharing.hsadminng.hs.booking.project.HsBookingProject;
 import net.hostsharing.hsadminng.hs.booking.project.HsBookingProjectRealEntity;
 import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetRealEntity;
 import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType;
@@ -29,14 +30,18 @@ import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestMethodOrder;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
 import org.springframework.context.annotation.Import;
 import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.orm.jpa.EntityManagerFactoryInfo;
 import org.springframework.test.annotation.Commit;
 import org.springframework.test.annotation.DirtiesContext;
 import org.springframework.test.context.ActiveProfiles;
-import org.springframework.test.context.jdbc.Sql;
 
+import java.io.IOException;
+import java.io.InputStream;
 import java.io.Reader;
 import java.net.IDN;
 import java.util.ArrayList;
@@ -52,6 +57,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Function;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.Arrays.stream;
 import static java.util.Map.entry;
 import static java.util.Map.ofEntries;
@@ -80,7 +86,7 @@ import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.UNIX
 import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assumptions.assumeThat;
-import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS;
+import static org.springframework.util.FileCopyUtils.copyToByteArray;
 
 @Tag("importHostingAssets")
 @DataJpaTest(properties = {
@@ -95,7 +101,6 @@ import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS;
 @ActiveProfiles({ "without-test-data", "liquibase-migration", "hosting-asset-import" })
 @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 @ExtendWith(OrderedDependedTestsExtension.class)
-@Sql(value = "/db/released-only-office-schema-with-import-test-data.sql", executionPhase = BEFORE_TEST_CLASS) // release-schema
 public class ImportHostingAssets extends CsvDataImport {
 
     private static final Set<String> NOBODY_SUBSTITUTES = Set.of("nomail", "bounce");
@@ -133,9 +138,14 @@ public class ImportHostingAssets extends CsvDataImport {
     @Autowired
     LiquibaseMigration liquibase;
 
+    @Value("${HSADMINNG_OFFICE_DATA_SQL_FILE:/db/released-only-office-schema-with-import-test-data.sql}")
+    String officeSchemaAndDataSqlFile;
+
     @Test
     @Order(11000)
+    @SneakyThrows
     void liquibaseMigrationForBookingAndHosting() {
+        executeSqlScript(officeSchemaAndDataSqlFile);
         liquibase.assertReferenceStatusAfterRestore(286, "hs-booking-SCHEMA");
         makeSureThatTheImportAdminUserExists();
         liquibase.runWithContexts("migration", "without-test-data");
@@ -146,8 +156,8 @@
     @Order(11010)
     void createBookingProjects() {
 
-        record PartnerLegacyIdMapping(UUID uuid, Integer bp_id){}
-        record DebitorRecord(UUID uuid, Integer version, String defaultPrefix){}
+        record PartnerLegacyIdMapping(UUID uuid, Integer bp_id) {}
+        record DebitorRecord(UUID uuid, Integer version, String defaultPrefix) {}
 
         final var partnerLegacyIdMappings = em.createNativeQuery(
                 """
@@ -161,16 +171,18 @@
         //noinspection unchecked
         final var debitorUuidToLegacyBpIdMap = ((List<PartnerLegacyIdMapping>) partnerLegacyIdMappings).stream()
                 .collect(toMap(row -> row.uuid, row -> row.bp_id));
-        final var debitors = em.createNativeQuery("SELECT debitor.uuid, debitor.version, debitor.defaultPrefix FROM hs_office.debitor debitor", DebitorRecord.class).getResultList();
+        final var debitors = em.createNativeQuery(
+                "select debitor.uuid, debitor.version, debitor.defaultPrefix from hs_office.debitor debitor",
+                DebitorRecord.class).getResultList();
         //noinspection unchecked
-        ((List<DebitorRecord>)debitors).forEach(debitor -> {
+        ((List<DebitorRecord>) debitors).forEach(debitor -> {
             bookingProjects.put(
                     debitorUuidToLegacyBpIdMap.get(debitor.uuid), HsBookingProjectRealEntity.builder()
                             .version(debitor.version)
                             .caption(debitor.defaultPrefix + " default project")
                             .debitor(em.find(HsBookingDebitorEntity.class, debitor.uuid))
                             .build());
         });
     }
 
     @Test
@@ -1231,9 +1243,7 @@
             bookingItems.put(packet_id, bookingItem);
             final var haType = determineHaType(basepacket_code);
 
-            logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || bookingItem.getRelatedProject()
-                    .getDebitor()
-                    .getDefaultPrefix()
+            logError(() -> assertThat(!free || haType == MANAGED_WEBSPACE || defaultPrefix(bookingItem)
                     .equals("hsh"))
                     .as("packet.free only supported for Hostsharing-Assets and ManagedWebspace in customer-ManagedServer, but is set for "
                             + packet_name)
@@ -1288,6 +1298,13 @@
         });
     }
 
+    private String defaultPrefix(final HsBookingItem bookingItem) {
+        return ofNullable(bookingItem.getProject())
+                .map(HsBookingProject::getDebitor)
+                .map(HsBookingDebitorEntity::getDefaultPrefix)
+                .orElse("<no default prefix for BI: " + bookingItem.getCaption() + ">");
+    }
+
     private void importPacketComponents(final String[] header, final List<String[]> records) {
         final var columns = new Columns(header);
         records.stream()
@@ -1940,4 +1957,17 @@
                 .map(row -> row.stream().map(Object::toString).collect(joining(", ")))
                 .collect(joining("\n"));
     }
+
+    @SneakyThrows
+    private void executeSqlScript(final String sqlFile) {
+        jpaAttempt.transacted(() -> {
+            try (InputStream resourceStream = resourceOf(sqlFile).getInputStream()) {
+                final var sqlScript = new String(copyToByteArray(resourceStream), UTF_8);
+                final var emf = (EntityManagerFactoryInfo) em.getEntityManagerFactory();
+                new JdbcTemplate(emf.getDataSource()).execute(sqlScript);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }).assertSuccessful();
+    }
 }
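
The removed class-level @Sql annotation is effectively replaced by the @Value-injected officeSchemaAndDataSqlFile plus executeSqlScript(...), so the office schema dump can be swapped per environment via HSADMINNG_OFFICE_DATA_SQL_FILE. A rough sketch of how the placeholder default behaves, assuming standard Spring Environment property resolution (the environment variable, if set, wins over the hard-coded default path):

```java
import java.util.Optional;

public class OfficeDataSqlFileResolutionSketch {
    public static void main(String[] args) {
        // Approximates the resolution of
        // "${HSADMINNG_OFFICE_DATA_SQL_FILE:/db/released-only-office-schema-with-import-test-data.sql}".
        final String officeSchemaAndDataSqlFile =
                Optional.ofNullable(System.getenv("HSADMINNG_OFFICE_DATA_SQL_FILE"))
                        .orElse("/db/released-only-office-schema-with-import-test-data.sql");
        // With the env var pointing at an externally restored dump, that file is executed;
        // otherwise the SQL bundled on the test classpath is used (via resourceOf's fallback).
        System.out.println("office schema/data SQL: " + officeSchemaAndDataSqlFile);
    }
}
```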