WIP: importing office data as base for hosting assets
parent a28d1b9a1e
commit 2c24a2f593
@@ -119,10 +119,14 @@ public class CsvDataImport extends ContextBasedTest {

     protected void persist(final Integer id, final RbacObject entity) {
         try {
+            final var asString = entity.toString();
+            if ( asString.contains("'null null, null'") || asString.equals("person()")) {
+                return;
+            }
             //System.out.println("persisting #" + entity.hashCode() + ": " + entity);
             em.persist(entity);
             // uncomment for debugging purposes
-            // em.flush();
+            em.flush();
             // System.out.println("persisted #" + entity.hashCode() + " as " + entity.getUuid());
         } catch (Exception exc) {
             System.err.println("failed to persist #" + entity.hashCode() + ": " + entity);
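Note: the new guard skips entities whose toString() output marks them as empty placeholders, and em.flush() now runs per entity so that constraint violations surface inside the surrounding try/catch rather than at commit time. A minimal, self-contained sketch of the guard idea; the Person record and the isEmptyPlaceholder helper are illustrative only and not part of the commit:

public class PersistGuardSketch {

    // hypothetical stand-in for an imported entity
    record Person(String familyName, String givenName) {
        @Override
        public String toString() {
            return familyName == null && givenName == null
                    ? "person()"
                    : "person(" + familyName + ", " + givenName + ")";
        }
    }

    // mirrors the string-based check added to persist(...)
    static boolean isEmptyPlaceholder(final Object entity) {
        final var asString = entity.toString();
        return asString.contains("'null null, null'") || asString.equals("person()");
    }

    public static void main(final String[] args) {
        System.out.println(isEmptyPlaceholder(new Person(null, null)));    // true  -> would be skipped
        System.out.println(isEmptyPlaceholder(new Person("Doe", "Jane"))); // false -> would be persisted
    }
}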
@@ -7,6 +7,7 @@ import net.hostsharing.hsadminng.hs.booking.item.validators.HsBookingItemEntityV
 import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetEntity;
 import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType;
 import net.hostsharing.hsadminng.hs.hosting.asset.validators.HostingAssetEntitySaveProcessor;
+import net.hostsharing.hsadminng.hs.office.debitor.HsOfficeDebitorEntity;
 import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
 import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.MethodOrderer;
@@ -35,6 +36,7 @@ import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MANA
 import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MANAGED_WEBSPACE;
 import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange;
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assumptions.assumeThat;

 /*
  * This 'test' includes the complete legacy 'office' data import.
@@ -83,7 +85,7 @@ import static org.assertj.core.api.Assertions.assertThat;
 @Import({ Context.class, JpaAttempt.class })
 @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 @ExtendWith(OrderedDependedTestsExtension.class)
-public class ImportHostingAssets extends CsvDataImport {
+public class ImportHostingAssets extends ImportOfficeData {

     private static final Integer IP_NUMBER_ID_OFFSET = 1000000;
     private static final Integer HIVE_ID_OFFSET = 2000000;
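Note: ImportHostingAssets now extends ImportOfficeData instead of CsvDataImport, so the inherited office-import steps also run when the hosting-asset import is executed; the @Order renumbering in the following hunks (1010 to 2010, 2000 to 3000, and so on) apparently keeps the 1000 range free for those inherited steps. A hedged sketch of the inheritance mechanics, with made-up class and method names:

import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;

@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
class OfficeImportSketch {
    @Test
    @Order(1010)
    void importsOfficeData() { /* office-level import step */ }
}

@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
class HostingImportSketch extends OfficeImportSketch {
    @Test
    @Order(2010)
    void importsHostingAssets() { /* runs in addition to the inherited importsOfficeData() */ }
}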
@@ -96,7 +98,7 @@ public class ImportHostingAssets extends CsvDataImport {
     private static Map<Integer, HsHostingAssetEntity> hostingAssets = new WriteOnceMap<>();

     @Test
-    @Order(1010)
+    @Order(2010)
     void importIpNumbers() {
         try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "inet_addr.csv")) {
             final var lines = readAllLines(reader);
@@ -107,7 +109,7 @@ public class ImportHostingAssets extends CsvDataImport {
     }

     @Test
-    @Order(1019)
+    @Order(2019)
     void verifyIpNumbers() {
         assumeThatWeAreImportingControlledTestData();

@@ -124,7 +126,7 @@ public class ImportHostingAssets extends CsvDataImport {
     }

     @Test
-    @Order(1030)
+    @Order(2030)
     void importHives() {
         try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "hive.csv")) {
             final var lines = readAllLines(reader);
@@ -135,7 +137,7 @@ public class ImportHostingAssets extends CsvDataImport {
     }

     @Test
-    @Order(1039)
+    @Order(2039)
     void verifyHives() {
         assumeThatWeAreImportingControlledTestData();

@@ -152,7 +154,7 @@ public class ImportHostingAssets extends CsvDataImport {
     }

     @Test
-    @Order(2000)
+    @Order(3000)
     void importPackets() {
         try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "packet.csv")) {
             final var lines = readAllLines(reader);
@@ -163,7 +165,7 @@ public class ImportHostingAssets extends CsvDataImport {
     }

     @Test
-    @Order(2009)
+    @Order(3009)
     void verifyPackets() {
         assumeThatWeAreImportingControlledTestData();

@@ -200,7 +202,7 @@ public class ImportHostingAssets extends CsvDataImport {
     }

     @Test
-    @Order(2010)
+    @Order(3010)
     void importPacketComponents() {
         try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "packet_component.csv")) {
             final var lines = readAllLines(reader);
@@ -211,7 +213,7 @@ public class ImportHostingAssets extends CsvDataImport {
     }

     @Test
-    @Order(2019)
+    @Order(3019)
     void verifyPacketComponents() {
         assumeThatWeAreImportingControlledTestData();

@@ -294,7 +296,6 @@ public class ImportHostingAssets extends CsvDataImport {
         }).assertSuccessful();
     }

-
     private void importIpNumbers(final String[] header, final List<String[]> records) {
         final var columns = new Columns(header);
         records.stream()
@@ -547,6 +548,11 @@ public class ImportHostingAssets extends CsvDataImport {
                 .collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
     }

+    @Override
+    protected void assumeThatWeAreExplicitlyImportingOfficeData() {
+        assumeThat(false).isTrue();
+    }
+
     protected static boolean isImportingControlledTestData() {
         return MIGRATION_DATA_PATH.equals("migration");
     }
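Note: assumeThatWeAreExplicitlyImportingOfficeData() works via test assumptions: when an AssertJ assumption fails, JUnit aborts the test and reports it as skipped instead of failed, which is what lets a subclass switch inherited import steps off. A small illustrative sketch, not project code:

import static org.assertj.core.api.Assumptions.assumeThat;

import org.junit.jupiter.api.Test;

class AssumptionSketchTest {

    @Test
    void skippedWhenAssumptionFails() {
        assumeThat(false).isTrue();  // assumption fails -> the test is aborted and reported as skipped
        throw new IllegalStateException("never reached");
    }

    @Test
    void runsWhenAssumptionHolds() {
        assumeThat(true).isTrue();   // assumption holds -> the test body executes normally
    }
}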
@@ -111,6 +111,7 @@ public class ImportOfficeData extends CsvDataImport {
     private static final List<Integer> IGNORE_BUSINESS_PARTNERS = Arrays.asList(
             512167, // 11139, partner without contractual contact
             512170, // 11142, partner without contractual contact
+            511725, // 10764, partner without contractual contact
             -1
     );

@@ -313,6 +314,8 @@ public class ImportOfficeData extends CsvDataImport {
     @Test
     @Order(1040)
     void importCoopShares() {
+        assumeThatWeAreExplicitlyImportingOfficeData();
+
         try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "share-transactions.csv")) {
             final var lines = readAllLines(reader);
             importCoopShares(justHeader(lines), withoutHeader(lines));
@@ -324,6 +327,7 @@ public class ImportOfficeData extends CsvDataImport {
     @Test
     @Order(1041)
     void verifyCoopShares() {
+        assumeThatWeAreExplicitlyImportingOfficeData();
         assumeThatWeAreImportingControlledTestData();

         assertThat(toFormattedString(coopShares)).isEqualToIgnoringWhitespace("""
@@ -339,6 +343,7 @@ public class ImportOfficeData extends CsvDataImport {
     @Test
     @Order(1050)
     void importCoopAssets() {
+        assumeThatWeAreExplicitlyImportingOfficeData();

         try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "asset-transactions.csv")) {
             final var lines = readAllLines(reader);
@@ -351,6 +356,7 @@ public class ImportOfficeData extends CsvDataImport {
     @Test
     @Order(1059)
     void verifyCoopAssets() {
+        assumeThatWeAreExplicitlyImportingOfficeData();
         assumeThatWeAreImportingControlledTestData();

         assertThat(toFormattedString(coopAssets)).isEqualToIgnoringWhitespace("""
@@ -372,7 +378,9 @@ public class ImportOfficeData extends CsvDataImport {
     @Test
     @Order(1099)
     void verifyMemberships() {
+        assumeThatWeAreExplicitlyImportingOfficeData();
         assumeThatWeAreImportingControlledTestData();
+
         assertThat(toFormattedString(memberships)).isEqualToIgnoringWhitespace("""
                 {
                     17=Membership(M-1001700, P-10017, [2000-12-06,), ACTIVE),
@@ -386,6 +394,8 @@ public class ImportOfficeData extends CsvDataImport {
     @Test
     @Order(2000)
     void verifyAllPartnersHavePersons() {
+        assumeThatWeAreExplicitlyImportingOfficeData();
+
         partners.forEach((id, p) -> {
             final var partnerRel = p.getPartnerRel();
             assertThat(partnerRel).describedAs("partner " + id + " without partnerRel").isNotNull();
@@ -480,6 +490,23 @@ public class ImportOfficeData extends CsvDataImport {
         assertThat(idsToRemove.size()).isEqualTo(1); // only from partner #99
     }

+    @Test
+    @Order(3005)
+    void removeEmptyPersons() {
+        // avoid an error when persisting the deliberately invalid partner entry #99
+        final var idsToRemove = new HashSet<Integer>();
+        persons.forEach( (id, p) -> {
+            if ( p.getPersonType() == null ||
+                    (p.getFamilyName() == null && p.getGivenName() == null && p.getTradeName() == null) ) {
+                idsToRemove.add(id);
+            }
+        });
+        idsToRemove.forEach(id -> persons.remove(id));
+
+        assumeThatWeAreImportingControlledTestData();
+        assertThat(idsToRemove.size()).isEqualTo(0);
+    }
+
     @Test
     @Order(9000)
     @Commit
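Note: removeEmptyPersons() collects the ids of incomplete person entries first and removes them afterwards, so the map is not modified while it is being iterated. A standalone sketch of that two-phase cleanup; PersonStub and the sample data are made up:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

public class RemoveEmptyEntriesSketch {

    // made-up stand-in for the imported person entity
    record PersonStub(String personType, String tradeName) {}

    public static void main(final String[] args) {
        final Map<Integer, PersonStub> persons = new HashMap<>();
        persons.put(1, new PersonStub("NATURAL_PERSON", null));
        persons.put(99, new PersonStub(null, null)); // deliberately incomplete entry

        // phase 1: collect the ids of empty entries while iterating
        final var idsToRemove = new HashSet<Integer>();
        persons.forEach((id, p) -> {
            if (p.personType() == null && p.tradeName() == null) {
                idsToRemove.add(id);
            }
        });

        // phase 2: remove them after the iteration has finished
        idsToRemove.forEach(persons::remove);

        System.out.println(persons.keySet()); // [1]
    }
}

persons.values().removeIf(...) would achieve the same in a single pass; keeping the id set around has the advantage that the test can assert how many entries were dropped.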
@@ -500,6 +527,8 @@ public class ImportOfficeData extends CsvDataImport {
         jpaAttempt.transacted(() -> {
             context(rbacSuperuser);
             persons.forEach(this::persist);
+            relations.forEach( (id, rel) -> this.persist(id, rel.getAnchor()) );
+            relations.forEach( (id, rel) -> this.persist(id, rel.getHolder()) );
         }).assertSuccessful();

         jpaAttempt.transacted(() -> {
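Note: the persist step now saves each relation's anchor and holder persons before the relations themselves, so the referenced rows already exist when a relation is written. A plain-Java sketch of that ordering constraint, without JPA; all names below are made up:

import java.util.LinkedHashSet;
import java.util.Set;

public class PersistOrderSketch {

    // made-up stand-ins; the real entities are persons and relations between them
    record Person(String name) {}
    record Relation(Person anchor, Person holder) {}

    private final Set<Object> persisted = new LinkedHashSet<>();

    void persist(final Object entity) {
        if (entity instanceof Relation rel
                && !(persisted.contains(rel.anchor()) && persisted.contains(rel.holder()))) {
            throw new IllegalStateException("anchor/holder not saved yet: " + rel);
        }
        persisted.add(entity);
    }

    public static void main(final String[] args) {
        final var sketch = new PersistOrderSketch();
        final var rel = new Relation(new Person("anchor person"), new Person("holder person"));
        sketch.persist(rel.anchor());  // save the referenced entities first...
        sketch.persist(rel.holder());
        sketch.persist(rel);           // ...then the relation that points at them
        System.out.println(sketch.persisted.size()); // 3
    }
}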
@@ -560,6 +589,10 @@ public class ImportOfficeData extends CsvDataImport {

     }

+    protected void assumeThatWeAreExplicitlyImportingOfficeData() {
+        assumeThat(true).isFalse();
+    }
+
     private static boolean isImportingControlledTestData() {
         return partners.size() <= MAX_NUMBER_OF_TEST_DATA_PARTNERS;
     }