hosting-asset-data-migration #79
.gitignore (vendored): 5 additions

@@ -136,4 +136,9 @@ Desktop.ini
 # ESLint
 ######################
 .eslintcache
+
+######################
+# Project Related
+######################
 /.environment*
+/src/test/resources/migration-prod/

ImportHostingAssets.java

@@ -21,7 +21,10 @@ import java.util.List;
 import java.util.Map;
 
 import static java.util.stream.Collectors.toMap;
+import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.CLOUD_SERVER;
 import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.IPV4_NUMBER;
+import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MANAGED_SERVER;
+import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MANAGED_WEBSPACE;
 import static org.assertj.core.api.Assertions.assertThat;
 
 /*
@@ -73,7 +76,13 @@ import static org.assertj.core.api.Assertions.assertThat;
 @ExtendWith(OrderedDependedTestsExtension.class)
 public class ImportHostingAssets extends CsvDataImport {
 
+    private static final Integer IP_NUMBER_ID_OFFSET = 1000000;
+    private static final Integer PACKET_ID_OFFSET = 2000000;
+
+    record Hive(int hive_id, String hive_name, int inet_addr_id){};
+
     private static Map<Integer, HsBookingItemEntity> bookingItems = new WriteOnceMap<>();
+    private static Map<Integer, Hive> hives = new WriteOnceMap<>();
     private static Map<Integer, HsHostingAssetEntity> hostingAssets = new WriteOnceMap<>();
 
     @Test
@@ -95,11 +104,67 @@ public class ImportHostingAssets extends CsvDataImport {
         // no contacts yet => mostly null values
         assertThat(toFormattedString(firstOfType(5, hostingAssets, IPV4_NUMBER))).isEqualToIgnoringWhitespace("""
                 {
-                   329=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.0),
-                   330=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.1),
-                   331=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.2),
-                   332=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.3),
-                   333=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.4)
+                   1000333=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.4),
+                   1000332=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.3),
+                   1000331=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.2),
+                   1000330=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.1),
+                   1000329=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.0)
+                }
+                """);
+    }
+
+    @Test
+    @Order(1030)
+    void importHives() {
+        try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "hive.csv")) {
+            final var lines = readAllLines(reader);
+            importHives(justHeader(lines), withoutHeader(lines));
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Test
+    @Order(1039)
+    void verifyHives() {
+        assumeThatWeAreImportingControlledTestData();
+
+        // no contacts yet => mostly null values
+        assertThat(toFormattedString(first(5, hives))).isEqualToIgnoringWhitespace("""
+                {
+                   1=Hive[hive_id=1, hive_name=h01, inet_addr_id=358],
+                   2=Hive[hive_id=2, hive_name=h02, inet_addr_id=359],
+                   4=Hive[hive_id=4, hive_name=h03, inet_addr_id=360],
+                   7=Hive[hive_id=7, hive_name=h04, inet_addr_id=361],
+                   13=Hive[hive_id=13, hive_name=h05, inet_addr_id=430]
+                }
+                """);
+    }
+
+    @Test
+    @Order(2000)
+    void importServersAndWebspaces() {
+        try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "packet.csv")) {
+            final var lines = readAllLines(reader);
+            importServersAndWebspaces(justHeader(lines), withoutHeader(lines));
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Test
+    @Order(2009)
+    void verifyServersAndWebspaces() {
+        assumeThatWeAreImportingControlledTestData();
+
+        // no contacts yet => mostly null values
+        assertThat(toFormattedString(firstOfType(5, hostingAssets, CLOUD_SERVER, MANAGED_SERVER, MANAGED_WEBSPACE))).isEqualToIgnoringWhitespace("""
+                {
+                   2000012=HsHostingAssetEntity(MANAGED_WEBSPACE, al000),
+                   2000008=HsHostingAssetEntity(MANAGED_WEBSPACE, ahr01),
+                   2000009=HsHostingAssetEntity(MANAGED_WEBSPACE, aih00),
+                   2000007=HsHostingAssetEntity(MANAGED_WEBSPACE, ahr00),
+                   2000003=HsHostingAssetEntity(MANAGED_WEBSPACE, agu00)
                 }
                 """);
     }
@@ -115,7 +180,53 @@ public class ImportHostingAssets extends CsvDataImport {
                             .identifier(rec.getString("inet_addr"))
                             .caption(rec.getString("description"))
                             .build();
-                    hostingAssets.put(rec.getInteger("inet_addr_id"), ipNumber);
+                    hostingAssets.put(IP_NUMBER_ID_OFFSET + rec.getInteger("inet_addr_id"), ipNumber);
+                });
+    }
+
+    private void importHives(final String[] header, final List<String[]> records) {
+        final var columns = new Columns(header);
+        records.stream()
+                .map(this::trimAll)
+                .map(row -> new Record(columns, row))
+                .forEach(rec -> {
+                    final var hive_id = rec.getInteger("hive_id");
+                    final var hive = new Hive(hive_id,
+                            rec.getString("hive_name"),
+                            rec.getInteger("inet_addr_id"));
+                    hives.put(hive_id, hive);
+                });
+    }
+
+    private void importServersAndWebspaces(final String[] header, final List<String[]> records) {
+        final var columns = new Columns(header);
+        records.stream()
+                .map(this::trimAll)
+                .map(row -> new Record(columns, row))
+                .forEach(rec -> {
+
+                    final var packet_id = rec.getInteger("packet_id");
+                    final var basepacket_code = rec.getString("basepacket_code");
+                    final var packet_name = rec.getString("packet_name");
+                    final var bp_id = rec.getInteger("bp_id");
+                    final var hive_id = rec.getInteger("hive_id");
+                    final var created = rec.getLocalDate("created");
+                    final var cancelled = rec.getLocalDate("cancelled");
+                    final var cur_inet_addr_id = rec.getInteger("cur_inet_addr_id");
+                    final var old_inet_addr_id = rec.getInteger("old_inet_addr_id");
+                    final var free = rec.getBoolean("free");
+
+                    final var asset = HsHostingAssetEntity.builder()
+                            .type(switch (rec.getString("basepacket_code")) {
+                                case "SRV/CLD" -> CLOUD_SERVER;
+                                case "SRV/MGD" -> MANAGED_SERVER;
+                                case "PAC/WEB" -> MANAGED_WEBSPACE;
+                                default -> throw new IllegalArgumentException("unknown basepacket_code: " + rec.getString("basepacket_code"));
+                            })
+                            .identifier(rec.getString("packet_name"))
+                            // .caption(rec.getString("description"))
+                            .build();
+                    hostingAssets.put(PACKET_ID_OFFSET + packet_id, asset);
                 });
     }
 
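
Note on the id offsets: inet_addr_id and packet_id come from different legacy tables, so their numeric ranges can overlap. Shifting them by IP_NUMBER_ID_OFFSET (1000000) and PACKET_ID_OFFSET (2000000) puts the keys of the shared hostingAssets map into disjoint ranges, which is also why the expected keys in the IP-number assertion above changed from 329..333 to 1000329..1000333. A minimal sketch of the idea, outside the patch (the legacy id 333 is taken from the test data, everything else is illustrative):

    // Sketch only, not part of the patch: shows how the offsets keep keys from
    // different legacy tables apart in one shared map.
    public class IdOffsetSketch {
        private static final int IP_NUMBER_ID_OFFSET = 1_000_000;
        private static final int PACKET_ID_OFFSET = 2_000_000;

        public static void main(final String[] args) {
            final int inetAddrId = 333; // id from the legacy IP-number table
            final int packetId = 333;   // id from the legacy packet table, possibly equal

            // Without the offsets both rows would compete for map key 333;
            // with them the keys land in disjoint ranges.
            System.out.println(IP_NUMBER_ID_OFFSET + inetAddrId); // prints 1000333
            System.out.println(PACKET_ID_OFFSET + packetId);      // prints 2000333
        }
    }
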
@@ -130,6 +241,14 @@ public class ImportHostingAssets extends CsvDataImport {
                 .collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
     }
 
+    private Map<Integer, Object> first(
+            final int maxCount,
+            final Map<Integer, ?> entities) {
+        return entities.entrySet().stream()
+                .limit(maxCount)
+                .collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
     protected static boolean isImportingControlledTestData() {
         return MIGRATION_DATA_PATH.equals("migration");
     }
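
The bookingItems, hives and hostingAssets fields are WriteOnceMap instances; that class is not part of this diff, but the name suggests it refuses to overwrite an existing key, so a duplicate legacy id would fail fast during import instead of silently replacing an entity. A minimal sketch of a map with that assumed contract (illustrative only, not the project's implementation):

    import java.util.HashMap;

    // Sketch only: assumes the write-once contract, i.e. each key may be put at most once.
    // (For brevity this guards only put(), not putAll() or merge().)
    class WriteOnceMapSketch<K, V> extends HashMap<K, V> {
        @Override
        public V put(final K key, final V value) {
            if (containsKey(key)) {
                throw new IllegalStateException("duplicate key: " + key);
            }
            return super.put(key, value);
        }
    }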