Compare commits: d7d532ac71 ... c191af2ea1

No commits in common. "d7d532ac711ee1ed58b65bb3cbb8b9fe102864de" and "c191af2ea1005e7228697a0b4429448a91eb99ee" have entirely different histories.
.aliases (10 changed lines)

@@ -1,4 +1,4 @@
-# For using the alias gw-importOfficeData or gw-importHostingAssets,
+# For using the alias import-office-tables,
 # copy the file .tc-environment to .environment (ignored by git)
 # and amend them according to your external DB.

@@ -42,8 +42,7 @@ postgresAutodoc () {
 }
 alias postgres-autodoc=postgresAutodoc

-function importLegacyData() {
-  set target=$1
+function importOfficeData() {
   source .tc-environment

   if [ -f .environment ]; then
@@ -53,10 +52,9 @@ function importLegacyData() {
   echo "using environment (with ending ';' for use in IntelliJ IDEA):"
   set | grep ^HSADMINNG_ | sed 's/$/;/'

-  ./gradlew $target --rerun
+  ./gradlew importOfficeData --rerun
 }
-alias gw-importOfficeData='importLegacyData importOfficeData'
-alias gw-importHostingAssets='importLegacyData importHostingAssets'
+alias gw-importOfficeData=importOfficeData

 alias podman-start='systemctl --user enable --now podman.socket && systemctl --user status podman.socket && ls -la /run/user/$UID/podman/podman.sock'
 alias podman-stop='systemctl --user disable --now podman.socket && systemctl --user status podman.socket && ls -la /run/user/$UID/podman/podman.sock'
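Note: the workflow behind these aliases is, roughly, the following sketch. It assumes the repository root as working directory and that .aliases is sourced into the current shell; the gw-importHostingAssets alias only exists on the d7d532ac71 side.

    # copy the template and fill in your external DB settings (kept out of git)
    cp .tc-environment .environment
    $EDITOR .environment            # amend the HSADMINNG_* values

    # load the aliases, then run the legacy imports via Gradle
    source .aliases
    gw-importOfficeData             # effectively: ./gradlew importOfficeData --rerun
    gw-importHostingAssets          # d7d532ac71 only: ./gradlew importHostingAssets --rerun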
.gitignore (vendored, 5 changed lines)

@@ -136,9 +136,4 @@ Desktop.ini
 # ESLint
 ######################
 .eslintcache
-
-######################
-# Project Related
-######################
 /.environment*
-/src/test/resources/migration-prod/
CsvDataImport.java

@@ -1,232 +0,0 @@
-package net.hostsharing.hsadminng.hs.office.migration;
-
-import com.opencsv.CSVParserBuilder;
-import com.opencsv.CSVReader;
-import com.opencsv.CSVReaderBuilder;
-import net.hostsharing.hsadminng.rbac.context.ContextBasedTest;
-import net.hostsharing.hsadminng.rbac.rbacobject.RbacObject;
-import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
-import org.junit.jupiter.api.extension.BeforeEachCallback;
-import org.junit.jupiter.api.extension.ExtensionContext;
-import org.junit.jupiter.api.extension.TestWatcher;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.test.mock.mockito.MockBean;
-import org.springframework.transaction.support.TransactionTemplate;
-
-import jakarta.persistence.EntityManager;
-import jakarta.persistence.PersistenceContext;
-import jakarta.servlet.http.HttpServletRequest;
-import jakarta.validation.constraints.NotNull;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.math.BigDecimal;
-import java.time.LocalDate;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.stream.Collectors;
-
-import static java.lang.Boolean.parseBoolean;
-import static java.util.Arrays.stream;
-import static java.util.Objects.requireNonNull;
-import static java.util.Optional.ofNullable;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assumptions.assumeThat;
-
-public class CsvDataImport extends ContextBasedTest {
-
-    public static final String MIGRATION_DATA_PATH = ofNullable(System.getenv("HSADMINNG_MIGRATION_DATA_PATH")).orElse("migration") + "/";
-
-    @Value("${spring.datasource.url}")
-    protected String jdbcUrl;
-
-    @Value("${spring.datasource.username}")
-    protected String postgresAdminUser;
-
-    @Value("${hsadminng.superuser}")
-    protected String rbacSuperuser;
-
-    @PersistenceContext
-    EntityManager em;
-
-    @Autowired
-    TransactionTemplate txTemplate;
-
-    @Autowired
-    JpaAttempt jpaAttempt;
-
-    @MockBean
-    HttpServletRequest request;
-
-    public List<String[]> readAllLines(Reader reader) throws Exception {
-
-        final var parser = new CSVParserBuilder()
-                .withSeparator(';')
-                .withQuoteChar('"')
-                .build();
-
-        final var filteredReader = skippingEmptyAndCommentLines(reader);
-        try (CSVReader csvReader = new CSVReaderBuilder(filteredReader)
-                .withCSVParser(parser)
-                .build()) {
-            return csvReader.readAll();
-        }
-    }
-
-    public static Reader skippingEmptyAndCommentLines(Reader reader) throws IOException {
-        try (var bufferedReader = new BufferedReader(reader);
-                StringWriter writer = new StringWriter()) {
-
-            String line;
-            while ((line = bufferedReader.readLine()) != null) {
-                if (!line.isBlank() && !line.startsWith("#")) {
-                    writer.write(line);
-                    writer.write("\n");
-                }
-            }
-
-            return new StringReader(writer.toString());
-        }
-    }
-
-    protected static String[] justHeader(final List<String[]> lines) {
-        return stream(lines.getFirst()).map(String::trim).toArray(String[]::new);
-    }
-
-    protected Reader resourceReader(@NotNull final String resourcePath) {
-        return new InputStreamReader(requireNonNull(getClass().getClassLoader().getResourceAsStream(resourcePath)));
-    }
-
-    protected List<String[]> withoutHeader(final List<String[]> records) {
-        return records.subList(1, records.size());
-    }
-
-    String[] trimAll(final String[] record) {
-        for (int i = 0; i < record.length; ++i) {
-            if (record[i] != null) {
-                record[i] = record[i].trim();
-            }
-        }
-        return record;
-    }
-
-    protected void persist(final Integer id, final RbacObject entity) {
-        try {
-            //System.out.println("persisting #" + entity.hashCode() + ": " + entity);
-            em.persist(entity);
-            // uncomment for debugging purposes
-            // em.flush();
-            // System.out.println("persisted #" + entity.hashCode() + " as " + entity.getUuid());
-        } catch (Exception exc) {
-            System.err.println("failed to persist #" + entity.hashCode() + ": " + entity);
-            System.err.println(exc);
-        }
-
-    }
-
-    protected <E> String toFormattedString(final Map<Integer, E> map) {
-        if ( map.isEmpty() ) {
-            return "{}";
-        }
-        return "{\n" +
-                map.keySet().stream()
-                        .map(id -> " " + id + "=" + map.get(id).toString())
-                        .map(e -> e.replaceAll("\n ", " ").replace("\n", ""))
-                        .collect(Collectors.joining(",\n")) +
-                "\n}\n";
-    }
-}
-
-class Columns {
-
-    private final List<String> columnNames;
-
-    public Columns(final String[] header) {
-        columnNames = List.of(header);
-    }
-
-    int indexOf(final String columnName) {
-        int index = columnNames.indexOf(columnName);
-        if (index < 0) {
-            throw new RuntimeException("column name '" + columnName + "' not found in: " + columnNames);
-        }
-        return index;
-    }
-}
-
-class Record {
-
-    private final Columns columns;
-    private final String[] row;
-
-    public Record(final Columns columns, final String[] row) {
-        this.columns = columns;
-        this.row = row;
-    }
-
-    String getString(final String columnName) {
-        return row[columns.indexOf(columnName)];
-    }
-
-    boolean isEmpty(final String columnName) {
-        final String value = getString(columnName);
-        return value == null || value.isBlank();
-    }
-
-    boolean getBoolean(final String columnName) {
-        final String value = getString(columnName);
-        return isNotBlank(value) &&
-                ( parseBoolean(value.trim()) || value.trim().startsWith("t"));
-    }
-
-    Integer getInteger(final String columnName) {
-        final String value = getString(columnName);
-        return isNotBlank(value) ? Integer.parseInt(value.trim()) : 0;
-    }
-
-    BigDecimal getBigDecimal(final String columnName) {
-        final String value = getString(columnName);
-        if (isNotBlank(value)) {
-            return new BigDecimal(value);
-        }
-        return null;
-    }
-
-    LocalDate getLocalDate(final String columnName) {
-        final String dateString = getString(columnName);
-        if (isNotBlank(dateString)) {
-            return LocalDate.parse(dateString);
-        }
-        return null;
-    }
-}
-
-class OrderedDependedTestsExtension implements TestWatcher, BeforeEachCallback {
-
-    private static boolean previousTestsPassed = true;
-
-    public void testFailed(ExtensionContext context, Throwable cause) {
-        previousTestsPassed = false;
-    }
-
-    @Override
-    public void beforeEach(final ExtensionContext extensionContext) {
-        assumeThat(previousTestsPassed).isTrue();
-    }
-}
-
-class WriteOnceMap<K, V> extends TreeMap<K, V> {
-
-    @Override
-    public V put(final K k, final V v) {
-        assertThat(containsKey(k)).describedAs("overwriting " + get(k) + " index " + k + " with " + v).isFalse();
-        return super.put(k, v);
-    }
-}
ImportHostingAssets.java

@@ -1,260 +0,0 @@
-package net.hostsharing.hsadminng.hs.office.migration;
-
-import net.hostsharing.hsadminng.context.Context;
-import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemEntity;
-import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetEntity;
-import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType;
-import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
-import org.junit.jupiter.api.MethodOrderer;
-import org.junit.jupiter.api.Order;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestMethodOrder;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
-import org.springframework.context.annotation.Import;
-import org.springframework.test.annotation.DirtiesContext;
-
-import java.io.Reader;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import static java.util.stream.Collectors.toMap;
-import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.CLOUD_SERVER;
-import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.IPV4_NUMBER;
-import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MANAGED_SERVER;
-import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.MANAGED_WEBSPACE;
-import static org.assertj.core.api.Assertions.assertThat;
-
-/*
- * This 'test' includes the complete legacy 'office' data import.
- *
- * There is no code in 'main' because the import is not needed a normal runtime.
- * There is some test data in Java resources to verify the data conversion.
- * For a real import a main method will be added later
- * which reads CSV files from the file system.
- *
- * When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
- *
- * In a real Hostsharing environment, these are created via (the old) hsadmin:
-
-    CREATE USER hsh99_admin WITH PASSWORD 'password';
-    CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
-    REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
-    ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;
-
-    CREATE USER hsh99_restricted WITH PASSWORD 'password';
-
-    \c hsh99_hsadminng
-
-    GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;
-
- * Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):
-
-    CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-
-    -- maybe something like that is needed for the 2nd user
-    -- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;
-
- * Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.
-
- * To finally import the office data, run:
- *
- *   gw-importHostingAssets # comes from .aliases file and uses .environment
- */
-@Tag("import")
-@DataJpaTest(properties = {
-        "spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///spring_boot_testcontainers}",
-        "spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
-        "spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
-        "hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
-})
-@DirtiesContext
-@Import({ Context.class, JpaAttempt.class })
-@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
-@ExtendWith(OrderedDependedTestsExtension.class)
-public class ImportHostingAssets extends CsvDataImport {
-
-    private static final Integer IP_NUMBER_ID_OFFSET = 1000000;
-    private static final Integer PACKET_ID_OFFSET = 2000000;
-
-    record Hive(int hive_id, String hive_name, int inet_addr_id){};
-
-    private static Map<Integer, HsBookingItemEntity> bookingItems = new WriteOnceMap<>();
-    private static Map<Integer, Hive> hives = new WriteOnceMap<>();
-    private static Map<Integer, HsHostingAssetEntity> hostingAssets = new WriteOnceMap<>();
-
-    @Test
-    @Order(1010)
-    void importIpNumbers() {
-        try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "inet_addr.csv")) {
-            final var lines = readAllLines(reader);
-            importIpNumbers(justHeader(lines), withoutHeader(lines));
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Test
-    @Order(1019)
-    void verifyIpNumbers() {
-        assumeThatWeAreImportingControlledTestData();
-
-        // no contacts yet => mostly null values
-        assertThat(toFormattedString(firstOfType(5, hostingAssets, IPV4_NUMBER))).isEqualToIgnoringWhitespace("""
-                {
-                    1000333=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.4),
-                    1000332=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.3),
-                    1000331=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.2),
-                    1000330=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.1),
-                    1000329=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.0)
-                }
-                """);
-    }
-
-    @Test
-    @Order(1030)
-    void importHives() {
-        try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "hive.csv")) {
-            final var lines = readAllLines(reader);
-            importHives(justHeader(lines), withoutHeader(lines));
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Test
-    @Order(1039)
-    void verifyHives() {
-        assumeThatWeAreImportingControlledTestData();
-
-        // no contacts yet => mostly null values
-        assertThat(toFormattedString(first(5, hives))).isEqualToIgnoringWhitespace("""
-                {
-                    1=Hive[hive_id=1, hive_name=h01, inet_addr_id=358],
-                    2=Hive[hive_id=2, hive_name=h02, inet_addr_id=359],
-                    4=Hive[hive_id=4, hive_name=h03, inet_addr_id=360],
-                    7=Hive[hive_id=7, hive_name=h04, inet_addr_id=361],
-                    13=Hive[hive_id=13, hive_name=h05, inet_addr_id=430]
-                }
-                """);
-    }
-
-    @Test
-    @Order(2000)
-    void importServersAndWebspaces() {
-        try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "packet.csv")) {
-            final var lines = readAllLines(reader);
-            importServersAndWebspaces(justHeader(lines), withoutHeader(lines));
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Test
-    @Order(2009)
-    void verifyServersAndWebspaces() {
-        assumeThatWeAreImportingControlledTestData();
-
-        // no contacts yet => mostly null values
-        assertThat(toFormattedString(firstOfType(5, hostingAssets, CLOUD_SERVER, MANAGED_SERVER, MANAGED_WEBSPACE))).isEqualToIgnoringWhitespace("""
-                {
-                    2000012=HsHostingAssetEntity(MANAGED_WEBSPACE, al000),
-                    2000008=HsHostingAssetEntity(MANAGED_WEBSPACE, ahr01),
-                    2000009=HsHostingAssetEntity(MANAGED_WEBSPACE, aih00),
-                    2000007=HsHostingAssetEntity(MANAGED_WEBSPACE, ahr00),
-                    2000003=HsHostingAssetEntity(MANAGED_WEBSPACE, agu00)
-                }
-                """);
-    }
-
-    private void importIpNumbers(final String[] header, final List<String[]> records) {
-        final var columns = new Columns(header);
-        records.stream()
-                .map(this::trimAll)
-                .map(row -> new Record(columns, row))
-                .forEach(rec -> {
-                    final var ipNumber = HsHostingAssetEntity.builder()
-                            .type(IPV4_NUMBER)
-                            .identifier(rec.getString("inet_addr"))
-                            .caption(rec.getString("description"))
-                            .build();
-                    hostingAssets.put(IP_NUMBER_ID_OFFSET + rec.getInteger("inet_addr_id"), ipNumber);
-                });
-    }
-
-    private void importHives(final String[] header, final List<String[]> records) {
-        final var columns = new Columns(header);
-        records.stream()
-                .map(this::trimAll)
-                .map(row -> new Record(columns, row))
-                .forEach(rec -> {
-                    final var hive_id = rec.getInteger("hive_id");
-                    final var hive = new Hive(hive_id,
-                            rec.getString("hive_name"),
-                            rec.getInteger("inet_addr_id"));
-                    hives.put(hive_id, hive);
-                });
-    }
-
-    private void importServersAndWebspaces(final String[] header, final List<String[]> records) {
-        final var columns = new Columns(header);
-        records.stream()
-                .map(this::trimAll)
-                .map(row -> new Record(columns, row))
-                .forEach(rec -> {
-
-                    final var packet_id = rec.getInteger("packet_id");
-                    final var basepacket_code = rec.getString("basepacket_code");
-                    final var packet_name = rec.getString("packet_name");
-                    final var bp_id = rec.getInteger("bp_id");
-                    final var hive_id = rec.getInteger("hive_id");
-                    final var created = rec.getLocalDate("created");
-                    final var cancelled = rec.getLocalDate("cancelled");
-                    final var cur_inet_addr_id = rec.getInteger("cur_inet_addr_id");
-                    final var old_inet_addr_id = rec.getInteger("old_inet_addr_id");
-                    final var free = rec.getBoolean("free");
-
-                    final var asset = HsHostingAssetEntity.builder()
-                            .type(switch (rec.getString("basepacket_code")) {
-                                case "SRV/CLD" -> CLOUD_SERVER;
-                                case "SRV/MGD" -> MANAGED_SERVER;
-                                case "PAC/WEB" -> MANAGED_WEBSPACE;
-                                default -> throw new IllegalArgumentException("unknown basepacket_code: " + rec.getString("basepacket_code"));
-                            })
-                            .identifier(rec.getString("packet_name"))
-                            // .caption(rec.getString("description"))
-                            .build();
-                    hostingAssets.put(PACKET_ID_OFFSET + packet_id, asset);
-                });
-    }
-
-    private Map<Integer, Object> firstOfType(
-            final int maxCount,
-            final Map<Integer, HsHostingAssetEntity> hostingAssets,
-            final HsHostingAssetType... types) {
-        final var typesList = Arrays.asList(types);
-        return hostingAssets.entrySet().stream()
-                .filter(ha -> typesList.contains(ha.getValue().getType()))
-                .limit(maxCount)
-                .collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
-    }
-
-    private Map<Integer, Object> first(
-            final int maxCount,
-            final Map<Integer, ?> entities) {
-        return entities.entrySet().stream()
-                .limit(maxCount)
-                .collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
-    }
-
-    protected static boolean isImportingControlledTestData() {
-        return MIGRATION_DATA_PATH.equals("migration");
-    }
-
-    protected static void assumeThatWeAreImportingControlledTestData() {
-        // assumeThat(isImportingControlledTestData()).isTrue(); FIXME
-    }
-
-}
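Note: the removed class documents the database objects and environment variables needed for a run against a real database. A minimal sketch of the .environment file it refers to could look like the following; the variable names come from the @DataJpaTest properties and the HSADMINNG_MIGRATION_DATA_PATH lookup above, while all values are placeholders to be replaced with your own settings:

    # hypothetical .environment sketch; values are placeholders, not real credentials
    export HSADMINNG_POSTGRES_JDBC_URL=jdbc:postgresql://localhost:5432/hsh99_hsadminng
    export HSADMINNG_POSTGRES_ADMIN_USERNAME=hsh99_admin
    export HSADMINNG_POSTGRES_ADMIN_PASSWORD=password
    export HSADMINNG_SUPERUSER=superuser-alex@hostsharing.net
    export HSADMINNG_MIGRATION_DATA_PATH=migration   # optional; defaults to the 'migration' test resources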
ImportOfficeData.java

@@ -1,6 +1,10 @@
 package net.hostsharing.hsadminng.hs.office.migration;

+import com.opencsv.CSVParserBuilder;
+import com.opencsv.CSVReader;
+import com.opencsv.CSVReaderBuilder;
 import net.hostsharing.hsadminng.context.Context;
+import net.hostsharing.hsadminng.rbac.context.ContextBasedTest;
 import net.hostsharing.hsadminng.hs.office.bankaccount.HsOfficeBankAccountEntity;
 import net.hostsharing.hsadminng.hs.office.contact.HsOfficeContactEntity;
 import net.hostsharing.hsadminng.hs.office.coopassets.HsOfficeCoopAssetsTransactionEntity;
@@ -22,18 +26,34 @@ import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.junit.jupiter.api.*;
+import org.junit.jupiter.api.extension.BeforeEachCallback;
 import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.TestWatcher;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
+import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.context.annotation.Import;
 import org.springframework.test.annotation.Commit;
 import org.springframework.test.annotation.DirtiesContext;
+import org.springframework.transaction.support.TransactionTemplate;

+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.constraints.NotNull;
 import java.io.*;
+import java.math.BigDecimal;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.time.LocalDate;
 import java.util.*;
 import java.util.stream.Collectors;

+import static java.lang.Boolean.parseBoolean;
 import static java.util.Arrays.stream;
+import static java.util.Objects.requireNonNull;
 import static java.util.Optional.ofNullable;
 import static net.hostsharing.hsadminng.mapper.PostgresDateRange.toPostgresDateRange;
 import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -76,7 +96,7 @@ import static org.assertj.core.api.Fail.fail;

  * To finally import the office data, run:
  *
- *   gw-importOfficeTables # comes from .aliases file and uses .environment
+ *   import-office-tables # comes from .aliases file and uses .environment
  */
 @Tag("import")
 @DataJpaTest(properties = {
@@ -89,7 +109,7 @@ import static org.assertj.core.api.Fail.fail;
 @Import({ Context.class, JpaAttempt.class })
 @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 @ExtendWith(OrderedDependedTestsExtension.class)
-public class ImportOfficeData extends CsvDataImport {
+public class ImportOfficeData extends ContextBasedTest {

     private static final String[] SUBSCRIBER_ROLES = new String[] {
             "subscriber:operations-discussion",
@@ -105,6 +125,7 @@ public class ImportOfficeData extends CsvDataImport {

     // at least as the number of lines in business-partners.csv from test-data, but less than real data partner count
     public static final int MAX_NUMBER_OF_TEST_DATA_PARTNERS = 100;
+    public static final String MIGRATION_DATA_PATH = ofNullable(System.getenv("HSADMINNG_MIGRATION_DATA_PATH")).orElse("migration") + "/";

     static int relationId = 2000000;

@@ -119,6 +140,15 @@ public class ImportOfficeData extends CsvDataImport {
             -1
     );

+    @Value("${spring.datasource.url}")
+    private String jdbcUrl;
+
+    @Value("${spring.datasource.username}")
+    private String postgresAdminUser;
+
+    @Value("${hsadminng.superuser}")
+    private String rbacSuperuser;
+
     private static Map<Integer, HsOfficeContactEntity> contacts = new WriteOnceMap<>();
     private static Map<Integer, HsOfficePersonEntity> persons = new WriteOnceMap<>();
     private static Map<Integer, HsOfficePartnerEntity> partners = new WriteOnceMap<>();
@@ -131,6 +161,18 @@ public class ImportOfficeData extends CsvDataImport {
     private static Map<Integer, HsOfficeCoopSharesTransactionEntity> coopShares = new WriteOnceMap<>();
     private static Map<Integer, HsOfficeCoopAssetsTransactionEntity> coopAssets = new WriteOnceMap<>();

+    @PersistenceContext
+    EntityManager em;
+
+    @Autowired
+    TransactionTemplate txTemplate;
+
+    @Autowired
+    JpaAttempt jpaAttempt;
+
+    @MockBean
+    HttpServletRequest request;
+
     @Test
     @Order(1010)
     void importBusinessPartners() {
@@ -560,6 +602,20 @@ public class ImportOfficeData extends CsvDataImport {

     }

+    private void persist(final Integer id, final RbacObject entity) {
+        try {
+            //System.out.println("persisting #" + entity.hashCode() + ": " + entity);
+            em.persist(entity);
+            // uncomment for debugging purposes
+            // em.flush();
+            // System.out.println("persisted #" + entity.hashCode() + " as " + entity.getUuid());
+        } catch (Exception exc) {
+            System.err.println("failed to persist #" + entity.hashCode() + ": " + entity);
+            System.err.println(exc);
+        }
+
+    }
+
     private static boolean isImportingControlledTestData() {
         return partners.size() <= MAX_NUMBER_OF_TEST_DATA_PARTNERS;
     }
@@ -642,6 +698,37 @@ public class ImportOfficeData extends CsvDataImport {
         );
     }

+    public List<String[]> readAllLines(Reader reader) throws Exception {
+
+        final var parser = new CSVParserBuilder()
+                .withSeparator(';')
+                .withQuoteChar('"')
+                .build();
+
+        final var filteredReader = skippingEmptyAndCommentLines(reader);
+        try (CSVReader csvReader = new CSVReaderBuilder(filteredReader)
+                .withCSVParser(parser)
+                .build()) {
+            return csvReader.readAll();
+        }
+    }
+
+    public static Reader skippingEmptyAndCommentLines(Reader reader) throws IOException {
+        try (var bufferedReader = new BufferedReader(reader);
+                StringWriter writer = new StringWriter()) {
+
+            String line;
+            while ((line = bufferedReader.readLine()) != null) {
+                if (!line.isBlank() && !line.startsWith("#")) {
+                    writer.write(line);
+                    writer.write("\n");
+                }
+            }
+
+            return new StringReader(writer.toString());
+        }
+    }
+
     private void importBusinessPartners(final String[] header, final List<String[]> records) {

         final var columns = new Columns(header);
@@ -1041,6 +1128,27 @@ public class ImportOfficeData extends CsvDataImport {
         return contact;
     }

+    private <E> String toFormattedString(final Map<Integer, E> map) {
+        if ( map.isEmpty() ) {
+            return "{}";
+        }
+        return "{\n" +
+                map.keySet().stream()
+                        .map(id -> " " + id + "=" + map.get(id).toString())
+                        .map(e -> e.replaceAll("\n ", " ").replace("\n", ""))
+                        .collect(Collectors.joining(",\n")) +
+                "\n}\n";
+    }
+
+    private String[] trimAll(final String[] record) {
+        for (int i = 0; i < record.length; ++i) {
+            if (record[i] != null) {
+                record[i] = record[i].trim();
+            }
+        }
+        return record;
+    }
+
     private Map<String, String> toPhoneNumbers(final Record rec) {
         final var phoneNumbers = new LinkedHashMap<String, String>();
         if (isNotBlank(rec.getString("phone_private")))
@@ -1110,4 +1218,104 @@ public class ImportOfficeData extends CsvDataImport {
     private String toName(final String salut, final String title, final String firstname, final String lastname) {
         return toCaption(salut, title, firstname, lastname, null);
     }
+
+    private Reader resourceReader(@NotNull final String resourcePath) {
+        return new InputStreamReader(requireNonNull(getClass().getClassLoader().getResourceAsStream(resourcePath)));
+    }
+
+    private static String[] justHeader(final List<String[]> lines) {
+        return stream(lines.getFirst()).map(String::trim).toArray(String[]::new);
+    }
+
+    private List<String[]> withoutHeader(final List<String[]> records) {
+        return records.subList(1, records.size());
+    }
+
+}
+
+class Columns {
+
+    private final List<String> columnNames;
+
+    public Columns(final String[] header) {
+        columnNames = List.of(header);
+    }
+
+    int indexOf(final String columnName) {
+        int index = columnNames.indexOf(columnName);
+        if (index < 0) {
+            throw new RuntimeException("column name '" + columnName + "' not found in: " + columnNames);
+        }
+        return index;
+    }
+}
+
+class Record {
+
+    private final Columns columns;
+    private final String[] row;
+
+    public Record(final Columns columns, final String[] row) {
+        this.columns = columns;
+        this.row = row;
+    }
+
+    String getString(final String columnName) {
+        return row[columns.indexOf(columnName)];
+    }
+
+    boolean isEmpty(final String columnName) {
+        final String value = getString(columnName);
+        return value == null || value.isBlank();
+    }
+
+    boolean getBoolean(final String columnName) {
+        final String value = getString(columnName);
+        return isNotBlank(value) &&
+                ( parseBoolean(value.trim()) || value.trim().startsWith("t"));
+    }
+
+    Integer getInteger(final String columnName) {
+        final String value = getString(columnName);
+        return isNotBlank(value) ? Integer.parseInt(value.trim()) : 0;
+    }
+
+    BigDecimal getBigDecimal(final String columnName) {
+        final String value = getString(columnName);
+        if (isNotBlank(value)) {
+            return new BigDecimal(value);
+        }
+        return null;
+    }
+
+    LocalDate getLocalDate(final String columnName) {
+        final String dateString = getString(columnName);
+        if (isNotBlank(dateString)) {
+            return LocalDate.parse(dateString);
+        }
+        return null;
+    }
+}
+
+class OrderedDependedTestsExtension implements TestWatcher, BeforeEachCallback {
+
+    private static boolean previousTestsPassed = true;
+
+    public void testFailed(ExtensionContext context, Throwable cause) {
+        previousTestsPassed = false;
+    }
+
+    @Override
+    public void beforeEach(final ExtensionContext extensionContext) {
+        assumeThat(previousTestsPassed).isTrue();
+    }
+}
+
+class WriteOnceMap<K, V> extends TreeMap<K, V> {
+
+    @Override
+    public V put(final K k, final V v) {
+        assertThat(containsKey(k)).describedAs("overwriting " + get(k) + " index " + k + " with " + v).isFalse();
+        return super.put(k, v);
+    }
 }