import inet_addr.csv

Michael Hoennig 2024-07-16 13:35:54 +02:00
parent a54a3cd955
commit 2677ba93d1
10 changed files with 224 additions and 74 deletions

File: .aliases

@@ -1,4 +1,4 @@
-# For using the alias import-office-tables,
+# For using the alias gw-importOfficeData or gw-importHostingAssets,
# copy the file .tc-environment to .environment (ignored by git)
# and amend them according to your external DB.
@@ -42,7 +42,8 @@ postgresAutodoc () {
}
alias postgres-autodoc=postgresAutodoc
-function importOfficeData() {
+function importLegacyData() {
+    set target=$1
    source .tc-environment
    if [ -f .environment ]; then
@@ -52,9 +53,10 @@ function importOfficeData() {
    echo "using environment (with ending ';' for use in IntelliJ IDEA):"
    set | grep ^HSADMINNG_ | sed 's/$/;/'
-    ./gradlew importOfficeData --rerun
+    ./gradlew $target --rerun
}
-alias gw-importOfficeData=importOfficeData
+alias gw-importOfficeData='importLegacyData importOfficeData'
+alias gw-importHostingAssets='importLegacyData importHostingAssets'
alias podman-start='systemctl --user enable --now podman.socket && systemctl --user status podman.socket && ls -la /run/user/$UID/podman/podman.sock'
alias podman-stop='systemctl --user disable --now podman.socket && systemctl --user status podman.socket && ls -la /run/user/$UID/podman/podman.sock'
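For context, this is roughly how the renamed function and the two aliases are meant to be used. A minimal sketch, assuming the aliases file above is sourced into an interactive shell and that the importOfficeData/importHostingAssets Gradle tasks referenced by the aliases exist:

    cp .tc-environment .environment     # create the local override (ignored by git)
    $EDITOR .environment                # amend the HSADMINNG_... settings for your external DB
    source .aliases                     # load importLegacyData and the gw-* aliases
    gw-importOfficeData                 # runs: importLegacyData importOfficeData
    gw-importHostingAssets              # runs: importLegacyData importHostingAssets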

File: CsvDataImport.java

@@ -1,6 +1,10 @@
package net.hostsharing.hsadminng.hs.office.migration;
+import com.opencsv.CSVParserBuilder;
+import com.opencsv.CSVReader;
+import com.opencsv.CSVReaderBuilder;
import net.hostsharing.hsadminng.rbac.context.ContextBasedTest;
+import net.hostsharing.hsadminng.rbac.rbacobject.RbacObject;
import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
@@ -15,12 +19,18 @@ import jakarta.persistence.PersistenceContext;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.constraints.NotNull;
+import java.io.BufferedReader;
+import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.util.List;
+import java.util.Map;
import java.util.TreeMap;
+import java.util.stream.Collectors;
import static java.lang.Boolean.parseBoolean;
import static java.util.Arrays.stream;
@@ -55,6 +65,37 @@ public class CsvDataImport extends ContextBasedTest {
    @MockBean
    HttpServletRequest request;

+    public List<String[]> readAllLines(Reader reader) throws Exception {
+        final var parser = new CSVParserBuilder()
+                .withSeparator(';')
+                .withQuoteChar('"')
+                .build();
+        final var filteredReader = skippingEmptyAndCommentLines(reader);
+        try (CSVReader csvReader = new CSVReaderBuilder(filteredReader)
+                .withCSVParser(parser)
+                .build()) {
+            return csvReader.readAll();
+        }
+    }
+
+    public static Reader skippingEmptyAndCommentLines(Reader reader) throws IOException {
+        try (var bufferedReader = new BufferedReader(reader);
+             StringWriter writer = new StringWriter()) {
+            String line;
+            while ((line = bufferedReader.readLine()) != null) {
+                if (!line.isBlank() && !line.startsWith("#")) {
+                    writer.write(line);
+                    writer.write("\n");
+                }
+            }
+            return new StringReader(writer.toString());
+        }
+    }

    protected static String[] justHeader(final List<String[]> lines) {
        return stream(lines.getFirst()).map(String::trim).toArray(String[]::new);
    }
@@ -66,6 +107,41 @@ public class CsvDataImport extends ContextBasedTest {
    protected List<String[]> withoutHeader(final List<String[]> records) {
        return records.subList(1, records.size());
    }

+    String[] trimAll(final String[] record) {
+        for (int i = 0; i < record.length; ++i) {
+            if (record[i] != null) {
+                record[i] = record[i].trim();
+            }
+        }
+        return record;
+    }
+
+    protected void persist(final Integer id, final RbacObject entity) {
+        try {
+            //System.out.println("persisting #" + entity.hashCode() + ": " + entity);
+            em.persist(entity);
+            // uncomment for debugging purposes
+            // em.flush();
+            // System.out.println("persisted #" + entity.hashCode() + " as " + entity.getUuid());
+        } catch (Exception exc) {
+            System.err.println("failed to persist #" + entity.hashCode() + ": " + entity);
+            System.err.println(exc);
+        }
+    }
+
+    protected <E> String toFormattedString(final Map<Integer, E> map) {
+        if ( map.isEmpty() ) {
+            return "{}";
+        }
+        return "{\n" +
+                map.keySet().stream()
+                        .map(id -> " " + id + "=" + map.get(id).toString())
+                        .map(e -> e.replaceAll("\n ", " ").replace("\n", ""))
+                        .collect(Collectors.joining(",\n")) +
+                "\n}\n";
+    }
}

class Columns {

File: ImportHostingAssets.java (new file)

@@ -0,0 +1,141 @@
package net.hostsharing.hsadminng.hs.office.migration;
import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.hs.booking.item.HsBookingItemEntity;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetEntity;
import net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType;
import net.hostsharing.hsadminng.rbac.test.JpaAttempt;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.context.annotation.Import;
import org.springframework.test.annotation.DirtiesContext;
import java.io.Reader;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static java.util.stream.Collectors.toMap;
import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.IPV4_NUMBER;
import static org.assertj.core.api.Assertions.assertThat;
/*
* This 'test' includes the complete legacy 'office' data import.
*
* There is no code in 'main' because the import is not needed at normal runtime.
* There is some test data in Java resources to verify the data conversion.
* For a real import a main method will be added later
* which reads CSV files from the file system.
*
* When run on a Hostsharing database, it needs the following settings (hsh99_... just examples).
*
* In a real Hostsharing environment, these are created via (the old) hsadmin:
CREATE USER hsh99_admin WITH PASSWORD 'password';
CREATE DATABASE hsh99_hsadminng ENCODING 'UTF8' TEMPLATE template0;
REVOKE ALL ON DATABASE hsh99_hsadminng FROM public; -- why does hsadmin do that?
ALTER DATABASE hsh99_hsadminng OWNER TO hsh99_admin;
CREATE USER hsh99_restricted WITH PASSWORD 'password';
\c hsh99_hsadminng
GRANT ALL PRIVILEGES ON SCHEMA public to hsh99_admin;
* Additionally, we need these settings (because the Hostsharing DB-Admin has no CREATE right):
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- maybe something like that is needed for the 2nd user
-- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public to hsh99_restricted;
* Then copy the file .tc-environment to a file named .environment (excluded from git) and fill in your specific values.
* To finally import the office data, run:
*
* gw-importHostingAssets # comes from .aliases file and uses .environment
*/
@Tag("import")
@DataJpaTest(properties = {
"spring.datasource.url=${HSADMINNG_POSTGRES_JDBC_URL:jdbc:tc:postgresql:15.5-bookworm:///spring_boot_testcontainers}",
"spring.datasource.username=${HSADMINNG_POSTGRES_ADMIN_USERNAME:ADMIN}",
"spring.datasource.password=${HSADMINNG_POSTGRES_ADMIN_PASSWORD:password}",
"hsadminng.superuser=${HSADMINNG_SUPERUSER:superuser-alex@hostsharing.net}"
})
@DirtiesContext
@Import({ Context.class, JpaAttempt.class })
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@ExtendWith(OrderedDependedTestsExtension.class)
public class ImportHostingAssets extends CsvDataImport {
    private static Map<Integer, HsBookingItemEntity> bookingItems = new WriteOnceMap<>();
    private static Map<Integer, HsHostingAssetEntity> hostingAssets = new WriteOnceMap<>();

    @Test
    @Order(1010)
    void importIpNumbers() {
        try (Reader reader = resourceReader(MIGRATION_DATA_PATH + "inet_addr.csv")) {
            final var lines = readAllLines(reader);
            importIpNumbers(justHeader(lines), withoutHeader(lines));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    @Order(1019)
    void verifyIpNumbers() {
        assumeThatWeAreImportingControlledTestData();

        // no contacts yet => mostly null values
        assertThat(toFormattedString(firstOfType(5, hostingAssets, IPV4_NUMBER))).isEqualToIgnoringWhitespace("""
                {
                    329=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.0),
                    330=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.1),
                    331=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.2),
                    332=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.3),
                    333=HsHostingAssetEntity(IPV4_NUMBER, 83.223.95.4)
                }
                """);
    }

    private void importIpNumbers(final String[] header, final List<String[]> records) {
        final var columns = new Columns(header);
        records.stream()
                .map(this::trimAll)
                .map(row -> new Record(columns, row))
                .forEach(rec -> {
                    final var ipNumber = HsHostingAssetEntity.builder()
                            .type(IPV4_NUMBER)
                            .identifier(rec.getString("inet_addr"))
                            .caption(rec.getString("description"))
                            .build();
                    hostingAssets.put(rec.getInteger("inet_addr_id"), ipNumber);
                });
    }

    private Map<Integer, Object> firstOfType(
            final int maxCount,
            final Map<Integer, HsHostingAssetEntity> hostingAssets,
            final HsHostingAssetType... types) {
        final var typesList = Arrays.asList(types);
        return hostingAssets.entrySet().stream()
                .filter(ha -> typesList.contains(ha.getValue().getType()))
                .limit(maxCount)
                .collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    protected static boolean isImportingControlledTestData() {
        return MIGRATION_DATA_PATH.equals("migration");
    }

    protected static void assumeThatWeAreImportingControlledTestData() {
        // assumeThat(isImportingControlledTestData()).isTrue(); FIXME
    }
}
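Put together with the .aliases change above, running this import via gw-importHostingAssets boils down to roughly the following. A sketch only: the variable values are placeholders mirroring the examples in the class comment, the real ones come from your .environment file, and importHostingAssets is the Gradle task referenced by the new alias:

    export HSADMINNG_POSTGRES_JDBC_URL='jdbc:postgresql://localhost:5432/hsh99_hsadminng'   # placeholder
    export HSADMINNG_POSTGRES_ADMIN_USERNAME='hsh99_admin'                                  # placeholder
    export HSADMINNG_POSTGRES_ADMIN_PASSWORD='password'                                     # placeholder
    export HSADMINNG_SUPERUSER='superuser-alex@hostsharing.net'
    ./gradlew importHostingAssets --rerun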

File: ImportOfficeData.java

@@ -1,8 +1,5 @@
package net.hostsharing.hsadminng.hs.office.migration;
-import com.opencsv.CSVParserBuilder;
-import com.opencsv.CSVReader;
-import com.opencsv.CSVReaderBuilder;
import net.hostsharing.hsadminng.context.Context;
import net.hostsharing.hsadminng.hs.office.bankaccount.HsOfficeBankAccountEntity;
import net.hostsharing.hsadminng.hs.office.contact.HsOfficeContactEntity;
@@ -79,7 +76,7 @@ import static org.assertj.core.api.Fail.fail;
* To finally import the office data, run:
*
-* import-office-tables # comes from .aliases file and uses .environment
+* gw-importOfficeTables # comes from .aliases file and uses .environment
*/
@Tag("import")
@DataJpaTest(properties = {
@@ -563,20 +560,6 @@ public class ImportOfficeData extends CsvDataImport {
    }

-    private void persist(final Integer id, final RbacObject entity) {
-        try {
-            //System.out.println("persisting #" + entity.hashCode() + ": " + entity);
-            em.persist(entity);
-            // uncomment for debugging purposes
-            // em.flush();
-            // System.out.println("persisted #" + entity.hashCode() + " as " + entity.getUuid());
-        } catch (Exception exc) {
-            System.err.println("failed to persist #" + entity.hashCode() + ": " + entity);
-            System.err.println(exc);
-        }
-    }

    private static boolean isImportingControlledTestData() {
        return partners.size() <= MAX_NUMBER_OF_TEST_DATA_PARTNERS;
    }
@@ -659,37 +642,6 @@ public class ImportOfficeData extends CsvDataImport {
        );
    }

-    public List<String[]> readAllLines(Reader reader) throws Exception {
-        final var parser = new CSVParserBuilder()
-                .withSeparator(';')
-                .withQuoteChar('"')
-                .build();
-        final var filteredReader = skippingEmptyAndCommentLines(reader);
-        try (CSVReader csvReader = new CSVReaderBuilder(filteredReader)
-                .withCSVParser(parser)
-                .build()) {
-            return csvReader.readAll();
-        }
-    }
-
-    public static Reader skippingEmptyAndCommentLines(Reader reader) throws IOException {
-        try (var bufferedReader = new BufferedReader(reader);
-             StringWriter writer = new StringWriter()) {
-            String line;
-            while ((line = bufferedReader.readLine()) != null) {
-                if (!line.isBlank() && !line.startsWith("#")) {
-                    writer.write(line);
-                    writer.write("\n");
-                }
-            }
-            return new StringReader(writer.toString());
-        }
-    }

    private void importBusinessPartners(final String[] header, final List<String[]> records) {
        final var columns = new Columns(header);
@@ -1089,27 +1041,6 @@ public class ImportOfficeData extends CsvDataImport {
        return contact;
    }

-    private <E> String toFormattedString(final Map<Integer, E> map) {
-        if ( map.isEmpty() ) {
-            return "{}";
-        }
-        return "{\n" +
-                map.keySet().stream()
-                        .map(id -> " " + id + "=" + map.get(id).toString())
-                        .map(e -> e.replaceAll("\n ", " ").replace("\n", ""))
-                        .collect(Collectors.joining(",\n")) +
-                "\n}\n";
-    }
-
-    private String[] trimAll(final String[] record) {
-        for (int i = 0; i < record.length; ++i) {
-            if (record[i] != null) {
-                record[i] = record[i].trim();
-            }
-        }
-        return record;
-    }

    private Map<String, String> toPhoneNumbers(final Record rec) {
        final var phoneNumbers = new LinkedHashMap<String, String>();
        if (isNotBlank(rec.getString("phone_private")))