Compare commits


No commits in common. "fd7a63867a2fe0f20af0568c56d5c98f0a862d5b" and "8b777077a72378c92b4b460f8b7facf7635a4add" have entirely different histories.

9 changed files with 365 additions and 285 deletions


@@ -0,0 +1,63 @@
### rbac bookingItem
This code was generated by RbacViewMermaidFlowchartGenerator, do not amend manually.
```mermaid
%%{init:{'flowchart':{'htmlLabels':false}}}%%
flowchart TB
subgraph bookingItem["`**bookingItem**`"]
direction TB
style bookingItem fill:#dd4901,stroke:#274d6e,stroke-width:8px
subgraph bookingItem:roles[ ]
style bookingItem:roles fill:#dd4901,stroke:white
role:bookingItem:OWNER[[bookingItem:OWNER]]
role:bookingItem:ADMIN[[bookingItem:ADMIN]]
role:bookingItem:AGENT[[bookingItem:AGENT]]
role:bookingItem:TENANT[[bookingItem:TENANT]]
end
subgraph bookingItem:permissions[ ]
style bookingItem:permissions fill:#dd4901,stroke:white
perm:bookingItem:INSERT{{bookingItem:INSERT}}
perm:bookingItem:DELETE{{bookingItem:DELETE}}
perm:bookingItem:UPDATE{{bookingItem:UPDATE}}
perm:bookingItem:SELECT{{bookingItem:SELECT}}
end
end
subgraph project["`**project**`"]
direction TB
style project fill:#99bcdb,stroke:#274d6e,stroke-width:8px
subgraph project:roles[ ]
style project:roles fill:#99bcdb,stroke:white
role:project:OWNER[[project:OWNER]]
role:project:ADMIN[[project:ADMIN]]
role:project:AGENT[[project:AGENT]]
role:project:TENANT[[project:TENANT]]
end
end
%% granting roles to roles
role:project:OWNER -.-> role:project:ADMIN
role:project:ADMIN -.-> role:project:AGENT
role:project:AGENT -.-> role:project:TENANT
role:project:AGENT ==> role:bookingItem:OWNER
role:bookingItem:OWNER ==> role:bookingItem:ADMIN
role:bookingItem:ADMIN ==> role:bookingItem:AGENT
role:bookingItem:AGENT ==> role:bookingItem:TENANT
role:bookingItem:TENANT ==> role:project:TENANT
%% granting permissions to roles
role:rbac.global:ADMIN ==> perm:bookingItem:INSERT
role:rbac.global:ADMIN ==> perm:bookingItem:DELETE
role:project:ADMIN ==> perm:bookingItem:INSERT
role:bookingItem:ADMIN ==> perm:bookingItem:UPDATE
role:bookingItem:TENANT ==> perm:bookingItem:SELECT
```
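Each edge between a bookingItem role and another role under `%% granting roles to roles` is realized in the generated SQL of the next file by a `rbac.defineRoleWithGrants` call: an arrow pointing into a bookingItem role is declared via `incomingSuperRoles` on that role, an arrow leaving it via `outgoingSubRoles`. For example (trimmed excerpts from `buildRbacSystemForHsBookingItem` below, reduced to a single array element each):

```sql
-- edge  role:project:AGENT ==> role:bookingItem:OWNER
perform rbac.defineRoleWithGrants(
    hsBookingItemOWNER(NEW),
    incomingSuperRoles => array[hsBookingProjectAGENT(newProject)]
);

-- edge  role:bookingItem:TENANT ==> role:project:TENANT
perform rbac.defineRoleWithGrants(
    hsBookingItemTENANT(NEW),
    outgoingSubRoles => array[hsBookingProjectTENANT(newProject)]
);
```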


@@ -0,0 +1,277 @@
--liquibase formatted sql
-- This code was generated by RbacViewPostgresGenerator, do not amend manually.
-- ============================================================================
--changeset michael.hoennig:hs-booking-item-rbac-OBJECT endDelimiter:--//
-- ----------------------------------------------------------------------------
call rbac.generateRelatedRbacObject('hs_booking_item');
--//
-- ============================================================================
--changeset michael.hoennig:hs-booking-item-rbac-ROLE-DESCRIPTORS endDelimiter:--//
-- ----------------------------------------------------------------------------
call rbac.generateRbacRoleDescriptors('hsBookingItem', 'hs_booking_item');
--//
-- ============================================================================
--changeset michael.hoennig:hs-booking-item-rbac-insert-trigger endDelimiter:--//
-- ----------------------------------------------------------------------------
/*
Creates the roles, grants and permissions for the AFTER INSERT TRIGGER.
*/
create or replace procedure buildRbacSystemForHsBookingItem(
NEW hs_booking_item
)
language plpgsql as $$
declare
newProject hs_booking_project;
newParentItem hs_booking_item;
begin
call rbac.enterTriggerForObjectUuid(NEW.uuid);
SELECT * FROM hs_booking_project WHERE uuid = NEW.projectUuid INTO newProject;
SELECT * FROM hs_booking_item WHERE uuid = NEW.parentItemUuid INTO newParentItem;
perform rbac.defineRoleWithGrants(
hsBookingItemOWNER(NEW),
incomingSuperRoles => array[
hsBookingItemAGENT(newParentItem),
hsBookingProjectAGENT(newProject)]
);
perform rbac.defineRoleWithGrants(
hsBookingItemADMIN(NEW),
permissions => array['UPDATE'],
incomingSuperRoles => array[hsBookingItemOWNER(NEW)]
);
perform rbac.defineRoleWithGrants(
hsBookingItemAGENT(NEW),
incomingSuperRoles => array[hsBookingItemADMIN(NEW)]
);
perform rbac.defineRoleWithGrants(
hsBookingItemTENANT(NEW),
permissions => array['SELECT'],
incomingSuperRoles => array[hsBookingItemAGENT(NEW)],
outgoingSubRoles => array[
hsBookingItemTENANT(newParentItem),
hsBookingProjectTENANT(newProject)]
);
call rbac.grantPermissionToRole(rbac.createPermission(NEW.uuid, 'DELETE'), rbac.globalAdmin());
call rbac.leaveTriggerForObjectUuid(NEW.uuid);
end; $$;
/*
AFTER INSERT TRIGGER to create the role+grant structure for a new hs_booking_item row.
*/
create or replace function insertTriggerForHsBookingItem_tf()
returns trigger
language plpgsql
strict as $$
begin
call buildRbacSystemForHsBookingItem(NEW);
return NEW;
end; $$;
create trigger insertTriggerForHsBookingItem_tg
after insert on hs_booking_item
for each row
execute procedure insertTriggerForHsBookingItem_tf();
--//
-- ============================================================================
--changeset michael.hoennig:hs-booking-item-rbac-GRANTING-INSERT-PERMISSION endDelimiter:--//
-- ----------------------------------------------------------------------------
-- granting INSERT permission to global ----------------------------
/*
Grants INSERT INTO hs_booking_item permissions to the specified role of pre-existing rbac.global rows.
*/
do language plpgsql $$
declare
row rbac.global%ROWTYPE;
begin
call base.defineContext('create INSERT INTO hs_booking_item permissions for pre-existing rbac.global rows');
FOR row IN SELECT * FROM rbac.global
-- unconditional for all rows in that table
LOOP
call rbac.grantPermissionToRole(
rbac.createPermission(row.uuid, 'INSERT', 'hs_booking_item'),
rbac.globalAdmin());
END LOOP;
end;
$$;
/**
Grants hs_booking_item INSERT permission to the specified role of new rbac.global rows.
*/
create or replace function new_hs_booking_item_grants_insert_to_global_tf()
returns trigger
language plpgsql
strict as $$
begin
-- unconditional for all rows in that table
call rbac.grantPermissionToRole(
rbac.createPermission(NEW.uuid, 'INSERT', 'hs_booking_item'),
rbac.globalAdmin());
-- end.
return NEW;
end; $$;
-- z_... is to put it at the end of after insert triggers, to make sure the roles exist
create trigger z_new_hs_booking_item_grants_insert_to_global_tg
after insert on rbac.global
for each row
execute procedure new_hs_booking_item_grants_insert_to_global_tf();
-- granting INSERT permission to hs_booking_project ----------------------------
/*
Grants INSERT INTO hs_booking_item permissions to the specified role of pre-existing hs_booking_project rows.
*/
do language plpgsql $$
declare
row hs_booking_project;
begin
call base.defineContext('create INSERT INTO hs_booking_item permissions for pre-existing hs_booking_project rows');
FOR row IN SELECT * FROM hs_booking_project
-- unconditional for all rows in that table
LOOP
call rbac.grantPermissionToRole(
rbac.createPermission(row.uuid, 'INSERT', 'hs_booking_item'),
hsBookingProjectADMIN(row));
END LOOP;
end;
$$;
/**
Grants hs_booking_item INSERT permission to the specified role of new hs_booking_project rows.
*/
create or replace function new_hs_booking_item_grants_insert_to_hs_booking_project_tf()
returns trigger
language plpgsql
strict as $$
begin
-- unconditional for all rows in that table
call rbac.grantPermissionToRole(
rbac.createPermission(NEW.uuid, 'INSERT', 'hs_booking_item'),
hsBookingProjectADMIN(NEW));
-- end.
return NEW;
end; $$;
-- z_... is to put it at the end of after insert triggers, to make sure the roles exist
create trigger z_new_hs_booking_item_grants_insert_to_hs_booking_project_tg
after insert on hs_booking_project
for each row
execute procedure new_hs_booking_item_grants_insert_to_hs_booking_project_tf();
-- granting INSERT permission to hs_booking_item ----------------------------
-- Granting INSERT INTO hs_booking_item permissions to the specified role of pre-existing hs_booking_item rows is skipped,
-- because there cannot yet be any pre-existing rows in the same table.
/**
Grants hs_booking_item INSERT permission to the specified role of new hs_booking_item rows.
*/
create or replace function new_hs_booking_item_grants_insert_to_hs_booking_item_tf()
returns trigger
language plpgsql
strict as $$
begin
-- unconditional for all rows in that table
call rbac.grantPermissionToRole(
rbac.createPermission(NEW.uuid, 'INSERT', 'hs_booking_item'),
hsBookingItemADMIN(NEW));
-- end.
return NEW;
end; $$;
-- z_... is to put it at the end of after insert triggers, to make sure the roles exist
create trigger z_new_hs_booking_item_grants_insert_to_hs_booking_item_tg
after insert on hs_booking_item
for each row
execute procedure new_hs_booking_item_grants_insert_to_hs_booking_item_tf();
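The `z_` prefix mentioned in the comments above exploits the fact that PostgreSQL fires triggers of the same kind on the same table in alphabetical order by trigger name, so these grant triggers run only after the earlier AFTER INSERT triggers have created the roles they grant to. A minimal, self-contained sketch of that ordering (hypothetical table, function and trigger names, unrelated to the schema in this changeset):

```sql
create table trigger_order_demo (id integer);

create function a_first_tf() returns trigger language plpgsql as $$
begin raise notice 'a_first_tg fired'; return NEW; end $$;

create function z_last_tf() returns trigger language plpgsql as $$
begin raise notice 'z_last_tg fired'; return NEW; end $$;

-- created in "reverse" order on purpose: creation order does not matter, the name does
create trigger z_last_tg after insert on trigger_order_demo
    for each row execute procedure z_last_tf();
create trigger a_first_tg after insert on trigger_order_demo
    for each row execute procedure a_first_tf();

insert into trigger_order_demo values (1);
-- notices appear in this order: 'a_first_tg fired', then 'z_last_tg fired'
```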
-- ============================================================================
--changeset michael.hoennig:hs_booking_item-rbac-CHECKING-INSERT-PERMISSION endDelimiter:--//
-- ----------------------------------------------------------------------------
/**
Checks if the user or the assumed roles are allowed to insert a row into hs_booking_item.
*/
create or replace function hs_booking_item_insert_permission_check_tf()
returns trigger
language plpgsql as $$
declare
superObjectUuid uuid;
begin
-- check INSERT permission if rbac.Global ADMIN
if rbac.isGlobalAdmin() then
return NEW;
end if;
-- check INSERT permission via direct foreign key: NEW.projectUuid
if rbac.hasInsertPermission(NEW.projectUuid, 'hs_booking_item') then
return NEW;
end if;
-- check INSERT permission via direct foreign key: NEW.parentItemUuid
if rbac.hasInsertPermission(NEW.parentItemUuid, 'hs_booking_item') then
return NEW;
end if;
raise exception '[403] insert into hs_booking_item values(%) not allowed for current subjects % (%)',
NEW, base.currentSubjects(), rbac.currentSubjectOrAssumedRolesUuids();
end; $$;
create trigger hs_booking_item_insert_permission_check_tg
before insert on hs_booking_item
for each row
execute procedure hs_booking_item_insert_permission_check_tf();
--//
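To see the check in action: an INSERT by a subject that is neither a rbac.Global ADMIN nor holds INSERT permission on the referenced project or parent item is rejected with the `[403]` exception raised above. A hedged sketch; the column list and error handling are assumptions, only `projectUuid` and the trigger itself are taken from this changeset, and `gen_random_uuid()` assumes PostgreSQL 13+ or pgcrypto:

```sql
do language plpgsql $$
begin
    begin
        -- hypothetical minimal column list; a random project uuid means the current
        -- subject cannot hold INSERT permission via that foreign key
        insert into hs_booking_item (uuid, projectUuid)
        values (gen_random_uuid(), gen_random_uuid());
    exception
        when raise_exception then
            raise notice 'rejected as expected: %', sqlerrm;  -- message starts with [403] ...
    end;
end $$;
```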
-- ============================================================================
--changeset michael.hoennig:hs-booking-item-rbac-IDENTITY-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------
call rbac.generateRbacIdentityViewFromProjection('hs_booking_item',
$idName$
caption
$idName$);
--//
-- ============================================================================
--changeset michael.hoennig:hs-booking-item-rbac-RESTRICTED-VIEW endDelimiter:--//
-- ----------------------------------------------------------------------------
call rbac.generateRbacRestrictedView('hs_booking_item',
$orderBy$
validity
$orderBy$,
$updates$
version = new.version,
caption = new.caption,
validity = new.validity,
resources = new.resources
$updates$);
--//
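The `$updates$` block whitelists the columns that may be changed through the generated restricted view; `$orderBy$` sets its default ordering. A usage sketch, assuming the generated view is named `hs_booking_item_rv` (the actual view name is not shown in this changeset) and that it presumably only applies updates to rows the current subject holds UPDATE permission on:

```sql
-- only the columns listed in $updates$ above (version, caption, validity, resources)
-- would be propagated to hs_booking_item
update hs_booking_item_rv
   set caption = 'renamed booking item'
 where uuid = '00000000-0000-0000-0000-000000000000';  -- placeholder uuid
```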


@@ -1,96 +0,0 @@
--liquibase formatted sql
-- TODO: These changesets are just for the external remote views to simulate the legacy tables.
-- Once we don't need the external remote views anymore, create revert changesets.
-- ============================================================================
--changeset hs-hosting-asset-MIGRATION-mapping:1 endDelimiter:--//
-- ----------------------------------------------------------------------------
CREATE TABLE hs_hosting_asset_legacy_id
(
uuid uuid NOT NULL REFERENCES hs_hosting_asset(uuid),
legacy_id integer NOT NULL
);
--//
-- ============================================================================
--changeset hs-hosting-asset-MIGRATION-sequence:1 endDelimiter:--//
-- ----------------------------------------------------------------------------
CREATE SEQUENCE IF NOT EXISTS hs_hosting_asset_legacy_id_seq
AS integer
START 1000000000
OWNED BY hs_hosting_asset_legacy_id.legacy_id;
--//
-- ============================================================================
--changeset hs-hosting-asset-MIGRATION-default:1 endDelimiter:--//
-- ----------------------------------------------------------------------------
ALTER TABLE hs_hosting_asset_legacy_id
ALTER COLUMN legacy_id
SET DEFAULT nextVal('hs_hosting_asset_legacy_id_seq');
--/
-- ============================================================================
--changeset hs-hosting-asset-MIGRATION-insert:1 endDelimiter:--//
-- ----------------------------------------------------------------------------
CALL base.defineContext('schema-migration');
INSERT INTO hs_hosting_asset_legacy_id(uuid, legacy_id)
SELECT uuid, nextVal('hs_hosting_asset_legacy_id_seq') FROM hs_hosting_asset;
--/
-- ============================================================================
--changeset hs-hosting-asset-MIGRATION-insert-trigger:1 endDelimiter:--//
-- ----------------------------------------------------------------------------
create or replace function insertassetLegacyIdMapping()
returns trigger
language plpgsql
strict as $$
begin
if TG_OP <> 'INSERT' then
raise exception 'invalid usage of trigger';
end if;
INSERT INTO hs_hosting_asset_legacy_id VALUES
(NEW.uuid, nextVal('hs_hosting_asset_legacy_id_seq'));
return NEW;
end; $$;
create trigger createassetLegacyIdMapping
after insert on hs_hosting_asset
for each row
execute procedure insertassetLegacyIdMapping();
--/
-- ============================================================================
--changeset hs-hosting-asset-MIGRATION-delete-trigger:1 endDelimiter:--//
-- ----------------------------------------------------------------------------
create or replace function deleteassetLegacyIdMapping_tf()
returns trigger
language plpgsql
strict as $$
begin
if TG_OP <> 'DELETE' then
raise exception 'invalid usage of trigger';
end if;
DELETE FROM hs_hosting_asset_legacy_id
WHERE uuid = OLD.uuid;
return OLD;
end; $$;
create trigger deleteassetLegacyIdMapping_tg
before delete on hs_hosting_asset
for each row
execute procedure deleteassetLegacyIdMapping_tf();
--/
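For reference, the mapping these now-removed changesets maintained could be inspected with a join like the one used by the verify*LegacyIds tests that are removed further down in this compare:

```sql
-- sketch: list each hosting asset together with the legacy_id assigned by the
-- createassetLegacyIdMapping trigger above (sequence starts at 1000000000)
SELECT ha.uuid, li.legacy_id
  FROM hs_hosting_asset ha
  JOIN hs_hosting_asset_legacy_id li ON li.uuid = ha.uuid
 ORDER BY li.legacy_id;
```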


@@ -146,17 +146,15 @@ databaseChangeLog:
  - include:
      file: db/changelog/6-hs-booking/620-booking-project/6208-hs-booking-project-test-data.sql
  - include:
-      file: db/changelog/6-hs-booking/630-booking-item/6300-hs-booking-item.sql
+      file: db/changelog/6-hs-booking/630-booking-item/6200-hs-booking-item.sql
  - include:
-      file: db/changelog/6-hs-booking/630-booking-item/6303-hs-booking-item-rbac.sql
+      file: db/changelog/6-hs-booking/630-booking-item/6203-hs-booking-item-rbac.sql
  - include:
-      file: db/changelog/6-hs-booking/630-booking-item/6308-hs-booking-item-test-data.sql
+      file: db/changelog/6-hs-booking/630-booking-item/6208-hs-booking-item-test-data.sql
  - include:
      file: db/changelog/7-hs-hosting/701-hosting-asset/7010-hs-hosting-asset.sql
  - include:
      file: db/changelog/7-hs-hosting/701-hosting-asset/7013-hs-hosting-asset-rbac.sql
-  - include:
-      file: db/changelog/7-hs-hosting/701-hosting-asset/7016-hs-hosting-asset-migration.sql
  - include:
      file: db/changelog/7-hs-hosting/701-hosting-asset/7018-hs-hosting-asset-test-data.sql
  - include:


@@ -17,6 +17,7 @@ import net.hostsharing.hsadminng.hs.office.relation.HsOfficeRelation;
import net.hostsharing.hsadminng.hs.office.relation.HsOfficeRelationRealEntity;
import net.hostsharing.hsadminng.hs.office.relation.HsOfficeRelationType;
import net.hostsharing.hsadminng.hs.office.sepamandate.HsOfficeSepaMandateEntity;
+import net.hostsharing.hsadminng.rbac.object.BaseEntity;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.BeforeAll;
@@ -614,7 +615,7 @@ public abstract class BaseOfficeDataImport extends CsvDataImport {
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
contacts.forEach(this::persist);
updateLegacyIds(contacts, "hs_office_contact_legacy_id", "contact_id");
}).assertSuccessful();
jpaAttempt.transacted(() -> {
@@ -698,6 +699,24 @@ public abstract class BaseOfficeDataImport extends CsvDataImport {
assumeThat(partners.size()).isLessThanOrEqualTo(MAX_NUMBER_OF_TEST_DATA_PARTNERS);
}
private <E extends BaseEntity> void updateLegacyIds(
Map<Integer, E> entities,
final String legacyIdTable,
final String legacyIdColumn) {
em.flush();
entities.forEach((id, entity) -> em.createNativeQuery("""
UPDATE ${legacyIdTable}
SET ${legacyIdColumn} = :legacyId
WHERE uuid = :uuid
"""
.replace("${legacyIdTable}", legacyIdTable)
.replace("${legacyIdColumn}", legacyIdColumn))
.setParameter("legacyId", id)
.setParameter("uuid", entity.getUuid())
.executeUpdate()
);
}
@Test
@Order(9999)
@ContinueOnFailure
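The updateLegacyIds helper added above (moved here from CsvDataImport, which loses it in the next file) splices the table and column names into the statement via String.replace, since JPA cannot bind identifiers as parameters, and binds only the values. For the call updateLegacyIds(contacts, "hs_office_contact_legacy_id", "contact_id") the effective statement is roughly the following sketch, with hypothetical values:

```sql
UPDATE hs_office_contact_legacy_id
   SET contact_id = 1234                                 -- bound as :legacyId (hypothetical value)
 WHERE uuid = '00000000-0000-0000-0000-000000000000';    -- bound as :uuid (hypothetical value)
```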


@@ -334,24 +334,6 @@ public class CsvDataImport extends ContextBasedTest {
errors.clear();
assertThat(errorsToLog).isEmpty();
}
protected <E extends BaseEntity> void updateLegacyIds(
Map<Integer, E> entities,
final String legacyIdTable,
final String legacyIdColumn) {
em.flush();
entities.forEach((id, entity) -> em.createNativeQuery("""
UPDATE ${legacyIdTable}
SET ${legacyIdColumn} = :legacyId
WHERE uuid = :uuid
"""
.replace("${legacyIdTable}", legacyIdTable)
.replace("${legacyIdColumn}", legacyIdColumn))
.setParameter("legacyId", id)
.setParameter("uuid", entity.getUuid())
.executeUpdate()
);
}
}
class Columns {


@@ -47,12 +47,12 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.regex.Pattern;
+import java.util.stream.Collectors;
import static java.util.Arrays.stream;
import static java.util.Map.entry;
import static java.util.Map.ofEntries;
import static java.util.Optional.ofNullable;
-import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;
import static net.hostsharing.hsadminng.hs.hosting.asset.HsHostingAssetType.CLOUD_SERVER;
@@ -938,132 +938,6 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
@Test
@Order(19930)
void verifyCloudServerLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(CLOUD_SERVER)).isEqualTo("""
23611
""".trim());
assertThat(missingHostingAsstLegacyIds(CLOUD_SERVER)).isEmpty();
}
@Test
@Order(19931)
void verifyManagedServerLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(MANAGED_SERVER)).isEqualTo("""
10968
10978
11061
11447
""".trim());
assertThat(missingHostingAsstLegacyIds(MANAGED_SERVER)).isEmpty();
}
@Test
@Order(19932)
void verifyManagedWebspaceLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(MANAGED_WEBSPACE)).isEqualTo("""
10630
11094
11111
11112
19959
""".trim());
assertThat(missingHostingAsstLegacyIds(MANAGED_WEBSPACE)).isEmpty();
}
@Test
@Order(19933)
void verifyUnixUserLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(UNIX_USER)).isEqualTo("""
5803
5805
5809
5811
5813
5835
5961
5964
5966
5990
6705
6824
7846
9546
9596
""".trim());
assertThat(missingHostingAsstLegacyIds(UNIX_USER)).isEmpty();
}
@Test
@Order(19934)
void verifyPgSqlDbLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(PGSQL_DATABASE)).isEqualTo("""
1077
1858
1860
4931
4932
7522
7523
7605
""".trim());
assertThat(missingHostingAsstLegacyIds(PGSQL_DATABASE)).isEmpty();
}
@Test
@Order(19934)
void verifyPgSqlUserLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(PGSQL_USER)).isEqualTo("""
1857
1859
1860
1861
4931
7522
7605
""".trim());
assertThat(missingHostingAsstLegacyIds(PGSQL_USER)).isEmpty();
}
@Test
@Order(19935)
void verifyMariaDbLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(MARIADB_DATABASE)).isEqualTo("""
1786
1805
4908
4941
4942
7520
7521
7604
""".trim());
assertThat(missingHostingAsstLegacyIds(MARIADB_DATABASE)).isEmpty();
}
@Test
@Order(19936)
void verifyMariaDbUserLegacyIds() {
assumeThatWeAreImportingControlledTestData();
assertThat(fetchHosingAssetLegacyIds(MARIADB_USER)).isEqualTo("""
1858
4908
4909
4932
7520
7604
""".trim());
assertThat(missingHostingAsstLegacyIds(MARIADB_USER)).isEmpty();
}
@Test
@Order(19940)
void verifyProjectAgentsCanViewEmailAddresses() {
assumeThatWeAreImportingControlledTestData();
@@ -1075,7 +949,6 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
assertThat(haCount).isEqualTo(68);
}
// ============================================================================================
@Test
@@ -1133,11 +1006,6 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
}
).assertSuccessful()
);
jpaAttempt.transacted(() -> {
context(rbacSuperuser);
updateLegacyIds(assets, "hs_hosting_asset_legacy_id", "legacy_id");
}).assertSuccessful();
}
private void verifyActuallyPersistedHostingAssetCount(
@@ -1742,7 +1610,7 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
//noinspection unchecked
zoneData.put("user-RR", ((ArrayList<ArrayList<Object>>) zoneData.get("user-RR")).stream()
-.map(userRR -> userRR.stream().map(Object::toString).collect(joining(" ")))
+.map(userRR -> userRR.stream().map(Object::toString).collect(Collectors.joining(" ")))
.toArray(String[]::new)
);
domainDnsSetupAsset.getConfig().putAll(zoneData);
@@ -1890,35 +1758,4 @@ public class ImportHostingAssets extends BaseOfficeDataImport {
protected static void assumeThatWeAreImportingControlledTestData() {
assumeThat(isImportingControlledTestData()).isTrue();
}
private String fetchHosingAssetLegacyIds(final HsHostingAssetType type) {
//noinspection unchecked
return ((List<List<?>>) em.createNativeQuery(
"""
SELECT li.* FROM hs_hosting_asset_legacy_id li
JOIN hs_hosting_asset ha ON ha.uuid=li.uuid
WHERE CAST(ha.type AS text)=:type
ORDER BY legacy_id
""",
List.class)
.setParameter("type", type.name())
.getResultList()
).stream().map(row -> row.get(1).toString()).collect(joining("\n"));
}
private String missingHostingAsstLegacyIds(final HsHostingAssetType type) {
//noinspection unchecked
return ((List<List<?>>) em.createNativeQuery(
"""
SELECT ha.uuid, ha.type, ha.identifier FROM hs_hosting_asset ha
JOIN hs_hosting_asset_legacy_id li ON li.uuid=ha.uuid
WHERE li.legacy_id is null AND CAST(ha.type AS text)=:type
ORDER BY li.legacy_id
""",
List.class)
.setParameter("type", type.name())
.getResultList()).stream()
.map(row -> row.stream().map(Object::toString).collect(joining(", ")))
.collect(joining("\n"));
}
}