diff --git a/sql/historization.sql b/sql/historization.sql
index b414b95c..a85c6ee9 100644
--- a/sql/historization.sql
+++ b/sql/historization.sql
@@ -5,25 +5,14 @@ drop view if exists hs_hosting_asset_hv;
 drop procedure if exists tx_create_historicization;
 drop view if exists tx_create_historical_view;
 drop function if exists tx_historicize_tf();
-drop table if exists tx_history;
 drop type if exists tx_operation;
 
 -- ========================================================
 -- Historization
 -- --------------------------------------------------------
 
-create table "tx_history"
-(
-    "tx_id" bigint not null unique,
-    "tx_timestamp" timestamp not null,
-    "user" varchar(64) not null, -- references postgres user
-    "task" varchar not null
-);
-
 create type "tx_operation" as enum ('INSERT', 'UPDATE', 'DELETE', 'TRUNCATE');
 
--- see https://www.postgresql.org/docs/current/plpgsql-trigger.html
-
 create or replace function tx_historicize_tf()
     returns trigger
     language plpgsql
@@ -63,10 +52,6 @@ begin
         "alive" := false;
     end if;
 
-    sql := format('INSERT INTO tx_history VALUES (txid_current(), now(), %1L, %2L) ON CONFLICT DO NOTHING', currentUser,
-                  currentTask);
-    raise notice 'sql: %', sql;
-    execute sql;
     sql := format('INSERT INTO %3$I_ex VALUES (DEFAULT, txid_current(), %1$L, %2$L, $1.*)', TG_OP, alive, TG_TABLE_NAME);
     raise notice 'sql: %', sql;
     execute sql using "row";
@@ -79,13 +64,13 @@ create or replace procedure tx_create_historical_view(baseTable varchar)
 declare
     createTriggerSQL varchar;
     viewName varchar;
-    versionsTable varchar;
+    exVersionsTable varchar;
     createViewSQL varchar;
     baseCols varchar;
 begin
     viewName = quote_ident(format('%s_hv', baseTable));
-    versionsTable = quote_ident(format('%s_ex', baseTable));
+    exVersionsTable = quote_ident(format('%s_ex', baseTable));
 
     baseCols = (select string_agg(quote_ident(column_name), ', ')
                   from information_schema.columns
                  where table_schema = 'public'
@@ -99,14 +84,15 @@ begin
             ' WHERE alive = TRUE' ||
             ' AND version_id IN' ||
             ' (' ||
-            ' SELECT max(vt.version_id) AS history_id' ||
-            ' FROM %3$s AS vt' ||
-            ' JOIN tx_history as txh ON vt.tx_id = txh.tx_id' ||
-            ' WHERE txh.tx_timestamp <= current_setting(''hsadminng.timestamp'')::timestamp' ||
+            ' SELECT max(ex.version_id) AS history_id' ||
+            ' FROM %3$s AS ex' ||
+            ' JOIN tx_context as txc ON ex.txid = txc.txid' ||
+            ' WHERE txc.txtimestamp <= current_setting(''hsadminng.tx_history_timestamp'')::timestamp' ||
+            ' OR txc.txid = current_setting(''hsadminng.tx_history_txid'')' ||
             ' GROUP BY uuid' ||
             ' )' ||
             ')',
-        viewName, baseCols, versionsTable
+        viewName, baseCols, exVersionsTable
     );
     raise notice 'sql: %', createViewSQL;
     execute createViewSQL;
@@ -125,7 +111,7 @@ declare
     createHistTableSql varchar;
     createTriggerSQL varchar;
     viewName varchar;
-    versionsTable varchar;
+    exVersionsTable varchar;
     createViewSQL varchar;
     baseCols varchar;
 begin
@@ -134,7 +120,7 @@ begin
     createHistTableSql = '' ||
             'CREATE TABLE ' || baseTable || '_ex (' ||
             ' version_id serial PRIMARY KEY,' ||
-            ' tx_id bigint NOT NULL REFERENCES tx_history(tx_id),' ||
+            ' txid bigint NOT NULL REFERENCES tx_context(txid),' ||
             ' trigger_op tx_operation NOT NULL,' ||
             ' alive boolean not null,' ||
             ' LIKE ' || baseTable ||
@@ -146,7 +132,7 @@ begin
 
     -- create the historical view
     viewName = quote_ident(format('%s_hv', baseTable));
-    versionsTable = quote_ident(format('%s_ex', baseTable));
+    exVersionsTable = quote_ident(format('%s_ex', baseTable));
     baseCols = (select string_agg(quote_ident(column_name), ', ')
                   from information_schema.columns
                  where table_schema = 'public'
@@ -160,14 +146,14 @@ begin
             ' WHERE alive = TRUE' ||
             ' AND version_id IN' ||
             ' (' ||
-            ' SELECT max(vt.version_id) AS history_id' ||
-            ' FROM %3$s AS vt' ||
-            ' JOIN tx_history as txh ON vt.tx_id = txh.tx_id' ||
-            ' WHERE txh.tx_timestamp <= current_setting(''hsadminng.timestamp'')::timestamp' ||
-            ' GROUP BY id' ||
+            ' SELECT max(ex.version_id) AS history_id' ||
+            ' FROM %3$s AS ex' ||
+            ' JOIN tx_context as txc ON ex.txid = txc.txid' ||
+            ' WHERE txc.txtimestamp <= current_setting(''hsadminng.tx_history_timestamp'')::timestamp' ||
+            ' GROUP BY uuid' ||
             ' )' ||
             ')',
-        viewName, baseCols, versionsTable
+        viewName, baseCols, exVersionsTable
     );
     raise notice 'sql: %', createViewSQL;
     execute createViewSQL;
@@ -186,58 +172,26 @@ call tx_create_historicization('hs_hosting_asset');
 -- and expanded:
-create table hs_hosting_asset_ex
-(
-    version_id serial primary key,
-    tx_id bigint not null references tx_history (tx_id),
-    trigger_op tx_operation not null,
-    alive boolean not null,
-    like hs_hosting_asset excluding constraints excluding statistics
-);
-
-create or replace view hs_hosting_asset_hv as
-(
-    select uuid,
-           version,
-           bookingitemuuid,
-           type,
-           parentassetuuid,
-           assignedtoassetuuid,
-           identifier,
-           caption,
-           config,
-           alarmcontactuuid
-    from hs_hosting_asset_ex
-    where alive = true
-      and version_id in (select max(vt.version_id) as history_id
-                           from hs_hosting_asset_ex as vt
-                           join tx_history as txh on vt.tx_id = txh.tx_id
-                          where txh.tx_timestamp <= current_setting('hsadminng.timestamp')::timestamp
-                          group by uuid)
-);
-
-CREATE TRIGGER hs_hosting_asset_tx_historicize_tf
-    AFTER INSERT OR DELETE OR UPDATE ON hs_hosting_asset
-    FOR EACH ROW EXECUTE PROCEDURE tx_historicize_tf();
-
 -- ===========================================================================================
 
 rollback;
 begin transaction;
 call defineContext('historization testing', null, 'superuser-alex@hostsharing.net',
---    'hs_booking_project#D-1000000-hshdefaultproject:ADMIN'); -- prod+test
+    'hs_booking_project#D-1000000-hshdefaultproject:ADMIN'); -- prod+test
 --    'hs_booking_project#D-1000300-mihdefaultproject:ADMIN'); -- prod
-    'hs_booking_project#D-1000300-mimdefaultproject:ADMIN'); -- test
+--    'hs_booking_project#D-1000300-mimdefaultproject:ADMIN'); -- test
 -- update hs_hosting_asset set caption='lug00 b' where identifier = 'lug00' and type = 'MANAGED_WEBSPACE'; -- prod
-update hs_hosting_asset set caption='mim00 d' where identifier = 'mim00' and type = 'MANAGED_WEBSPACE'; -- test
+update hs_hosting_asset set caption='mim00 A ' || now()::text where identifier = 'mim00' and type = 'MANAGED_WEBSPACE'; -- test
+update hs_hosting_asset set caption='mim00 B ' || now()::text where identifier = 'mim00' and type = 'MANAGED_WEBSPACE'; -- test
 commit;
 
--- all versions
-select txh.tx_timestamp, txh."user", txh.task, ha.*
-    from hs_hosting_asset_ex ha
-    join tx_history txh on ha.tx_id=txh.tx_id
-    where ha.identifier in ('lug00', 'mim00');
-
 -- single version at point in time
-set hsadminng.timestamp to '2024-08-27 04:15:00'; -- UTC
+-- set hsadminng.tx_history_timestamp to '2024-08-27 07:44:03'; -- UTC
+set hsadminng.tx_history_timestamp to '2024-08-27 07:44:03'; -- UTC
 select uuid, version, identifier, caption
     from hs_hosting_asset_hv p
     where identifier in ('lug00', 'mim00');
+
+-- all versions
+select txc.txtimestamp, txc.currentUser, txc.currentTask, haex.*
+    from hs_hosting_asset_ex haex
+    join tx_context txc on haex.txid=txc.txid
+    where haex.identifier in ('lug00', 'mim00');
diff --git a/src/test/java/net/hostsharing/hsadminng/hs/migration/BaseOfficeDataImport.java b/src/test/java/net/hostsharing/hsadminng/hs/migration/BaseOfficeDataImport.java
index 758ab68d..9cb774d2 100644
--- a/src/test/java/net/hostsharing/hsadminng/hs/migration/BaseOfficeDataImport.java
+++ b/src/test/java/net/hostsharing/hsadminng/hs/migration/BaseOfficeDataImport.java
@@ -610,7 +610,7 @@ public abstract class BaseOfficeDataImport extends CsvDataImport {
         deleteTestDataFromHsOfficeTables();
         resetHsOfficeSequences();
         deleteFromTestTables();
-        deleteFromRbacTables();
+        deleteFromCommonTables();
 
         jpaAttempt.transacted(() -> {
             context(rbacSuperuser);
diff --git a/src/test/java/net/hostsharing/hsadminng/hs/migration/CsvDataImport.java b/src/test/java/net/hostsharing/hsadminng/hs/migration/CsvDataImport.java
index 2ce2e924..97c334ae 100644
--- a/src/test/java/net/hostsharing/hsadminng/hs/migration/CsvDataImport.java
+++ b/src/test/java/net/hostsharing/hsadminng/hs/migration/CsvDataImport.java
@@ -249,6 +249,7 @@ public class CsvDataImport extends ContextBasedTest {
             context(rbacSuperuser);
             // TODO.perf: could we instead skip creating test-data based on an env var?
             em.createNativeQuery("delete from hs_hosting_asset where true").executeUpdate();
+            // FIXME em.createNativeQuery("delete from hs_hosting_asset_ex where true").executeUpdate();
             em.createNativeQuery("delete from hs_booking_item where true").executeUpdate();
             em.createNativeQuery("delete from hs_booking_project where true").executeUpdate();
             em.createNativeQuery("delete from hs_office_coopassetstransaction where true").executeUpdate();
@@ -292,7 +293,7 @@ public class CsvDataImport extends ContextBasedTest {
         }).assertSuccessful();
     }
 
-    protected void deleteFromRbacTables() {
+    protected void deleteFromCommonTables() {
         jpaAttempt.transacted(() -> {
             context(rbacSuperuser);
             em.createNativeQuery("delete from rbacuser_rv where name not like 'superuser-%'").executeUpdate();