B2B-84: add public message sync function and audit schema (#5)

* B2B-84: add public message sync function and audit schema

* clean up unnecessary comment

* clean up unnecessary seed file

* address comments

---------

Co-authored-by: Helena <helena@Helenas-MacBook-Pro.local>
This commit is contained in:
Helena
2025-06-06 13:34:25 +03:00
committed by GitHub
parent 7a0dac201e
commit f5079e4e97
11 changed files with 2438 additions and 284 deletions

View File

@@ -2,6 +2,7 @@
# https://app.supabase.com/project/_/settings/api
NEXT_PUBLIC_SUPABASE_URL=your-project-url
NEXT_PUBLIC_SUPABASE_ANON_KEY=your-anon-key
NEXT_PUBLIC_SUPABASE_SERVICE_ROLE_KEY=your-service-role-key
MEDIPOST_URL=your-medipost-url
MEDIPOST_USER=your-medipost-user

View File

@@ -1,10 +1,16 @@
import {
GetMessageListResponse,
MedipostAction,
MedipostPublicMessageResponse,
Message,
UuringuGrupp,
} from "@/lib/types/medipost";
import { Tables } from "@/supabase/database.types";
import { createClient, SupabaseClient } from "@supabase/supabase-js";
import axios from "axios";
import { xml2json } from "xml-js";
import { XMLParser } from "fast-xml-parser";
import { SyncStatus } from "@/lib/types/audit";
import { toArray } from "@/lib/utils";
const BASE_URL = process.env.MEDIPOST_URL!;
const USER = process.env.MEDIPOST_USER!;
@@ -15,9 +21,10 @@ export async function getMessages() {
const publicMessage = await getLatestPublicMessageListItem();
if (!publicMessage) {
return [];
return null;
}
//Teenused tuleb mappida kokku MedReport teenustega. <UuringId> alusel
return getPublicMessage(publicMessage.messageId);
} catch (error) {
console.error(error);
@@ -55,18 +62,13 @@ export async function getPublicMessage(messageId: string) {
Accept: "application/xml",
},
});
const parser = new XMLParser({ ignoreAttributes: false });
const parsed: MedipostPublicMessageResponse = parser.parse(data);
if (data.code && data.code !== 0) {
if (parsed.ANSWER?.CODE && parsed.ANSWER?.CODE !== 0) {
throw new Error(`Failed to get public message (id: ${messageId})`);
}
const parsed = JSON.parse(
xml2json(data, {
compact: true,
spaces: 2,
})
);
return parsed;
}
@@ -124,14 +126,8 @@ export async function getPrivateMessage(messageId: string) {
throw new Error(`Failed to get private message (id: ${messageId})`);
}
const parsed = JSON.parse(
xml2json(data, {
compact: true,
spaces: 2,
})
);
return parsed;
const parser = new XMLParser({ ignoreAttributes: false });
return parser.parse(data);
}
export async function deletePrivateMessage(messageId: string) {
@@ -170,6 +166,187 @@ export async function readPrivateMessageResponse() {
}
}
/**
 * Upserts one Medipost analysis group and all of its nested data
 * (analysis elements, sub-analyses and billing codes) into Supabase.
 *
 * Hierarchy persisted: analysis_groups -> analysis_elements -> analyses.
 * `Kood` entries from every level are buffered and written in a single
 * `codes` upsert at the end. Throws on the first failed upsert so the
 * caller can record the whole sync run as failed.
 *
 * @param analysisGroup - `UuringuGrupp` node from the parsed public message XML.
 * @param supabase - service-role client (these tables are RLS-restricted).
 * @throws Error when any upsert fails or returns no row id.
 */
async function saveAnalysisGroup(
  analysisGroup: UuringuGrupp,
  supabase: SupabaseClient
) {
  const { data: insertedAnalysisGroup, error } = await supabase
    .from("analysis_groups")
    .upsert(
      {
        original_id: analysisGroup.UuringuGruppId,
        name: analysisGroup.UuringuGruppNimi,
        order: analysisGroup.UuringuGruppJarjekord,
      },
      { onConflict: "original_id", ignoreDuplicates: false }
    )
    .select("id");
  // Optional chaining also guards the (error === null, data === null) case,
  // which previously threw a TypeError instead of this descriptive Error.
  if (error || !insertedAnalysisGroup?.[0]?.id) {
    throw new Error(
      `Failed to insert analysis group (id: ${analysisGroup.UuringuGruppId}), error: ${error?.message}`
    );
  }
  const analysisGroupId = insertedAnalysisGroup[0].id;

  // Codes collected from all levels; flushed in one upsert below.
  const codes: Partial<Tables<"codes">>[] = toArray(analysisGroup.Kood).map(
    (kood) => ({
      hk_code: kood.HkKood,
      hk_code_multiplier: kood.HkKoodiKordaja,
      coefficient: kood.Koefitsient,
      price: kood.Hind,
      analysis_group_id: analysisGroupId,
    })
  );

  for (const item of toArray(analysisGroup.Uuring)) {
    const analysisElement = item.UuringuElement;
    const { data: insertedAnalysisElement, error } = await supabase
      .from("analysis_elements")
      .upsert(
        {
          analysis_id_oid: analysisElement.UuringIdOID,
          analysis_id_original: analysisElement.UuringId,
          tehik_short_loinc: analysisElement.TLyhend,
          tehik_loinc_name: analysisElement.KNimetus,
          analysis_name_lab: analysisElement.UuringNimi,
          order: analysisElement.Jarjekord,
          parent_analysis_group_id: analysisGroupId,
          material_groups: toArray(item.MaterjalideGrupp),
        },
        { onConflict: "analysis_id_original", ignoreDuplicates: false }
      )
      .select("id");
    if (error || !insertedAnalysisElement?.[0]?.id) {
      throw new Error(
        `Failed to insert analysis element (id: ${analysisElement.UuringId}), error: ${error?.message}`
      );
    }
    const insertedAnalysisElementId = insertedAnalysisElement[0].id;
    if (analysisElement.Kood) {
      codes.push(
        ...toArray(analysisElement.Kood).map((kood) => ({
          hk_code: kood.HkKood,
          hk_code_multiplier: kood.HkKoodiKordaja,
          coefficient: kood.Koefitsient,
          price: kood.Hind,
          analysis_element_id: insertedAnalysisElementId,
        }))
      );
    }
    // toArray also covers the single-object shape fast-xml-parser produces
    // when exactly one nested <UuringuElement> is present (previously that
    // shape was silently skipped by the `analyses?.length` check).
    for (const analysis of toArray(analysisElement.UuringuElement)) {
      const { data: insertedAnalysis, error } = await supabase
        .from("analyses")
        .upsert(
          {
            analysis_id_oid: analysis.UuringIdOID,
            analysis_id_original: analysis.UuringId,
            tehik_short_loinc: analysis.TLyhend,
            tehik_loinc_name: analysis.KNimetus,
            analysis_name_lab: analysis.UuringNimi,
            order: analysis.Jarjekord,
            parent_analysis_element_id: insertedAnalysisElementId,
          },
          { onConflict: "analysis_id_original", ignoreDuplicates: false }
        )
        .select("id");
      if (error || !insertedAnalysis?.[0]?.id) {
        throw new Error(
          `Failed to insert analysis (id: ${analysis.UuringId}) error: ${error?.message}`
        );
      }
      const insertedAnalysisId = insertedAnalysis[0].id;
      // BUG FIX: previously gated on analysisElement.Kood (the parent's
      // codes) instead of the current analysis's own Kood, so child codes
      // could be dropped or parent codes duplicated.
      if (analysis.Kood) {
        codes.push(
          ...toArray(analysis.Kood).map((kood) => ({
            hk_code: kood.HkKood,
            hk_code_multiplier: kood.HkKoodiKordaja,
            coefficient: kood.Koefitsient,
            price: kood.Hind,
            analysis_id: insertedAnalysisId,
          }))
        );
      }
    }
  }

  // Skip the round-trip entirely when no codes were collected.
  if (codes.length) {
    const { error: codesError } = await supabase
      .from("codes")
      .upsert(codes, { ignoreDuplicates: false });
    if (codesError) {
      throw new Error(
        `Failed to insert codes (analysis group id: ${analysisGroup.UuringuGruppId}), error: ${codesError.message}`
      );
    }
  }
}
/**
 * Persists a parsed Medipost public message (service catalogue) into
 * Supabase and records the outcome in `audit.sync_entries`.
 *
 * Uses the service-role key because the target tables are RLS-restricted
 * to service_role. Never throws: failures are logged to the console and
 * written to the audit schema instead.
 *
 * @param message - parsed public message, or null/undefined when the fetch failed.
 */
export async function syncPublicMessage(
  message?: MedipostPublicMessageResponse | null
) {
  const supabase = createClient(
    process.env.NEXT_PUBLIC_SUPABASE_URL!,
    process.env.NEXT_PUBLIC_SUPABASE_SERVICE_ROLE_KEY!,
    {
      // Server-side one-shot client: no browser session handling needed.
      auth: {
        persistSession: false,
        autoRefreshToken: false,
        detectSessionInUrl: false,
      },
    }
  );
  try {
    // <Teostaja> and <UuringuGrupp> may each arrive as a single object or
    // an array depending on how many nodes the XML contained.
    const providers = toArray(message?.Saadetis?.Teenused.Teostaja);
    const analysisGroups = providers.flatMap((provider) =>
      toArray(provider.UuringuGrupp)
    );
    if (!message || !analysisGroups.length) {
      return await supabase.schema("audit").from("sync_entries").insert({
        operation: "ANALYSES_SYNC",
        comment: "No data received",
        status: SyncStatus.Fail,
        changed_by_role: "service_role",
      });
    }
    for (const analysisGroup of analysisGroups) {
      await saveAnalysisGroup(analysisGroup, supabase);
    }
    await supabase.schema("audit").from("sync_entries").insert({
      operation: "ANALYSES_SYNC",
      status: SyncStatus.Success,
      changed_by_role: "service_role",
    });
  } catch (e) {
    console.error(e);
    await supabase
      .schema("audit")
      .from("sync_entries")
      .insert({
        operation: "ANALYSES_SYNC",
        status: SyncStatus.Fail,
        // BUG FIX: JSON.stringify(new Error(...)) yields "{}" because
        // Error's message/stack are non-enumerable — keep the message.
        comment: e instanceof Error ? e.message : JSON.stringify(e),
        changed_by_role: "service_role",
      });
  }
}
function getLatestMessage(messages?: Message[]) {
if (!messages?.length) {
return null;

4
lib/types/audit.ts Normal file
View File

@@ -0,0 +1,4 @@
/**
 * Outcome of a sync run, stored as text in `audit.sync_entries.status`.
 * String enum so the persisted value is human-readable in the audit table.
 */
export enum SyncStatus {
  Success = "SUCCESS",
  Fail = "FAIL",
}

View File

@@ -20,3 +20,122 @@ export enum MedipostAction {
GetPrivateMessage = "GetPrivateMessage",
DeletePrivateMessage = "DeletePrivateMessage",
}
/** Possible coded answer value for an input parameter ("jah"/"ei" = yes/no). */
export type VoimalikVaartus = {
  VaartusId: string;
  Vaartus: "jah" | "ei";
  VaartusJarjekord: number; // display order
};

/**
 * Input parameter attached to a provider's services.
 * `@_`-prefixed fields are XML attributes (fast-xml-parser is configured
 * with `ignoreAttributes: false`, which uses the `@_` prefix by default).
 */
export type Sisendparameeter = {
  "@_VastuseTyyp"?: "ARV" | "VABATEKST" | "KODEERITUD" | "AJAHETK";
  "@_VastuseKoodistikuOID"?: string;
  "@_VastuseKoodistikuNimi"?: string;
  "@_URL"?: string;
  UuringIdOID: string;
  UuringId: string;
  TLyhend: string; // TEHIK short LOINC code
  KNimetus: string; // TEHIK LOINC name
  UuringNimi: string; // analysis name as used by the lab
  Jarjekord: number; // display order
  VoimalikVaartus: VoimalikVaartus[];
};

/** Billing code entry (health-insurance fund code with pricing). */
export type Kood = {
  HkKood: string;
  HkKoodiKordaja: number;
  Koefitsient: number; // float
  Hind: number; // float — price
};

/** Leaf-level analysis nested inside an UuringuElement. */
export type UuringuAlamElement = {
  UuringIdOID: string;
  UuringId: string;
  TLyhend: string;
  KNimetus: string;
  UuringNimi: string;
  Jarjekord: string;
  Kood?: Kood[];
};

/** Analysis element; may contain nested sub-analyses of the same shape. */
export type UuringuElement = {
  UuringIdOID: string;
  UuringId: string;
  TLyhend: string;
  KNimetus: string;
  UuringNimi: string;
  Jarjekord: string;
  Kood?: Kood[];
  UuringuElement?: UuringuAlamElement[];
};

/** Orderable analysis wrapping one element and its material groups. */
export type Uuring = {
  tellitav: "JAH" | "EI"; // whether the analysis can be ordered
  UuringuElement: UuringuElement; // 1..1
  MaterjalideGrupp?: MaterjalideGrupp[]; // 0..n
};

/** Group of analyses offered by a provider. */
export type UuringuGrupp = {
  UuringuGruppId: string;
  UuringuGruppNimi: string;
  UuringuGruppJarjekord: number;
  Uuring: Uuring | Uuring[]; // 1..n — single object when XML had one node
  Kood?: Kood | Kood[]; // 0..n
};

/** Sample container description. */
export type Konteiner = {
  ProovinouKoodOID: string;
  ProovinouKood: string;
  KonteineriNimi: string;
  KonteineriKirjeldus: string;
};

/** Sample material with its possible containers. */
export type Materjal = {
  MaterjaliTyypOID: string;
  MaterjaliTyyp: string;
  MaterjaliNimi: string;
  KonteineriOmadus: string;
  MaterjaliPaige: { Kohustuslik: "JAH" | "EI" }; // 0..1 — whether placement is mandatory
  Konteiner?: Konteiner[]; // 0..n
};

/** Group of materials; `vaikimisi` marks the default group. */
export type MaterjalideGrupp = {
  vaikimisi: "JAH" | "EI";
  Materjal: Materjal; // 1..n
};

/** Service provider (laboratory) with its analysis groups and parameters. */
export type Teostaja = {
  UuringuGrupp?: UuringuGrupp | UuringuGrupp[]; // 0..n
  Asutus: {
    AsutuseId: string;
    AsutuseNimi: string;
    AsutuseKood: string;
    AllyksuseNimi: string;
    Telefon: string;
    Aadress: string;
  };
  Sisendparameeter?: Sisendparameeter | Sisendparameeter[]; // 0..n
};

/**
 * Root of a parsed Medipost public message. Either `ANSWER` (error reply)
 * or `Saadetis` (payload) is present.
 */
export type MedipostPublicMessageResponse = {
  "?xml": {
    "@_version": string;
    "@_encoding": "UTF-8";
    "@_standalone"?: "yes" | "no";
  };
  ANSWER?: { CODE: number }; // non-zero CODE signals an error
  Saadetis?: {
    Pais: {
      Pakett: { "#text": "SL" | "OL" | "AL" | "ME" }; // SL - services, OL - order (referral we send), AL - answer (referral response), ME - message
      Saatja: string; // sender
      Saaja: string; // recipient
      Aeg: string; // timestamp
      SaadetisId: string;
    };
    Teenused: {
      Teostaja: Teostaja | Teostaja[]; // 1..n
    };
  };
};

View File

@@ -4,3 +4,8 @@ import { twMerge } from "tailwind-merge";
/**
 * Merges class-name inputs via clsx, then resolves conflicting Tailwind
 * utility classes with tailwind-merge (last one wins).
 */
export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs));
}
/**
 * Normalizes an optional scalar-or-array value into an array.
 * Useful for fast-xml-parser output, where a repeated XML element parses
 * as an array but a single occurrence parses as a lone object.
 *
 * @param input - a value, an array of values, or null/undefined.
 * @returns `[]` for null/undefined, the array as-is, or `[input]` for a scalar.
 */
export function toArray<T>(input?: T | T[] | null): T[] {
  // `== null` matches only null/undefined; the previous `!input` check
  // also dropped valid falsy scalars such as 0, "" and false.
  if (input == null) return [];
  return Array.isArray(input) ? input : [input];
}

794
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -16,13 +16,13 @@
"axios": "^1.9.0",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"fast-xml-parser": "^5.2.3",
"lucide-react": "^0.468.0",
"next": "latest",
"next-themes": "^0.4.3",
"prettier": "^3.3.3",
"react": "19.0.0",
"react-dom": "19.0.0",
"xml-js": "^1.6.11"
"react-dom": "19.0.0"
},
"devDependencies": {
"@hookform/resolvers": "^5.0.1",

View File

@@ -10,7 +10,7 @@ enabled = true
port = 54321
# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
# endpoints. `public` and `graphql_public` schemas are included by default.
schemas = ["public", "graphql_public"]
schemas = ["public", "graphql_public", "audit"]
# Extra schemas to add to the search_path of every request.
extra_search_path = ["public", "extensions"]
# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size

1285
supabase/database.types.ts Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,206 @@
-- Catalogue tables synced from the Medipost public message:
-- analysis_groups -> analysis_elements -> analyses, plus billing codes
-- that can attach to any of the three levels.

create table "public"."analyses" (
    "id" bigint generated by default as identity not null,
    "analysis_id_oid" text not null,
    "analysis_id_original" text not null,
    "tehik_short_loinc" text,
    "tehik_loinc_name" text,
    "analysis_name_lab" text,
    "order" smallint,
    "created_at" timestamp with time zone not null default now(),
    "updated_at" timestamp with time zone default now(),
    "parent_analysis_element_id" bigint not null
);
alter table "public"."analyses" enable row level security;

create table "public"."analysis_elements" (
    "id" bigint generated by default as identity not null,
    "analysis_id_oid" text,
    "analysis_id_original" text not null,
    "tehik_short_loinc" text,
    "tehik_loinc_name" text,
    "analysis_name_lab" text,
    "order" smallint,
    "created_at" timestamp with time zone not null default now(),
    "updated_at" timestamp with time zone default now(),
    "parent_analysis_group_id" bigint not null,
    "material_groups" jsonb[]
);
alter table "public"."analysis_elements" enable row level security;

create table "public"."analysis_groups" (
    "id" bigint generated by default as identity not null,
    "original_id" text not null,
    "name" text,
    "order" smallint,
    "created_at" timestamp with time zone not null default now(),
    "updated_at" timestamp with time zone default now()
);
alter table "public"."analysis_groups" enable row level security;

create table "public"."codes" (
    "id" bigint generated by default as identity not null,
    "hk_code" text not null,
    "hk_code_multiplier" bigint not null,
    "coefficient" double precision not null,
    "price" double precision not null,
    -- Exactly one of the three parent references is expected to be set
    -- (a code belongs to a group, an element or an analysis).
    "analysis_group_id" bigint,
    "analysis_element_id" bigint,
    "analysis_id" bigint,
    "updated_at" timestamp with time zone default now(),
    "created_at" timestamp with time zone not null default now()
);
alter table "public"."codes" enable row level security;

CREATE UNIQUE INDEX analysis_elements_pkey ON public.analysis_elements USING btree (id);
CREATE UNIQUE INDEX analysis_group_original_id_key ON public.analysis_groups USING btree (original_id);
CREATE UNIQUE INDEX analysis_group_pkey ON public.analysis_groups USING btree (id);
CREATE UNIQUE INDEX analysis_pkey ON public.analyses USING btree (id);
CREATE UNIQUE INDEX codes_pkey ON public.codes USING btree (id);
CREATE UNIQUE INDEX analyses_analysis_id_original_key ON public.analyses USING btree (analysis_id_original);
-- FIX: dropped analysis_elements_original_id_key and analysis_original_id_key —
-- both duplicated this unique index on analysis_elements(analysis_id_original),
-- which is the one backing the UNIQUE constraint below.
CREATE UNIQUE INDEX analysis_elements_analysis_id_original_key ON public.analysis_elements USING btree (analysis_id_original);

alter table "public"."analyses" add constraint "analyses_analysis_id_original_key" UNIQUE using index "analyses_analysis_id_original_key";
alter table "public"."analysis_elements" add constraint "analysis_elements_analysis_id_original_key" UNIQUE using index "analysis_elements_analysis_id_original_key";
alter table "public"."analyses" add constraint "analysis_pkey" PRIMARY KEY using index "analysis_pkey";
alter table "public"."analysis_elements" add constraint "analysis_elements_pkey" PRIMARY KEY using index "analysis_elements_pkey";
alter table "public"."analysis_groups" add constraint "analysis_group_pkey" PRIMARY KEY using index "analysis_group_pkey";
alter table "public"."codes" add constraint "codes_pkey" PRIMARY KEY using index "codes_pkey";
alter table "public"."analyses" add constraint "analyses_parent_analysis_element_id_fkey" FOREIGN KEY (parent_analysis_element_id) REFERENCES analysis_elements(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."analyses" validate constraint "analyses_parent_analysis_element_id_fkey";
alter table "public"."analysis_elements" add constraint "analysis_elements_parent_analysis_group_id_fkey" FOREIGN KEY (parent_analysis_group_id) REFERENCES analysis_groups(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."analysis_elements" validate constraint "analysis_elements_parent_analysis_group_id_fkey";
alter table "public"."analysis_groups" add constraint "analysis_group_original_id_key" UNIQUE using index "analysis_group_original_id_key";
alter table "public"."codes" add constraint "codes_analysis_element_id_fkey" FOREIGN KEY (analysis_element_id) REFERENCES analysis_elements(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."codes" validate constraint "codes_analysis_element_id_fkey";
alter table "public"."codes" add constraint "codes_analysis_group_id_fkey" FOREIGN KEY (analysis_group_id) REFERENCES analysis_groups(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."codes" validate constraint "codes_analysis_group_id_fkey";
alter table "public"."codes" add constraint "codes_analysis_id_fkey" FOREIGN KEY (analysis_id) REFERENCES analyses(id) ON UPDATE CASCADE ON DELETE CASCADE not valid;
alter table "public"."codes" validate constraint "codes_analysis_id_fkey";

-- Only the service role touches these tables (sync runs server-side).
grant delete on table "public"."analyses" to "service_role";
grant insert on table "public"."analyses" to "service_role";
grant references on table "public"."analyses" to "service_role";
grant select on table "public"."analyses" to "service_role";
grant trigger on table "public"."analyses" to "service_role";
grant truncate on table "public"."analyses" to "service_role";
grant update on table "public"."analyses" to "service_role";
grant delete on table "public"."analysis_elements" to "service_role";
grant insert on table "public"."analysis_elements" to "service_role";
grant references on table "public"."analysis_elements" to "service_role";
grant select on table "public"."analysis_elements" to "service_role";
grant trigger on table "public"."analysis_elements" to "service_role";
grant truncate on table "public"."analysis_elements" to "service_role";
grant update on table "public"."analysis_elements" to "service_role";
grant delete on table "public"."analysis_groups" to "service_role";
grant insert on table "public"."analysis_groups" to "service_role";
grant references on table "public"."analysis_groups" to "service_role";
grant select on table "public"."analysis_groups" to "service_role";
grant trigger on table "public"."analysis_groups" to "service_role";
grant truncate on table "public"."analysis_groups" to "service_role";
grant update on table "public"."analysis_groups" to "service_role";
grant delete on table "public"."codes" to "service_role";
grant insert on table "public"."codes" to "service_role";
grant references on table "public"."codes" to "service_role";
grant select on table "public"."codes" to "service_role";
grant trigger on table "public"."codes" to "service_role";
grant truncate on table "public"."codes" to "service_role";
grant update on table "public"."codes" to "service_role";

-- NOTE(review): service_role bypasses RLS in Supabase, so these policies
-- are belt-and-braces; they document intent rather than add protection.
create policy "analysis_all"
on "public"."analyses"
as permissive
for all
to service_role
using (true);
create policy "analysis_elements_all"
on "public"."analysis_elements"
as permissive
for all
to service_role
using (true);
create policy "analysis_groups_all"
on "public"."analysis_groups"
as permissive
for all
to service_role
using (true);
create policy "codes_all"
on "public"."codes"
as permissive
for all
to service_role
using (true);

-- NOTE(review): trigger_set_timestamps() is defined elsewhere; an AFTER
-- trigger cannot modify NEW, so presumably it issues its own UPDATE or
-- writes elsewhere — confirm it is not meant to be a BEFORE UPDATE trigger.
CREATE TRIGGER analysis_change_record_timestamps AFTER INSERT OR DELETE OR UPDATE ON public.analyses FOR EACH ROW EXECUTE FUNCTION trigger_set_timestamps();
CREATE TRIGGER analysis_elements_change_record_timestamps AFTER INSERT OR DELETE OR UPDATE ON public.analysis_elements FOR EACH ROW EXECUTE FUNCTION trigger_set_timestamps();
CREATE TRIGGER analysis_groups_change_record_timestamps AFTER INSERT OR DELETE OR UPDATE ON public.analysis_groups FOR EACH ROW EXECUTE FUNCTION trigger_set_timestamps();
CREATE TRIGGER codes_change_record_timestamps AFTER INSERT OR DELETE OR UPDATE ON public.codes FOR EACH ROW EXECUTE FUNCTION trigger_set_timestamps();

View File

@@ -0,0 +1,87 @@
-- Audit schema: generic row-change log (log_entries, written by trigger)
-- and sync run outcomes (sync_entries, written by the app).
create schema if not exists audit;

create table if not exists audit.log_entries (
    "id" bigint generated by default as identity not null,
    "schema_name" text not null,
    "table_name" text not null,
    "record_key" bigint,
    "operation" text not null,
    "row_data" jsonb,       -- row state before the change (DELETE/UPDATE)
    "changed_data" jsonb,   -- row state after the change (INSERT/UPDATE)
    "changed_by" uuid,
    "changed_by_role" text,
    "changed_at" timestamptz not null default now()
);
alter table "audit"."log_entries" enable row level security;
create policy "service_role_all"
on "audit"."log_entries"
as permissive
for all
to service_role
using (true);

-- Row-level trigger function: snapshots OLD/NEW as jsonb together with the
-- acting user. Returns null (result is ignored for AFTER triggers).
create or replace function audit.log_audit_changes()
returns trigger
language plpgsql
as $$
declare
    current_user_id uuid;
    current_user_role text;
begin
    -- FIX: the sub-block previously had no exception handler, so it was a
    -- no-op and any failure of auth.uid()/auth.jwt() (e.g. when running
    -- outside a PostgREST request: migrations, cron) aborted the trigger
    -- and with it the audited statement. Fall back to null identity.
    begin
        current_user_id := auth.uid();
        current_user_role := auth.jwt() ->> 'role';
    exception when others then
        current_user_id := null;
        current_user_role := null;
    end;
    insert into audit.log_entries (
        schema_name,
        table_name,
        record_key,
        operation,
        row_data,
        changed_data,
        changed_by,
        changed_by_role
    )
    values (
        tg_table_schema,
        tg_table_name,
        case when tg_op in ('DELETE', 'UPDATE') then old.id else null end,
        tg_op,
        case when tg_op in ('DELETE', 'UPDATE') then to_jsonb(old) else null end,
        case when tg_op in ('INSERT', 'UPDATE') then to_jsonb(new) else null end,
        current_user_id,
        current_user_role
    );
    return null;
end;
$$;

create table "audit"."sync_entries" (
    "id" bigint generated by default as identity not null,
    "status" text not null, -- NOTE(review): audit.sync_status enum below is unused; status stays text for app compatibility
    "operation" text not null,
    "comment" text,
    "created_at" timestamp with time zone not null default now(),
    "changed_by_role" text not null
);
create type "audit"."sync_status" as enum ('SUCCESS', 'FAIL');
alter table "audit"."sync_entries" enable row level security;
CREATE UNIQUE INDEX sync_entries_pkey ON audit.sync_entries USING btree (id);
alter table "audit"."sync_entries" add constraint "sync_entries_pkey" PRIMARY KEY using index "sync_entries_pkey";

-- FIX: policy was granted "to public" despite its name; restrict to
-- service_role, consistent with audit.log_entries above.
create policy "service_role_all"
on "audit"."sync_entries"
as permissive
for all
to service_role
using (true);

GRANT USAGE ON SCHEMA audit TO service_role;
GRANT ALL ON ALL TABLES IN SCHEMA audit TO service_role;