send analyses to AI

Danel Kungla
2025-09-19 16:54:21 +03:00
parent 1b634f9736
commit addd3a5dae
4 changed files with 66 additions and 230 deletions


@@ -1,3 +1,5 @@
import { Suspense } from 'react';
import { redirect } from 'next/navigation';
import { toTitleCase } from '@/lib/utils';
@@ -13,10 +15,8 @@ import { withI18n } from '~/lib/i18n/with-i18n';
import Dashboard from '../_components/dashboard';
import DashboardCards from '../_components/dashboard-cards';
import Recommendations from '../_components/recommendations';
import { loadAnalyses } from '../_lib/server/load-analyses';
import { loadRecommendations } from '../_lib/server/load-recommendations';
import { loadCurrentUserAccount } from '../_lib/server/load-user-account';
import { analysisResponses } from './analysis-results/test/test-responses';
import Loading from '../loading';
export const generateMetadata = async () => {
const i18n = await createI18nServerInstance();
@@ -28,21 +28,11 @@ export const generateMetadata = async () => {
};
async function UserHomePage() {
console.log('process.env', process.env.OPENAI_API_KEY);
const client = getSupabaseServerClient();
const { account } = await loadCurrentUserAccount();
const api = createUserAnalysesApi(client);
const bmiThresholds = await api.fetchBmiThresholds();
const { analyses, countryCode } = await loadAnalyses();
const analysisRecommendations = await loadRecommendations(
analysisResponses,
analyses,
account,
);
console.log('analysisRecommendations', analysisRecommendations);
if (!account) {
redirect('/');
@@ -62,13 +52,9 @@ async function UserHomePage() {
/>
<PageBody>
<Dashboard account={account} bmiThresholds={bmiThresholds} />
<Recommendations
recommended={analyses.filter((analysis) =>
analysisRecommendations.includes(analysis.title),
)}
countryCode={countryCode}
/>
<Suspense fallback={null}>
<Recommendations account={account} />
</Suspense>
</PageBody>
</>
);
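
With this change the page stops awaiting loadAnalyses/loadRecommendations itself: the OpenAI-backed recommendation loading now happens inside the Suspense-wrapped async Recommendations component, so the dashboard renders immediately and the recommendations stream in once their data resolves. A minimal sketch of how that boundary behaves (the commit uses fallback={null}; RecommendationsSkeleton here is a hypothetical placeholder, not something in the repo):

```tsx
// Sketch only, not part of the commit: the same Suspense boundary as above,
// with a visible placeholder so the streaming behaviour is easier to see.
import { Suspense } from 'react';

import { AccountWithParams } from '@/packages/features/accounts/src/types/accounts';

import Recommendations from '../_components/recommendations';

function RecommendationsSkeleton() {
  // Hypothetical placeholder; any lightweight skeleton markup would do.
  return <div aria-busy="true">Loading recommendations…</div>;
}

export function RecommendationsSection({ account }: { account: AccountWithParams }) {
  return (
    <Suspense fallback={<RecommendationsSkeleton />}>
      {/* Async server component: rendered once loadAnalyses/loadRecommendations resolve. */}
      <Recommendations account={account} />
    </Suspense>
  );
}
```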


@@ -1,24 +1,40 @@
'use client';
'use server';
import React from 'react';
import { AccountWithParams } from '@/packages/features/accounts/src/types/accounts';
import { analysisResponses } from '../(dashboard)/analysis-results/test/test-responses';
import { loadAnalyses } from '../_lib/server/load-analyses';
import { loadRecommendations } from '../_lib/server/load-recommendations';
import OrderAnalysesCards, { OrderAnalysisCard } from './order-analyses-cards';
export default function Recommendations({
recommended,
countryCode,
export default async function Recommendations({
account,
}: {
recommended: OrderAnalysisCard[];
countryCode: string;
account: AccountWithParams;
}) {
if (recommended.length < 1) {
const { analyses, countryCode } = await loadAnalyses();
const analysisResults = analysisResponses;
const analysisRecommendations = await loadRecommendations(
analysisResults,
analyses,
account,
);
const orderAnalyses = analyses.filter((analysis) =>
analysisRecommendations.includes(analysis.title),
);
console.log('analysisRecommendations', analysisRecommendations);
if (orderAnalyses.length < 1) {
return null;
}
return (
<div>
<h4>Medreport soovitab teile</h4> {/* Estonian: "Medreport recommends to you" */}
<OrderAnalysesCards analyses={recommended} countryCode={countryCode} />
<OrderAnalysesCards analyses={orderAnalyses} countryCode={countryCode} />
</div>
);
}


@@ -1,133 +0,0 @@
import type { Mock } from 'jest-mock';
// ---- Mocks you can tweak per test ----
const createResponseMock = jest.fn();
const getLatestResponseTimeMock = jest.fn();
const getLatestUniqueAnalysResponsesMock = jest.fn();
const parsePersonalCodeMock = jest.fn(() => ({ gender: { value: 'male' } }));
// Mock OpenAI SDK
jest.mock('openai', () => {
return {
__esModule: true,
default: class OpenAI {
responses = { create: createResponseMock };
},
};
});
// Mock next/cache (global cache map so it persists between calls)
const globalCache = new Map<string, unknown>();
jest.mock('next/cache', () => {
return {
__esModule: true,
unstable_cache:
(fn: (...args: any[]) => Promise<any>, keyParts: any[], _opts?: any) =>
async (...args: any[]) => {
const key = JSON.stringify(keyParts);
if (globalCache.has(key)) return globalCache.get(key);
const val = await fn(...args);
globalCache.set(key, val);
return val;
},
};
});
// Mock your analysis helpers + personal code parser
jest.mock('../src/analysis-utils', () => ({
__esModule: true,
getLatestUniqueAnalysResponses: getLatestUniqueAnalysResponsesMock,
getLatestResponseTime: getLatestResponseTimeMock,
}));
jest.mock('../src/personal-code', () => ({
__esModule: true,
PersonalCode: { parsePersonalCode: parsePersonalCodeMock },
}));
describe('loadRecommendations', () => {
beforeEach(() => {
createResponseMock.mockReset();
getLatestResponseTimeMock.mockReset();
getLatestUniqueAnalysResponsesMock.mockReset();
globalCache.clear();
});
it('should call OpenAI once when latest date stays the same (cache hit on 2nd call)', async () => {
const date1 = new Date('2025-09-16T12:00:00Z');
getLatestResponseTimeMock.mockReturnValue(date1);
getLatestUniqueAnalysResponsesMock.mockImplementation((arr: any[]) => arr);
createResponseMock.mockResolvedValue({
output_text: JSON.stringify({ recommended: ['A', 'B'] }),
});
const { loadRecommendations } = await import('./load-recommendations');
const analysisResponses = [
{ name: 'x', value: 1, responseTime: date1 },
] as any[];
const analyses = [{ title: 't', description: 'd' }] as any[];
const account = { id: 'u1', personal_code: '12345678901' } as any;
// Act: 1st call (MISS)
const out1 = await loadRecommendations(
analysisResponses,
analyses,
account,
);
// Act: 2nd call with same date (HIT)
const out2 = await loadRecommendations(
analysisResponses,
analyses,
account,
);
// Assert: only one API call, result reused
expect(createResponseMock).toHaveBeenCalledTimes(1);
expect(out1).toEqual(['A', 'B']);
expect(out2).toEqual(['A', 'B']);
});
it('should call OpenAI again when latest date changes (new cache key)', async () => {
const date1 = new Date('2025-09-16T12:00:00Z');
const date2 = new Date('2025-09-17T00:00:00Z');
getLatestResponseTimeMock.mockReturnValueOnce(date1);
getLatestResponseTimeMock.mockReturnValueOnce(date2);
getLatestUniqueAnalysResponsesMock.mockImplementation((arr: any[]) => arr);
createResponseMock
.mockResolvedValueOnce({
output_text: JSON.stringify({ recommended: ['A', 'B'] }),
})
.mockResolvedValueOnce({
output_text: JSON.stringify({ recommended: ['C'] }),
});
const { loadRecommendations } = await import('./load-recommendations');
const analysisResponses = [
{ name: 'x', value: 1, responseTime: date1 },
] as any[];
const analyses = [{ title: 't', description: 'd' }] as any[];
const account = { id: 'u1', personal_code: '12345678901' } as any;
const out1 = await loadRecommendations(
analysisResponses,
analyses,
account,
);
const out2 = await loadRecommendations(
analysisResponses,
analyses,
account,
);
expect(createResponseMock).toHaveBeenCalledTimes(2);
expect(out1).toEqual(['A', 'B']);
expect(out2).toEqual(['C']);
});
});
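
The deleted spec above was built around the unstable_cache keying that this commit removes. A minimal replacement sketch for the simplified loader, reusing the mock approach and fixture shapes from that spec; the module paths, the '~/lib/utils' mock, and the assumption that the loader still returns the parsed recommended array are not verified against the repo:

```ts
// Sketch only, not part of the commit. Mock targets mirror what the new loader
// imports in the last diff below; adjust paths/aliases to the actual Jest config.
const mockCreateResponse = jest.fn();

// Mock the OpenAI SDK, as the deleted spec did.
jest.mock('openai', () => ({
  __esModule: true,
  default: class OpenAI {
    responses = { create: mockCreateResponse };
  },
}));

// The loader imports PersonalCode from '~/lib/utils'; stub just what it needs.
jest.mock('~/lib/utils', () => {
  const PersonalCode = {
    parsePersonalCode: jest.fn(() => ({ gender: { value: 'male' } })),
  };
  return { __esModule: true, default: PersonalCode, PersonalCode };
});

describe('loadRecommendations (without unstable_cache)', () => {
  beforeEach(() => {
    mockCreateResponse.mockReset();
    process.env.OPENAI_API_KEY = 'test-key';
  });

  it('returns the recommended titles from the model response', async () => {
    mockCreateResponse.mockResolvedValue({
      output_text: JSON.stringify({ recommended: ['A', 'B'] }),
    });
    const { loadRecommendations } = await import('./load-recommendations');

    const out = await loadRecommendations(
      // Shape follows getLatestUniqueAnalysResponses; values are hypothetical.
      [{ analysisName: 'HGB', results: { responseValue: 131, responseTime: '2025-09-16T12:00:00Z' } }] as any[],
      [{ title: 't', description: 'd' }] as any[],
      { id: 'u1', personal_code: '12345678901' } as any,
    );

    expect(mockCreateResponse).toHaveBeenCalledTimes(1);
    // Assumes the loader still returns responseJson.recommended, as the old code did.
    expect(out).toEqual(['A', 'B']);
  });

  it('returns [] and skips OpenAI when OPENAI_API_KEY is missing', async () => {
    delete process.env.OPENAI_API_KEY;
    const { loadRecommendations } = await import('./load-recommendations');

    const out = await loadRecommendations([] as any[], [] as any[], { personal_code: '1' } as any);

    expect(out).toEqual([]);
    expect(mockCreateResponse).not.toHaveBeenCalled();
  });
});
```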


@@ -1,9 +1,6 @@
import { cache } from 'react';
import { unstable_cache } from 'next/cache';
import { AccountWithParams } from '@/packages/features/accounts/src/types/accounts';
import crypto from 'crypto';
import OpenAI from 'openai';
import PersonalCode from '~/lib/utils';
@@ -19,20 +16,6 @@ type FormattedAnalysisResponse = {
responseTime: string;
};
function canonical(v: unknown): string {
return JSON.stringify(v, (_k, val) => {
if (val && typeof val === 'object' && !Array.isArray(val)) {
// sort object keys for stable JSON
return Object.keys(val)
.sort()
.reduce((o: any, k) => ((o[k] = (val as any)[k]), o), {});
}
return val;
});
}
const sha256 = (s: string) =>
crypto.createHash('sha256').update(s).digest('hex');
const getLatestResponseTime = (items: FormattedAnalysisResponse[]) => {
if (!items?.length) return null;
@@ -56,23 +39,29 @@ const getLatestUniqueAnalysResponses = (
console.log('analysisElements', analysisElements.length);
const map = new Map();
for (const it of analysisElements) {
const prev = map.get(it.analysisName);
if (it.results.responseTime) {
if (
!prev ||
new Date(it.results.responseTime) > new Date(prev.responseTime)
) {
map.set(it.analysisName, {
name: it.analysisName,
value: it.results.responseValue,
responseTime: it.results.responseTime,
});
const byName = analysisElements.reduce<
Record<string, { name: string; value: string; responseTime: string }>
>((acc, it) => {
const responseTime = it?.results?.responseTime;
const responseValue = it?.results?.responseValue;
if (!responseTime || !responseValue) return acc;
const key = it.analysisName;
const cur = acc[key];
const t = Date.parse(responseTime);
const prevT = cur ? Date.parse(cur.responseTime) : -Infinity;
if (!cur || t > prevT) {
acc[key] = {
name: key,
value: responseValue.toString(),
responseTime,
};
}
}
}
return [...map.values()];
return acc;
}, {});
return Object.values(byName);
};
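// Usage sketch (not part of the diff): the reduce above keeps only the newest
// reading per analysisName and normalises it to { name, value, responseTime }.
// Values here are hypothetical; shapes follow the code above:
//
//   getLatestUniqueAnalysResponses([
//     { analysisName: 'HGB', results: { responseValue: 131, responseTime: '2025-09-16T12:00:00Z' } },
//     { analysisName: 'HGB', results: { responseValue: 128, responseTime: '2025-09-10T08:00:00Z' } },
//   ]);
//   // => [{ name: 'HGB', value: '131', responseTime: '2025-09-16T12:00:00Z' }]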
async function recommendationsLoader(
@@ -80,6 +69,9 @@ async function recommendationsLoader(
analyses: OrderAnalysisCard[],
account: AccountWithParams | null,
): Promise<any> {
if (!process.env.OPENAI_API_KEY) {
return [];
}
if (!account?.personal_code) {
return [];
}
@@ -90,11 +82,11 @@ async function recommendationsLoader(
console.log('analyses', analyses);
const latestUniqueAnalysResponses =
getLatestUniqueAnalysResponses(analysisResponses);
const latestResponseTime = getLatestResponseTime(latestUniqueAnalysResponses);
const latestISO = latestResponseTime
? new Date(latestResponseTime).toISOString()
: 'none';
console.log('latestResponseTime', latestResponseTime);
// const latestResponseTime = getLatestResponseTime(latestUniqueAnalysResponses);
// const latestISO = latestResponseTime
// ? new Date(latestResponseTime).toISOString()
// : 'none';
// console.log('latestResponseTime', latestResponseTime);
const formattedAnalysisResponses = latestUniqueAnalysResponses.map(
({ name, value }) => ({ name, value }),
);
@@ -108,8 +100,9 @@ async function recommendationsLoader(
'latestUniqueAnalysResponses',
JSON.stringify(latestUniqueAnalysResponses),
);
const response = await client.responses.create({
model: 'gpt-5',
model: 'gpt-5-mini',
store: false,
prompt: {
id: 'pmpt_68ca9c8bfa8c8193b27eadc6496c36440df449ece4f5a8dd',
@@ -121,33 +114,7 @@ async function recommendationsLoader(
},
});
const responseJson = JSON.parse(response.output_text);
console.log('responseJson: ', responseJson);
const keyPayload = {
model: 'gpt-5', // swap to a model your project can access
promptId: 'pmpt_68ca9c8bfa8c8193b27eadc6496c36440df449ece4f5a8dd',
latestISO,
};
const key = 'recs:' + sha256(canonical(keyPayload));
const run = unstable_cache(async () => {
const response = await client.responses.create({
model: keyPayload.model,
store: false,
prompt: {
id: keyPayload.promptId,
variables: {
analyses: JSON.stringify(formattedAnalyses),
results: JSON.stringify(latestUniqueAnalysResponses),
gender: gender.value,
},
},
});
const json = JSON.parse(response.output_text);
console.log('response.output_text', response.output_text);
return json.recommended;
}, ['recommendations', key]);
return await run();
}
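
One fragile spot in the new path is JSON.parse(response.output_text), which throws if the model ever returns something that is not valid JSON. A small guard, sketched here under the assumption that the prompt keeps returning a { recommended: string[] } payload (the shape the deleted spec and the old json.recommended return relied on), would let the loader fall back to an empty list like its other early returns:

```ts
// Sketch only, not part of the commit: defensive parsing of the model output.
function parseRecommendations(outputText: string): string[] {
  try {
    const parsed = JSON.parse(outputText) as { recommended?: unknown };
    return Array.isArray(parsed.recommended)
      ? parsed.recommended.filter((t): t is string => typeof t === 'string')
      : [];
  } catch {
    // Malformed model output: behave like the loader's other guards and
    // recommend nothing rather than crashing the page.
    return [];
  }
}

// e.g. const recommended = parseRecommendations(response.output_text);
```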