Merge pull request #195 from mikepsinn/develop
llm library and started conversation2measurements library
mikepsinn authored Apr 29, 2024
2 parents 7d7c1c9 + 71903fa commit f15c528
Showing 7 changed files with 107 additions and 30 deletions.
76 changes: 76 additions & 0 deletions apps/nextjs/lib/conversation2measurements.ts
@@ -0,0 +1,76 @@
import { Measurement } from "@/types/models/Measurement";
import { textCompletion } from "@/lib/llm";

// IMPORTANT! Set the runtime to edge
export const runtime = 'edge';

export function conversation2MeasurementsPrompt(statement: string,
                                                localDateTime: string | null | undefined,
                                                previousStatements: string | null | undefined): string {
  if (!localDateTime) {
    const now = new Date();
    localDateTime = now.toISOString().slice(0, 19);
  }
  return `
You are a robot designed to collect diet, treatment, and symptom data from the user.
Immediately begin asking the user the following questions:
- What did you eat today?
- What did you drink today?
- What treatments did you take today?
- Rate all your symptoms on a scale of 1 to 5.
Convert the responses to the following JSON format:
[
\t{
\t\t"combinationOperation" : "SUM",
\t\t"startAt" : "{ISO_DATETIME_IN_UTC}",
\t\t"unitName" : "grams",
\t\t"value" : "5",
\t\t"variableCategoryName" : "Treatments",
\t\t"variableName" : "NMN",
\t\t"note" : "{MAYBE_THE_ORIGINAL_STATEMENT_FOR_REFERENCE}"
\t}
]
That would be the result if they said, "I took 5 grams of NMN."
For ratings, use the unit \`/5\`. The \`unitName\` should never be an empty string.
Also, after asking each question and getting a response, check if there's anything else the user wants to add to that response. For instance, after getting a response to "What did you eat today?", your next question should be, "Did you eat anything else today?". If they respond in the negative, move on to the next question.
Your responses should be in JSON format with 2 properties called data and message. The message property should contain the message to the user. The data property should contain an array of measurement objects created from the last user response.
${previousStatements ? `The following are the previous statements:
${previousStatements}` : ''}
Use the current local datetime ${localDateTime} to determine startDateLocal. If specified, also determine startTimeLocal, endDateLocal, and endTimeLocal, or just leave them null.
The following is a user request:
"""
${statement}
"""
The following is the user request translated into a JSON object with 2 spaces of indentation and no properties with the value undefined:
`;
}

export async function conversation2measurements(statement: string,
                                                localDateTime: string | null | undefined,
                                                previousStatements: string | null | undefined): Promise<Measurement[]> {
  let promptText = conversation2MeasurementsPrompt(statement, localDateTime, previousStatements);
  // Truncate overly long prompts (note: this limit is in characters, not actual tokens)
  const maxTokenLength = 1500;
  if (promptText.length > maxTokenLength) {
    promptText = promptText.slice(0, maxTokenLength);
  }
  const str = await textCompletion(promptText, "json_object");
  // Parse the model output; note that the prompt above asks for "data" and "message"
  // properties, while this reads a "measurements" property from the parsed object.
  const measurements: Measurement[] = [];
  const jsonArray = JSON.parse(str);
  jsonArray.measurements.forEach((measurement: Measurement) => {
    measurements.push(measurement);
  });
  return measurements;
}
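As a usage sketch (not part of this commit), a Next.js route handler could call conversation2measurements roughly as follows. The route path, handler name, and request-body fields are assumptions for illustration; only the conversation2measurements import reflects the file above.

import { NextResponse } from "next/server";
import { conversation2measurements } from "@/lib/conversation2measurements";

// Hypothetical POST handler, e.g. apps/nextjs/app/api/conversation2measurements/route.ts
export async function POST(request: Request) {
  // Assumed request body shape: { statement, localDateTime?, previousStatements? }
  const { statement, localDateTime, previousStatements } = await request.json();
  const measurements = await conversation2measurements(statement, localDateTime, previousStatements);
  return NextResponse.json({ measurements });
}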
27 changes: 27 additions & 0 deletions apps/nextjs/lib/llm.ts
@@ -0,0 +1,27 @@
import OpenAI from 'openai';

// Create an OpenAI API client (that's edge-friendly!)
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || '',
});

export async function textCompletion(promptText: string, returnType: "text" | "json_object"): Promise<string> {
  // Ask OpenAI for a (non-streaming) chat completion given the prompt
  const response = await openai.chat.completions.create({
    model: 'gpt-4-turbo',
    stream: false,
    //max_tokens: 150,
    messages: [
      { role: "system", content: `You are a helpful assistant that translates user requests into JSON objects` },
      { role: "user", content: promptText },
    ],
    response_format: { type: returnType },
  });

  if (!response.choices[0].message.content) {
    throw new Error('No content in response');
  }

  return response.choices[0].message.content;
}
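For illustration only (also not in this commit), textCompletion can be called with either return type. The prompts below are made-up examples; note that OpenAI's json_object mode expects the word "JSON" to appear in the messages, which the system prompt above already satisfies.

import { textCompletion } from "@/lib/llm";

async function example() {
  // Plain-text completion
  const text = await textCompletion("Say hello in one short sentence.", "text");

  // JSON-mode completion; parse the returned string into an object
  const raw = await textCompletion("Return a JSON object with a single property called greeting.", "json_object");
  const parsed = JSON.parse(raw);
  console.log(text, parsed);
}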

34 changes: 4 additions & 30 deletions apps/nextjs/lib/text2measurements.ts
@@ -1,13 +1,5 @@
-import OpenAI from 'openai';
 import { Measurement } from "@/types/models/Measurement";
-
-// Create an OpenAI API client (that's edge-friendly!)
-const openai = new OpenAI({
-  apiKey: process.env.OPENAI_API_KEY || '',
-});
-
-// IMPORTANT! Set the runtime to edge
-export const runtime = 'edge';
+import { textCompletion } from "@/lib/llm";
 
 export function generateText2MeasurementsPrompt(statement: string,
                                                 localDateTime: string | null | undefined): string {
@@ -201,28 +193,10 @@ The following is the user request translated into a JSON object with 2 spaces of
 export async function text2measurements(statement: string,
                                          localDateTime: string | null | undefined): Promise<Measurement[]> {
   const promptText = generateText2MeasurementsPrompt(statement, localDateTime);
-
-  // Ask OpenAI for a streaming chat completion given the prompt
-  const response = await openai.chat.completions.create({
-    model: 'gpt-4-turbo',
-    stream: false,
-    //max_tokens: 150,
-    messages: [
-      {"role": "system", "content": `You are a helpful assistant that translates user requests into JSON objects`},
-      {role: "user", "content": promptText},
-    ],
-    response_format: { type: "json_object" },
-  });
-
-  // Convert the response into an array of Measurement objects
+  const str = await textCompletion(promptText, "json_object");
+  const json = JSON.parse(str);
   const measurements: Measurement[] = [];
-  //console.log(response.choices[0].message.content);
-  let str = response.choices[0].message.content;
-  if (!str) {
-    throw new Error('No content in response');
-  }
-  let jsonArray = JSON.parse(str);
-  jsonArray.measurements.forEach((measurement: Measurement) => {
+  json.measurements.forEach((measurement: Measurement) => {
     measurements.push(measurement);
   });
   return measurements;
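For reference, the refactored text2measurements function after this change would read roughly as follows. This is simply the context and added lines of the hunk above reassembled; the closing brace falls outside the hunk and is assumed.

export async function text2measurements(statement: string,
                                         localDateTime: string | null | undefined): Promise<Measurement[]> {
  const promptText = generateText2MeasurementsPrompt(statement, localDateTime);
  // Delegate the OpenAI call to the shared llm helper
  const str = await textCompletion(promptText, "json_object");
  const json = JSON.parse(str);
  const measurements: Measurement[] = [];
  json.measurements.forEach((measurement: Measurement) => {
    measurements.push(measurement);
  });
  return measurements;
}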
Binary file added docs/images/dfda-framework-diagram.png
Binary file added docs/logo/fdai_qr_with_big_logo.png
