
Commit

Merge branch 'main' of github.com:Libertai/libertai-js
moshemalawach committed Jul 16, 2024
2 parents 92b3639 + 905a4a8 commit e0796a9
Showing 6 changed files with 361 additions and 336 deletions.
24 changes: 12 additions & 12 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "@libertai/libertai-js",
- "version": "0.0.4",
+ "version": "0.0.7",
"description": "In-browser SDK for interacting with LibertAI Decentralized AI Network",
"keywords": [],
"type": "module",
@@ -49,22 +49,22 @@
"prepare": "yarn build"
},
"devDependencies": {
- "@types/node": "^20.12.2",
- "@typescript-eslint/eslint-plugin": "^7.4.0",
- "@typescript-eslint/parser": "^7.4.0",
- "esbuild": "^0.20.2",
+ "@types/node": "^20.14.9",
+ "@typescript-eslint/eslint-plugin": "^7.15.0",
+ "@typescript-eslint/parser": "^7.15.0",
+ "esbuild": "^0.23.0",
"eslint": "^8.57.0",
- "prettier": "^3.0.0",
- "rimraf": "^5.0.5",
- "ts-node": "^10.2.1",
- "typescript": "^5.4.3"
+ "prettier": "^3.3.2",
+ "rimraf": "^5.0.7",
+ "ts-node": "^10.9.2",
+ "typescript": "^5.5.3"
},
"dependencies": {
- "axios": "^1.6.8",
- "langchain": "^0.1.30",
+ "axios": "^1.7.2",
+ "langchain": "^0.1.37",
"localforage": "^1.10.0",
"ml-distance": "^4.0.1",
- "uuid": "^9.0.1"
+ "uuid": "^10.0.0"
},
"resolutions": {
"@langchain/core": "^0.1"
21 changes: 9 additions & 12 deletions src/inference.ts
@@ -1,6 +1,6 @@
import axios from 'axios';

- import { Persona, Model, Message } from './types.js';
+ import { Message, Model, Persona } from './types.js';
import { calculateTokenLength } from './utils.js';

// Simple wrapper class around basic AI inference
@@ -69,9 +69,9 @@ export class LlamaCppApiEngine
}

let fullResults = compoundedResult + lastResult;
- for (let i = 0; i < stop_sequences.length; i++) {
- fullResults = fullResults.split(`\n${stop_sequences[i]}`).join('|||||');
- fullResults = fullResults.split(`${stop_sequences[i]}`).join('|||||');
+ for (const element of stop_sequences) {
+ fullResults = fullResults.split(`\n${element}`).join('|||||');
+ fullResults = fullResults.split(`${element}`).join('|||||');
}
const results = fullResults.split('|||||');

@@ -152,11 +152,11 @@ export class LlamaCppApiEngine
targetUser = messages[messages.length - 1].role;
}

- // Set {{char}} based on persona.name
+ // Set {{char}} based on persona.role
// Set {{user}} based on targetUser
// Set {{model}} based on model.name
let description = persona.description;
- description = description.replace(/\{\{char\}\}/g, persona.name);
+ description = description.replace(/\{\{char\}\}/g, persona.role);
description = description.replace(/\{\{user\}\}/g, targetUser);
description = description.replace(/\{\{model\}\}/g, model.name);

@@ -170,16 +170,13 @@
// Determine how many tokens we have left
usedTokens = calculateTokenLength(systemPrompt);

- // Iterate over messagse in reverse order
+ // Iterate over messages in reverse order
// to generate the chat log
- let chatLog = `${promptFormat.userPrepend}${persona.name.toLowerCase()}${promptFormat.userAppend}`;
+ let chatLog = `${promptFormat.userPrepend}${persona.role.toLowerCase()}${promptFormat.userAppend}`;
for (let i = messages.length - 1; i >= 0; i--) {
const message = messages[i];
- const timestamp_string = message.timestamp
- ? ` (at ${message.timestamp.toString()})`
- : '';
let messageLog = '';
- messageLog += `${promptFormat.userPrepend}${message.role.toLowerCase()}${timestamp_string}${promptFormat.userAppend}`;
+ messageLog += `${promptFormat.userPrepend}${message.role.toLowerCase()}${promptFormat.userAppend}`;
messageLog += `${message.content}`;
messageLog += `${promptFormat.stopSequence}`;
messageLog += `${promptFormat.lineSeparator}`;
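
Note on the src/inference.ts hunks above: the stop-sequence loop is a pure refactor (an index loop replaced by for...of), while the {{char}} placeholder and the chat-log header now read persona.role instead of persona.name, matching the Persona change in src/types.ts below. A minimal standalone sketch of the same sentinel split-and-join trick, assuming the engine keeps only the text before the first stop sequence (what happens to results is cut off by the diff, so that last step is an assumption):

// Sketch only, not the full LlamaCppApiEngine. Each stop sequence, with and
// without a leading newline, is rewritten to a sentinel; splitting on the
// sentinel then separates the reply from anything generated past a stop.
function trimAtStopSequences(text: string, stop_sequences: string[]): string {
  let fullResults = text;
  for (const element of stop_sequences) {
    fullResults = fullResults.split(`\n${element}`).join('|||||');
    fullResults = fullResults.split(`${element}`).join('|||||');
  }
  const results = fullResults.split('|||||');
  // Assumption: only the text before the first stop sequence is kept.
  return results[0];
}

// Example: everything after the '<|user|>' stop sequence is dropped.
trimAtStopSequences('Sure, here is the answer.\n<|user|> next turn', ['<|user|>']);
// => 'Sure, here is the answer.'
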
11 changes: 5 additions & 6 deletions src/knowledge-store.ts
@@ -3,12 +3,12 @@ import { distance } from 'ml-distance';
import {
Document,
Embedding,
- SearchResult,
KnowledgeStoreConfig,
+ SearchResult,
} from './types.js';
import { defaultKnowledgeStoreConfig } from './config.js';
import idb from './idb.js';
- import { chunkText, embed, createDocument, createEmbedding } from './utils.js';
+ import { chunkText, createDocument, createEmbedding, embed } from './utils.js';

export class KnowledgeStore {
config: KnowledgeStoreConfig;
@@ -65,7 +65,7 @@ export class KnowledgeStore {
async addDocument(
title: string,
content: string,
- tags = []
+ tags: string[] = []
): Promise<Document> {
// Create a new document object
const doc = createDocument(title, tags);
@@ -105,7 +105,7 @@

/**
* Remove a document from the store
- * @param documentIdd The ID of the document to remove
+ * @param documentId The ID of the document to remove
* @returns The document that was removed
* @throws An error if the document is not found
*/
@@ -149,7 +149,6 @@
/**
* Search the documents in the store for the given query for similarity by euclidean distance
* @param query The query to search for
- * @param callback A callback to be called with each result
* @param k The number of results to return
* @param max_distance The maximum distance between the query and a result
* @param tags The tags to filter by. If empty, no filtering is done
@@ -159,7 +158,7 @@
query: string,
k = 5,
max_distance = 15,
- tags = []
+ tags: string[] = []
): Promise<SearchResult[]> {
const query_vector = await embed(query, this.config.embeddingApiUrl);
let matches: SearchResult[] | null = null;
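
The visible API change in src/knowledge-store.ts is that addDocument and searchDocuments now declare their optional tags parameter as string[]. A hypothetical usage sketch against those two signatures; the bare constructor call is an assumption, since the constructor (and any required initialization) is outside this diff:

import { KnowledgeStore } from './knowledge-store.js';

// Assumed: a no-argument constructor that falls back to defaultKnowledgeStoreConfig.
const store = new KnowledgeStore();

// addDocument(title, content, tags: string[] = [])
const doc = await store.addDocument(
  'LibertAI overview',
  'LibertAI serves AI inference over a decentralized network.',
  ['docs'],
);

// searchDocuments(query, k = 5, max_distance = 15, tags: string[] = [])
const results = await store.searchDocuments('decentralized inference', 5, 15, ['docs']);
console.log(doc.id, results.length);
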
17 changes: 7 additions & 10 deletions src/types.ts
@@ -2,11 +2,11 @@

// Document metadata
export interface Document {
- // Documents should have a uniqueid
+ // Documents should have a unique id
id: string;
// Title of the document
title: string;
- // Tags for the document. Can be used to place docments in a collection.
+ // Tags for the document. Can be used to place documents in a collection.
tags: string[];
}

@@ -16,7 +16,7 @@ export interface Embedding {
id: string;
// Embeddings refer back to the document they are from
documentId: string;
- // The actual emebedding content
+ // The actual embedding content
content: string;
// The embedding vector (float[768])
vector: number[];
@@ -63,18 +63,15 @@ export interface PromptFormat {
// and extensible persona format of our own
// https://github.com/TavernAI/TavernAI/tree/main
export interface Persona {
- // avatarUrl
- avatarUrl: string;
-
- // Persona name
- name: string;
+ // Persona role
+ role: string;

// Persona description
description: string;
}

// Common model definition across different engine types
- // Defines resource, paramterization, persona, and prompt formatting
+ // Defines resource, parameterization, persona, and prompt formatting
// for the model
export interface Model {
/* Resource definition */
@@ -111,6 +108,6 @@ export type KnowledgeStoreConfig = {
embeddingApiUrl: string;
// Name of the local store to use
storeName: string;
- // Special key that identifies docuements metadata in the store
+ // Special key that identifies documents metadata in the store
documentsKey: string;
};
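
The Persona change above is easiest to read as a value: avatarUrl and name are gone, and role is now the field the {{char}} placeholder is filled from. A minimal example against the updated interface (the description text is illustrative):

import { Persona } from './types.js';

// Only role and description remain on Persona after this commit.
const assistant: Persona = {
  role: 'assistant',
  // {{char}}, {{user}} and {{model}} are substituted at prompt-build time.
  description: '{{char}} is a concise AI assistant, served as {{model}}, chatting with {{user}}.',
};
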
2 changes: 1 addition & 1 deletion src/utils.ts
@@ -14,7 +14,7 @@ export function calculateTokenLength(input: string): number {
/* Embedding utility functions */

/*
- * Split a single Text Document into mutliple Documents of a given size
+ * Split a single Text Document into multiple Documents of a given size
*/
export async function chunkText(
title: string,