Showing 5 changed files with 370 additions and 0 deletions.

@@ -0,0 +1,11 @@
OpenAI and Ollama Clients

- Streaming output
- The OpenAI interface uses the `ai` prefix for concise, user-friendly commands
- Return values can optionally be captured with `--out` instead of only being streamed to the terminal
- Supports chat context retention (kept per model; `--reset` clears it, `--forget` bypasses it)
- Customizable prompt functionality for `ai do`
  - Refer to [prompt.nu](prompt.nu) for definition guidelines
  - The default model can be overridden using `--model`
- Importing and exporting of Ollama models
- Connection details are managed through environment variables
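
A minimal usage sketch once the module is imported (the model name `llama3` and the file names below are placeholders; substitute whatever is installed locally):

    ollama gen llama3 'Why is the sky blue?'
    open main.rs | ollama chat llama3 'Explain this code: {}'
    'Bonjour le monde' | ai do trans-to-en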

@@ -0,0 +1,24 @@
export-env {
    use ollama.nu *
    use openai.nu *
    use prompt.nu *
}

export use ollama.nu *
export use openai.nu *

# Cosine similarity between two equal-length numeric vectors (e.g. embeddings).
export def 'similarity cosine' [a b] {
    if ($a | length) != ($b | length) {
        error make {msg: "The lengths of the vectors must be equal."}
    }
    $a | zip $b | reduce -f {p: 0, a: 0, b: 0} {|i,a|
        {
            p: ($a.p + ($i.0 * $i.1))
            a: ($a.a + ($i.0 * $i.0))
            b: ($a.b + ($i.1 * $i.1))
        }
    }
    | $in.p / (($in.a | math sqrt) * ($in.b | math sqrt))
}
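
# Example (shown as comments so nothing runs at import time):
#
#   similarity cosine [1 2 3] [1 2 3]   # => 1
#   similarity cosine [1 0] [0 1]       # => 0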

@@ -0,0 +1,186 @@
export-env {
    $env.OLLAMA_HOST = "http://localhost:11434"
    $env.OLLAMA_CHAT = {}
    $env.OLLAMA_HOME = [$env.HOME .ollama] | path join
}

def "nu-complete models" [] {
    http get $"($env.OLLAMA_HOST)/api/tags"
    | get models
    | each {{value: $in.name, description: $in.modified_at}}
}

export def "ollama info" [model: string@"nu-complete models"] {
    http post -t application/json $"($env.OLLAMA_HOST)/api/show" {name: $model}
}

export def "ollama embed" [
    model: string@"nu-complete models"
    input: string
] {
    http post -t application/json $"($env.OLLAMA_HOST)/api/embed" {
        model: $model, input: [$input]
    }
    | get embeddings.0
}
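
# Example: embed two strings and compare them with `similarity cosine` from the
# module root (the embedding model name is a placeholder for one pulled locally):
#
#   let a = ollama embed mxbai-embed-large 'cats purr'
#   let b = ollama embed mxbai-embed-large 'dogs bark'
#   similarity cosine $a $b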

# One-shot generation: `{}` in the prompt is replaced with piped-in text,
# -i attaches an image, -f returns the full API response instead of just the text.
export def "ollama gen" [
    model: string@"nu-complete models"
    prompt: string
    --image(-i): path
    --full(-f)
] {
    let content = $in | default ""
    let img = if ($image | is-empty) {
        {}
    } else {
        {images: [(open $image | encode base64)]}
    }
    let r = http post -t application/json $"($env.OLLAMA_HOST)/api/generate" {
        model: $model
        prompt: ($prompt | str replace "{}" $content)
        stream: false
        ...$img
    }
    if $full {
        $r
    } else {
        $r.response
    }
}
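
# Examples (model names and file names are placeholders):
#
#   open article.txt | ollama gen llama3 'Give a one-line summary: {}'   # piped text replaces {}
#   ollama gen llava 'Describe this picture' -i photo.png                # attach an image
#   ollama gen llama3 'Hello' -f                                         # full API response record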

# Multi-turn chat with streaming output. History is kept per model in
# $env.OLLAMA_CHAT; -r clears it, -f skips it entirely, -p sets the placeholder
# that piped input is substituted into, -o also returns the reply as a string.
export def --env "ollama chat" [
    model: string@"nu-complete models"
    message: string
    --image(-i): path
    --reset(-r)
    --forget(-f)
    --placehold(-p): string = '{}'
    --out(-o)
    --debug
] {
    let content = $in | default ""
    let img = if ($image | is-empty) {
        {}
    } else {
        {images: [(open $image | encode base64)]}
    }
    let msg = {
        role: "user"
        content: ($message | str replace -m $placehold $content)
        ...$img
    }
    if $debug {
        print $"(ansi grey)($msg.content)(ansi reset)"
    }
    if not $forget {
        if ($env.OLLAMA_CHAT | is-empty) or ($model not-in $env.OLLAMA_CHAT) {
            $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | insert $model [])
        }
        if $reset {
            $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | update $model [])
            print '✨'
        }
        $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | update $model {|x| $x | get $model | append $msg})
    }

    # Stream the response: each line of the reply is a JSON chunk; print the
    # content as it arrives and accumulate it into a single message.
    let r = http post -t application/json $"($env.OLLAMA_HOST)/api/chat" {
        model: $model
        messages: [
            ...(if $forget { [] } else { $env.OLLAMA_CHAT | get $model })
            $msg
        ]
        stream: true
    }
    | lines
    | reduce -f {msg: '', token: 0} {|i,a|
        let x = $i | parse -r '.*?(?<data>\{.*)'
        if ($x | is-empty) { return $a }
        let x = $x | get 0.data | from json
        let m = $x.message.content
        print -n $m
        $a
        | update msg {|x| $x.msg + $m }
        | update token {|x| $x.token + 1 }
    }
    if not $forget {
        let r = {role: 'assistant', content: $r.msg, token: $r.token}
        $env.OLLAMA_CHAT = ($env.OLLAMA_CHAT | update $model {|x| $x | get $model | append $r })
    }
    if $out { $r.msg }
}
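
# Examples (model name is a placeholder). History is stored per model, so a
# follow-up question can refer back to an earlier turn:
#
#   ollama chat llama3 'My name is Alice.'
#   ollama chat llama3 'What is my name?'
#   ollama chat llama3 -r 'Let us start over.'                                  # -r clears the history first
#   open notes.md | ollama chat llama3 -f -o 'Summarize: {}' | save summary.md  # one-off, reply captured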

def "nu-complete ollama model" [] {
    cd $"($env.OLLAMA_HOME)/models/manifests/"
    ls **/* | where type == file | get name
}

# Export an installed model into a self-contained directory: the weights are
# copied to model.bin, a Modelfile is reconstructed from the manifest layers,
# and the original model name is recorded in source.txt.
export def "ollama export" [
    model: string@"nu-complete ollama model"
    target
    --home: string
] {
    if ($target | path exists) {
        if ([y n] | input list "already exists, remove it?") == 'y' {
            rm -rf $target
        } else {
            return
        }
    }
    mkdir $target

    let base = {
        blob: ([$env.OLLAMA_HOME models blobs] | path join)
        manifests: ([$env.OLLAMA_HOME models manifests] | path join)
    }

    let tg = {
        bin: ([$target model.bin] | path join)
        model: ([$target Modelfile] | path join)
        source: ([$target source.txt] | path join)
    }

    # Rebuild the "<registry>/<namespace>/<model>:<tag>" name from the manifest path.
    $model | split row '/' | $"($in | range 0..<-1 | str join '/'):($in | last)" | save $tg.source

    let manifests = open ([$base.manifests $model] | path join) | from json

    for i in $manifests.layers {
        let digest = $i.digest
        let type = $i.mediaType | split row '.' | last
        let blob = [$base.blob ($i.digest | str replace ':' '-')] | path join
        match $type {
            model => {
                cp $blob $tg.bin
                $"FROM ./model.bin(char newline)" | save -a $tg.model
            }
            params => {
                let p = open $blob | from json
                $p
                | items {|k,v| {k: $k, v: $v} }
                | each {|x| $x.v | each {|y| $'PARAMETER ($x.k) "($y)"' } }
                | flatten
                | str join (char newline)
                | $"(char newline)($in)"
                | save -a $tg.model
            }
            _ => {
                $'(char newline)($type | str upcase) """(cat $blob)"""' | save -a $tg.model
            }
        }
    }

    print 'success'
}
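
# Example: export an installed model to a directory that `ollama import` can
# re-create elsewhere. The model argument is a manifest path relative to
# $OLLAMA_HOME/models/manifests (the completion lists them), e.g.:
#
#   ollama export registry.ollama.ai/library/llama3/latest /tmp/llama3-export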

# Re-create a model from a directory produced by `ollama export`.
export def "ollama import" [dir] {
    cd $dir
    let model = cat source.txt
    # No `ollama create` is defined in this module, so this falls through to the
    # external Ollama CLI, run from the export directory so its Modelfile is picked up.
    ollama create $model
}
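
# Example: on the destination machine, re-create the model from the exported
# directory (it must contain the Modelfile, model.bin and source.txt written by
# `ollama export`):
#
#   ollama import /tmp/llama3-export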

@@ -0,0 +1,124 @@
export-env {
    # Defaults target a local Ollama server's OpenAI-compatible API; point
    # OPENAI_HOST and OPENAI_API_KEY at your provider to use a hosted service.
    $env.OPENAI_HOST = "http://localhost:11434"
    $env.OPENAI_CHAT = {}
    $env.OPENAI_API_KEY = 'secret'
    $env.OPENAI_ORG_ID = ''
    $env.OPENAI_PROJECT_ID = ''
}

def "nu-complete models" [] {
    http get --headers [
        Authorization $"Bearer ($env.OPENAI_API_KEY)"
        OpenAI-Organization $env.OPENAI_ORG_ID
        OpenAI-Project $env.OPENAI_PROJECT_ID
    ] $"($env.OPENAI_HOST)/v1/models"
    | get data.id
}

# Chat against an OpenAI-compatible /v1/chat/completions endpoint; mirrors
# `ollama chat`, with history kept per model in $env.OPENAI_CHAT.
export def --env "ai chat" [
    model: string@"nu-complete models"
    message: string
    --image(-i): path
    --reset(-r)
    --forget(-f)
    --placehold(-p): string = '{}'
    --out(-o)
    --debug
] {
    let content = $in | default ""
    let img = if ($image | is-empty) {
        {}
    } else {
        {images: [(open $image | encode base64)]}
    }
    let msg = {
        role: "user"
        content: ($message | str replace -m $placehold $content)
        ...$img
    }
    if $debug {
        print $"(ansi grey)($message)\n---\n($placehold)\n---(ansi reset)"
        print $"(ansi grey)($msg.content)\n---(ansi reset)"
    }
    if not $forget {
        if ($env.OPENAI_CHAT | is-empty) or ($model not-in $env.OPENAI_CHAT) {
            $env.OPENAI_CHAT = ($env.OPENAI_CHAT | insert $model [])
        }
        if $reset {
            $env.OPENAI_CHAT = ($env.OPENAI_CHAT | update $model [])
            print '✨'
        }
        $env.OPENAI_CHAT = ($env.OPENAI_CHAT | update $model {|x| $x | get $model | append $msg})
    }

    # Stream the response: each `data: {...}` line is parsed and the content
    # deltas are printed as they arrive and accumulated into one message.
    let r = http post -t application/json --headers [
        Authorization $"Bearer ($env.OPENAI_API_KEY)"
    ] $"($env.OPENAI_HOST)/v1/chat/completions" {
        model: $model
        messages: [
            ...(if $forget { [] } else { $env.OPENAI_CHAT | get $model })
            $msg
        ]
        stream: true
    }
    | lines
    | reduce -f {msg: '', token: 0} {|i,a|
        let x = $i | parse -r '.*?(?<data>\{.*)'
        if ($x | is-empty) { return $a }
        let x = $x | get 0.data | from json
        # some chunks (e.g. the final one) may carry no delta.content
        let m = $x.choices | each { $in.delta.content? | default '' } | str join
        print -n $m
        $a
        | update msg {|x| $x.msg + $m }
        | update token {|x| $x.token + 1 }
    }
    if not $forget {
        let r = {role: 'assistant', content: $r.msg, token: $r.token}
        $env.OPENAI_CHAT = ($env.OPENAI_CHAT | update $model {|x| $x | get $model | append $r })
    }
    if $out { $r.msg }
}
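
# Examples: with the defaults above this talks to a local Ollama server; to use
# OpenAI itself, repoint the environment first (model names are placeholders):
#
#   $env.OPENAI_HOST = 'https://api.openai.com'
#   $env.OPENAI_API_KEY = 'sk-...'
#   ai chat gpt-4o-mini 'Hello!'
#   open draft.md | ai chat gpt-4o-mini -f -o 'Proofread: {}' | save draft.edited.md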

export def "ai embed" [
    model: string@"nu-complete models"
    input: string
] {
    http post -t application/json $"($env.OPENAI_HOST)/v1/embeddings" {
        model: $model, input: [$input], encoding_format: 'float'
    }
    | get data.0.embedding
}
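
# Note: this call sends no Authorization header, so it assumes a local,
# unauthenticated endpoint. Example (embedding model name is a placeholder):
#
#   let q = ai embed nomic-embed-text 'how do I reverse a list?'
#   let d = ai embed nomic-embed-text 'reversing lists in nushell'
#   similarity cosine $q $d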

def 'nu-complete role' [ctx] {
    $env.OPENAI_PROMPT | items {|k, v| {value: $k, description: $v.description? } }
}

# Run a predefined prompt from $env.OPENAI_PROMPT against the given (or piped)
# input. A random placeholder token is spliced into the prompt so that
# `ai chat -p` can substitute the input without clashing with literal `{}`.
export def 'ai do' [
    role: string@"nu-complete role"
    input?: string
    --out(-o)
    --model(-m): string@"nu-complete models"
    --debug
] {
    let input = if ($in | is-empty) { $input } else { $in }
    let placehold = $"<(random chars -l 6)>"
    let role = $env.OPENAI_PROMPT | get $role
    let model = if ($model | is-empty) {
        $role | get model
    } else {
        $model
    }
    let prompt = $role | get prompt | each {|x|
        if ($x | str replace -ar "['\"`]+" '' | $in == '{}') {
            $x | str replace '{}' $placehold
        } else {
            $x
        }
    } | str join (char newline)

    $input | ai chat $model -p $placehold --out=$out --debug=$debug $prompt
}
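
# Examples: run a role defined in prompt.nu against piped input; -o captures
# the reply instead of only streaming it, -m overrides the role's default model:
#
#   'こんにちは、世界' | ai do trans-to-en
#   open users.json | to json | ai do json-to-sql -m llama3 -o | save users.sql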

@@ -0,0 +1,25 @@
export-env {
    $env.OPENAI_PROMPT = {
        'json-to-sql': {
            prompt: [
                "Analyze the following JSON data to convert it into a SQL statement for creating a table:"
                "```"
                "{}"
                "```"
            ],
            model: 'qwen2:1.5b',
            description: 'Analyze JSON content, converting it into a SQL create table statement'
        },
        'trans-to-en': {
            prompt: [
                "Translate the following text into English:"
                "```"
                "{}"
                "```"
            ],
            model: 'qwen2:1.5b',
            description: 'Translation to English'
        }
    }
}
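
# A new role can be added by extending $env.OPENAI_PROMPT after the module is
# loaded; a hypothetical example (the role name and model are placeholders):
#
#   $env.OPENAI_PROMPT = ($env.OPENAI_PROMPT | insert 'git-commit' {
#       prompt: [
#           "Write a concise git commit message for the following diff:"
#           "```"
#           "{}"
#           "```"
#       ],
#       model: 'qwen2:1.5b',
#       description: 'Draft a commit message from a diff'
#   })
#   git diff | ai do git-commit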