From c629b7ae34a932a83d1f68d606311d7e63ea967f Mon Sep 17 00:00:00 2001 From: Egor Shmorgun Date: Fri, 28 Jul 2023 12:47:56 +0300 Subject: [PATCH 1/3] add text_service --- Project.toml | 2 +- src/GoogleCloud.jl | 5 +++-- src/api/api.jl | 1 + src/api/text_service.jl | 22 ++++++++++++++++++++++ 4 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 src/api/text_service.jl diff --git a/Project.toml b/Project.toml index a214844..85e2e5c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "GoogleCloud" uuid = "55e21f81-8b0a-565e-b5ad-6816892a5ee7" -version = "0.11.1" +version = "0.11.2" [deps] Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" diff --git a/src/GoogleCloud.jl b/src/GoogleCloud.jl index 0b77083..ccbc1dd 100644 --- a/src/GoogleCloud.jl +++ b/src/GoogleCloud.jl @@ -7,7 +7,7 @@ export JSONCredentials, MetadataCredentials, GoogleSession, authorize, set_session!, get_session export - iam, storage, compute, container, pubsub, logging, datastore + iam, storage, compute, container, pubsub, logging, datastore, text_service export KeyStore, commit!, fetch!, sync!, clearcache!, clearpending!, destroy!, connect!, watch, unwatch @@ -33,6 +33,7 @@ import .api: _container.container, _pubsub.pubsub, _logging.logging, - _datastore.datastore + _datastore.datastore, + _text_service.text_service end diff --git a/src/api/api.jl b/src/api/api.jl index 837c936..b98f8c1 100644 --- a/src/api/api.jl +++ b/src/api/api.jl @@ -362,5 +362,6 @@ include("container.jl") include("pubsub.jl") include("logging.jl") include("datastore.jl") +include("text_service.jl") end diff --git a/src/api/text_service.jl b/src/api/text_service.jl new file mode 100644 index 0000000..b05c722 --- /dev/null +++ b/src/api/text_service.jl @@ -0,0 +1,22 @@ +module _text_service + +export text_service + +using ..api +using ...root + + +text_service = APIRoot( + "https://{region}-aiplatform.googleapis.com/v1/projects/{project_id}", + Dict{String,String}( + "cloud-platform" => "Full access", + "cloud-platform.read-only" => "Read only" + ), + PALM=APIResource( + "locations/{region}/publishers/google/models", + generate_text=APIMethod(:POST, "text-bison:predict", "Generate text from prompt"), + generate_embedding=APIMethod(:POST, "textembedding-gecko:predict", "Generate text from prompt") + ) +) + +end From e532c5619aa90971d733fb5536ecc6399f75363c Mon Sep 17 00:00:00 2001 From: Egor Shmorgun Date: Mon, 21 Aug 2023 15:45:53 +0300 Subject: [PATCH 2/3] update google cloud to predict with model name as parameter --- src/GoogleCloud.jl | 4 ++++ src/api/text_service.jl | 10 ++++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/GoogleCloud.jl b/src/GoogleCloud.jl index ccbc1dd..2c80981 100644 --- a/src/GoogleCloud.jl +++ b/src/GoogleCloud.jl @@ -8,6 +8,8 @@ export set_session!, get_session export iam, storage, compute, container, pubsub, logging, datastore, text_service +export + BISON_TEXT_MODEL_NAME, GEKKO_EMBEDDING_MODEL_NAME export KeyStore, commit!, fetch!, sync!, clearcache!, clearpending!, destroy!, connect!, watch, unwatch @@ -36,4 +38,6 @@ import .api: _datastore.datastore, _text_service.text_service +using .api._text_service: BISON_TEXT_MODEL_NAME, GEKKO_EMBEDDING_MODEL_NAME + end diff --git a/src/api/text_service.jl b/src/api/text_service.jl index b05c722..cfe04dd 100644 --- a/src/api/text_service.jl +++ b/src/api/text_service.jl @@ -1,10 +1,13 @@ module _text_service -export text_service +export text_service, BISON_TEXT_MODEL_NAME, GEKKO_EMBEDDING_MODEL_NAME using ..api using ...root 
+const BISON_TEXT_MODEL_NAME = "text-bison" +const GEKKO_EMBEDDING_MODEL_NAME = "textembedding-gecko" + text_service = APIRoot( "https://{region}-aiplatform.googleapis.com/v1/projects/{project_id}", @@ -13,9 +16,8 @@ text_service = APIRoot( "cloud-platform.read-only" => "Read only" ), PALM=APIResource( - "locations/{region}/publishers/google/models", - generate_text=APIMethod(:POST, "text-bison:predict", "Generate text from prompt"), - generate_embedding=APIMethod(:POST, "textembedding-gecko:predict", "Generate text from prompt") + "locations/{region}/publishers/google/models/{model_name}:predict", + predict=APIMethod(:POST, "", "Perform an online prediction.") ) ) From 27256aaf0d12d8dbb3870f316ce8e16f6d045e15 Mon Sep 17 00:00:00 2001 From: Egor Shmorgun Date: Thu, 11 Jan 2024 16:06:38 +0300 Subject: [PATCH 3/3] add test with mocks --- Project.toml | 1 + src/api/api.jl | 5 +- test/fixtures/text_service_response.json | 59 ++++++++++++++++++++++++ test/runtests.jl | 5 +- test/text_service.jl | 55 ++++++++++++++++++++++ 5 files changed, 121 insertions(+), 4 deletions(-) create mode 100644 test/fixtures/text_service_response.json create mode 100644 test/text_service.jl diff --git a/Project.toml b/Project.toml index 85e2e5c..d3ab0ce 100644 --- a/Project.toml +++ b/Project.toml @@ -10,6 +10,7 @@ JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" Libz = "2ec943e9-cfe8-584d-b93d-64dcb6d567b7" Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a" MbedTLS = "739be429-bea8-5141-9913-cc70e7f3736d" +Mocking = "78c3b35d-d492-501b-9361-3d52fe80e533" MsgPack = "99f44e22-a591-53d1-9472-aa23ef4bd671" Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" diff --git a/src/api/api.jl b/src/api/api.jl index b98f8c1..3afd7ff 100644 --- a/src/api/api.jl +++ b/src/api/api.jl @@ -12,6 +12,7 @@ import MbedTLS import Libz import JSON using Markdown +using Mocking using ..session using ..error @@ -238,7 +239,7 @@ function execute(session::GoogleSession, resource::APIResource, method::APIMetho end # obtain and use access token - auth = authorize(session) + auth = @mock authorize(session) headers = Dict{String, String}( "Authorization" => "$(auth[:token_type]) $(auth[:access_token])" ) @@ -288,7 +289,7 @@ function execute(session::GoogleSession, resource::APIResource, method::APIMetho @info("Attempt: $attempt") end res = try - HTTP.request(string(method.verb), + @mock HTTP.request(string(method.verb), path_replace(method.path, path_args), headers, data; query=params ) catch e diff --git a/test/fixtures/text_service_response.json b/test/fixtures/text_service_response.json new file mode 100644 index 0000000..7416dca --- /dev/null +++ b/test/fixtures/text_service_response.json @@ -0,0 +1,59 @@ +{ + "metadata": { + "tokenMetadata": { + "inputTokenCount": { + "totalTokens": 3, + "totalBillableCharacters": 17 + }, + "outputTokenCount": { + "totalTokens": 200, + "totalBillableCharacters": 837 + } + } + }, + "predictions": [ + { + "safetyAttributes": { + "scores": [ + 0.1, + 0.1 + ], + "categories": [ + "Finance", + "Health" + ], + "safetyRatings": [ + { + "probabilityScore": 0, + "severity": "NEGLIGIBLE", + "category": "Dangerous Content", + "severityScore": 0 + }, + { + "probabilityScore": 0, + "severity": "NEGLIGIBLE", + "category": "Harassment", + "severityScore": 0 + }, + { + "probabilityScore": 0, + "severity": "NEGLIGIBLE", + "category": "Hate Speech", + "severityScore": 0 + }, + { + "probabilityScore": 0, + "severity": "NEGLIGIBLE", + "category": "Sexually Explicit", + "severityScore": 0 + } + ], + "blocked": false + }, + "content": " 
As an AI language model, I don't have a physical presence or personal experiences like humans do. However, I can provide you with information about my capabilities and the technology behind me.\n\nI am a large language model trained by Google. My training data includes a vast corpus of text and code from the web, books, and other sources. This training enables me to understand and generate human language, answer questions, write creatively, and assist with various tasks.\n\nMy responses are based on the patterns and knowledge I have learned from the training data. I do not have emotions, opinions, or personal biases. My goal is to provide accurate and informative responses based on the information available to me.\n\nI am continuously learning and improving as I interact with users and receive feedback. The more I am used, the better I become at understanding and responding to human requests.\n\nIf you have any specific questions or tasks you would like assistance with, feel free to ask. I'll do my best", + "citationMetadata": { + "citations": [] + } + } + ] +} diff --git a/test/runtests.jl b/test/runtests.jl index e2f88af..9d582b2 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,5 +1,6 @@ using GoogleCloud -using Test +using Test include("api.jl") -#include("storage.jl") +# include("storage.jl") +include("text_service.jl") diff --git a/test/text_service.jl b/test/text_service.jl new file mode 100644 index 0000000..bd12f3c --- /dev/null +++ b/test/text_service.jl @@ -0,0 +1,55 @@ +using GoogleCloud +using Mocking +using Test +using HTTP +using JSON + +Mocking.activate() + +const FIXTURES_DIR = joinpath(@__DIR__, "fixtures") + +model_params = ( + temperature=0.7, + maxOutputTokens=200, + topP=0.7, + topK=40 +) + +params = Dict( + :instances => [ + Dict(:prompt => "Tell about yourself") + ], + :parameters => model_params +) + +http_response_mock = HTTP.Response( + 200, + Dict("Content-Type" => "application/json"), + read(joinpath(FIXTURES_DIR, "text_service_response.json")) +) + +authorize_response_mock = Dict(:access_token => "test-token", :token_type => "Bearer") + +@testset "Testing text_service" begin + http_patch = @patch HTTP.request(args...; kwargs...) = http_response_mock + authorize_patch = @patch GoogleCloud.api.authorize(_session) = authorize_response_mock + + default_region = "us-central1" + project_id = "test-project-id" + + apply([http_patch, authorize_patch]) do + response = text_service( + :PALM, + :predict, + default_region, + project_id, + default_region, + GoogleCloud.BISON_TEXT_MODEL_NAME, + data=params + ) + + @test response isa AbstractDict + @test haskey(response, :predictions) + @test haskey(response, :metadata) + end +end
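
For review context, a minimal usage sketch of the API added by this series, run against a real Vertex AI endpoint rather than the mocks in test/text_service.jl. The credentials path, project id, and region below are placeholders, and the scope name comes from the APIRoot's scope table; the positional arguments fill the path placeholders in the same order as in the mocked test (root {region}, {project_id}, then the resource-level {region} and {model_name}).

using GoogleCloud

# Placeholder service-account key and project; substitute real values.
creds = JSONCredentials(expanduser("~/credentials.json"))
session = GoogleSession(creds, ["cloud-platform"])
set_session!(text_service, session)

response = text_service(
    :PALM, :predict,
    "us-central1",             # {region} in the API root URL
    "my-project-id",           # {project_id}
    "us-central1",             # {region} in the resource path
    BISON_TEXT_MODEL_NAME,     # {model_name}, exported by this series
    data=Dict(
        :instances  => [Dict(:prompt => "Tell about yourself")],
        :parameters => (temperature=0.7, maxOutputTokens=200, topP=0.7, topK=40)
    )
)

# The parsed response mirrors the fixture: response[:predictions] holds the
# generated content, response[:metadata] the token accounting.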