
Commit 27d35c8: regend client

anish-palakurthi committed Jul 16, 2024 (1 parent: 69d9ca9)

Showing 13 changed files with 162 additions and 156 deletions.
10 changes: 9 additions & 1 deletion docs/docs/snippets/clients/providers/vertex.mdx
@@ -3,7 +3,15 @@ title: vertex-ai
slug: docs/snippets/clients/providers/vertex
---

The `vertex-ai` provider is used to interact with the Google Vertex AI services.
The `vertex-ai` provider is used to interact with the Google Vertex AI services, specifically the following endpoints:

`https://${LOCATION}-aiplatform.googleapis.com/v1/projects/${PROJECT_ID}/locations/${LOCATION}/publishers/google/models/${MODEL_ID}:streamGenerateContent`

`https://${LOCATION}-aiplatform.googleapis.com/v1/projects/${PROJECT_ID}/locations/${LOCATION}/publishers/google/models/${MODEL_ID}:generateContent`
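
For instance (an illustrative substitution, not part of the original docs), with `PROJECT_ID=my-project`, `LOCATION=us-central1`, and `MODEL_ID=gemini-1.5-pro`, the non-streaming endpoint resolves to:

`https://us-central1-aiplatform.googleapis.com/v1/projects/my-project/locations/us-central1/publishers/google/models/gemini-1.5-pro:generateContent`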
Example:
```baml BAML
// Illustrative configuration; option values here are placeholders.
// See integ-tests/baml_src/clients.baml in this commit for a real instance.
client<llm> Vertex {
  provider vertex-ai
  options {
    model gemini-1.5-pro
    project_id my-project
    location us-central1
  }
}
```
@@ -103,8 +103,8 @@ impl TryFrom<(&ClientProperty, &RuntimeContext)> for LLMPrimitiveProvider {
"google-ai" => {
GoogleAIClient::dynamic_new(value, ctx).map(LLMPrimitiveProvider::Google)
}
// "aws-bedrock" => aws::AwsClient::dynamic_new(value, ctx).map(LLMPrimitiveProvider::Aws),
"vertex-ai" => VertexClient::dynamic_new(value, ctx).map(LLMPrimitiveProvider::Vertex),
// dynamic_new is not implemented for aws::AwsClient
other => {
let options = [
"openai",
@@ -428,7 +428,7 @@ impl RequestBuilder for VertexClient {
};

let credentials = self.properties.service_account_details.clone();
log::info!("Credentials: {:?}", credentials);

let access_token = if let Some((key, value)) = &credentials {
if key == "GOOGLE_TOKEN" {
value.clone()
@@ -647,8 +647,6 @@ fn convert_chat_prompt_to_body(
.collect::<serde_json::Value>(),
);

log::debug!("converted chat prompt to body: {:#?}", map);

return map;
}

3 changes: 2 additions & 1 deletion engine/language_client_typescript/native.d.ts
@@ -113,10 +113,11 @@ export interface BamlLogEvent {
startTime: string
}

export function invoke_runtime_cli(params: Array<string>): void
export declare function invoke_runtime_cli(params: Array<string>): void

export interface LogEventMetadata {
eventId: string
parentId?: string
rootEventId: string
}
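
For context, an illustrative sketch (not part of the commit) of how such an ambient declaration is consumed: in a `.d.ts` file, `export declare function` describes the shape of a native binding implemented elsewhere, so callers get type checking without a JS body. The CLI arguments below are hypothetical.

```typescript
// native.d.ts-style ambient declaration (illustrative)
export declare function invoke_runtime_cli(params: Array<string>): void

// consumer.ts: the implementation is supplied by the native addon at runtime
import { invoke_runtime_cli } from "./native"
invoke_runtime_cli(["generate", "--from", "baml_src"]) // hypothetical args
```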

3 changes: 1 addition & 2 deletions integ-tests/baml_src/clients.baml
@@ -89,8 +89,7 @@ client<llm> Gemini {
}

client<llm> Vertex {
provider vertex-ai

provider vertex-ai
options {
model gemini-1.5-pro
project_id anish-testing-426119
@@ -1,6 +1,6 @@
// test for int
function TestFnNamedArgsSingleInt(myInt: int) -> string {
client Vertex
client GPT35
prompt #"
Return this value back to me: {{myInt}}
"#
2 changes: 1 addition & 1 deletion integ-tests/baml_src/test-files/providers/providers.baml
@@ -5,7 +5,7 @@ function TestAnthropic(input: string) -> string {
"#
}

function TestOpenAI(input: string) -> string {
function PromptTestOpenAI(input: string) -> string {
client GPT35
prompt #"
Write a nice haiku about {{ input }}
114 changes: 57 additions & 57 deletions integ-tests/python/baml_client/client.py
@@ -992,6 +992,30 @@ async def PromptTestClaudeChatNoSystem(
mdl = create_model("PromptTestClaudeChatNoSystemReturnType", inner=(str, ...))
return coerce(mdl, raw.parsed())

async def PromptTestOpenAI(
self,
input: str,
baml_options: BamlCallOptions = {},
) -> str:
__tb__ = baml_options.get("tb", None)
if __tb__ is not None:
tb = __tb__._tb
else:
tb = None
__cr__ = baml_options.get("client_registry", None)

raw = await self.__runtime.call_function(
"PromptTestOpenAI",
{
"input": input,
},
self.__ctx_manager.get(),
tb,
__cr__,
)
mdl = create_model("PromptTestOpenAIReturnType", inner=(str, ...))
return coerce(mdl, raw.parsed())

async def PromptTestOpenAIChat(
self,
input: str,
@@ -1448,30 +1472,6 @@ async def TestOllama(
mdl = create_model("TestOllamaReturnType", inner=(str, ...))
return coerce(mdl, raw.parsed())

async def TestOpenAI(
self,
input: str,
baml_options: BamlCallOptions = {},
) -> str:
__tb__ = baml_options.get("tb", None)
if __tb__ is not None:
tb = __tb__._tb
else:
tb = None
__cr__ = baml_options.get("client_registry", None)

raw = await self.__runtime.call_function(
"TestOpenAI",
{
"input": input,
},
self.__ctx_manager.get(),
tb,
__cr__,
)
mdl = create_model("TestOpenAIReturnType", inner=(str, ...))
return coerce(mdl, raw.parsed())

async def TestOpenAILegacyProvider(
self,
input: str,
@@ -2893,6 +2893,39 @@ def PromptTestClaudeChatNoSystem(
self.__ctx_manager.get(),
)

def PromptTestOpenAI(
self,
input: str,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[Optional[str], str]:
__tb__ = baml_options.get("tb", None)
if __tb__ is not None:
tb = __tb__._tb
else:
tb = None
__cr__ = baml_options.get("client_registry", None)

raw = self.__runtime.stream_function(
"PromptTestOpenAI",
{
"input": input,
},
None,
self.__ctx_manager.get(),
tb,
__cr__,
)

mdl = create_model("PromptTestOpenAIReturnType", inner=(str, ...))
partial_mdl = create_model("PromptTestOpenAIPartialReturnType", inner=(Optional[str], ...))

return baml_py.BamlStream[Optional[str], str](
raw,
lambda x: coerce(partial_mdl, x),
lambda x: coerce(mdl, x),
self.__ctx_manager.get(),
)

def PromptTestOpenAIChat(
self,
input: str,
@@ -3520,39 +3553,6 @@ def TestOllama(
self.__ctx_manager.get(),
)

def TestOpenAI(
self,
input: str,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[Optional[str], str]:
__tb__ = baml_options.get("tb", None)
if __tb__ is not None:
tb = __tb__._tb
else:
tb = None
__cr__ = baml_options.get("client_registry", None)

raw = self.__runtime.stream_function(
"TestOpenAI",
{
"input": input,
},
None,
self.__ctx_manager.get(),
tb,
__cr__,
)

mdl = create_model("TestOpenAIReturnType", inner=(str, ...))
partial_mdl = create_model("TestOpenAIPartialReturnType", inner=(Optional[str], ...))

return baml_py.BamlStream[Optional[str], str](
raw,
lambda x: coerce(partial_mdl, x),
lambda x: coerce(mdl, x),
self.__ctx_manager.get(),
)

def TestOpenAILegacyProvider(
self,
input: str,
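
Since this commit only renames the generated entry points (`TestOpenAI` to `PromptTestOpenAI`), calling the regenerated client is unchanged apart from the name. A minimal usage sketch, assuming the `b` client instance and its `b.stream` accessor exported by the generated `baml_client` package (conventions not shown in this diff):

```python
import asyncio

from baml_client import b  # assumed export of the generated client instance


async def main() -> None:
    # Non-streaming call: awaits the coerced `str` return value.
    haiku = await b.PromptTestOpenAI("the ocean")
    print(haiku)

    # Streaming call: partials are Optional[str], the final value is str.
    stream = b.stream.PromptTestOpenAI("the ocean")
    async for partial in stream:
        print(partial)
    print(await stream.get_final_response())


asyncio.run(main())
```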
0 comments on commit 27d35c8