From f631b158874bd921377834a1d6ab983721f1d81d Mon Sep 17 00:00:00 2001
From: Anish Palakurthi
Date: Thu, 25 Jul 2024 09:24:17 -0700
Subject: [PATCH] fixed build

---
 .../baml-schema-wasm/src/runtime_wasm/mod.rs  | 147 ------------------
 .../test-files/strategies/fallback.baml       |   2 +
 integ-tests/python/baml_client/inlinedbaml.py |   2 +-
 integ-tests/ruby/baml_client/inlined.rb       |   2 +-
 .../typescript/baml_client/inlinedbaml.ts     |   2 +-
 .../src/baml_wasm_web/EventListener.tsx       |   1 -
 6 files changed, 5 insertions(+), 151 deletions(-)

diff --git a/engine/baml-schema-wasm/src/runtime_wasm/mod.rs b/engine/baml-schema-wasm/src/runtime_wasm/mod.rs
index bebc314f9..8435f601a 100644
--- a/engine/baml-schema-wasm/src/runtime_wasm/mod.rs
+++ b/engine/baml-schema-wasm/src/runtime_wasm/mod.rs
@@ -1145,31 +1145,6 @@ impl WasmRuntime {
         None
     }
 
-    #[wasm_bindgen]
-    pub fn get_function_of_testcase(
-        &self,
-        file_name: &str,
-        cursor_idx: usize,
-    ) -> Option<WasmParentFunction> {
-        let testcases = self.list_testcases();
-
-        for tc in testcases {
-            let span = tc.span;
-            if span.file_path.as_str().ends_with(file_name)
-                && ((span.start + 1)..=(span.end + 1)).contains(&cursor_idx)
-            {
-                let first_function = tc
-                    .parent_functions
-                    .iter()
-                    .find(|f| f.start <= cursor_idx && cursor_idx <= f.end)
-                    .cloned();
-
-                return first_function;
-            }
-        }
-        None
-    }
-
     #[wasm_bindgen]
     pub fn list_testcases(&self) -> Vec<WasmTestCase> {
         self.runtime
@@ -1302,128 +1277,6 @@ impl WasmRuntime {
         }
         None
     }
-
-    #[wasm_bindgen]
-    pub fn list_testcases(&self) -> Vec<WasmTestCase> {
-        self.runtime
-            .internal()
-            .ir()
-            .walk_tests()
-            .map(|tc| {
-                let params = match tc.test_case_params(&self.runtime.env_vars()) {
-                    Ok(params) => Ok(params
-                        .iter()
-                        .map(|(k, v)| {
-                            let as_str = match v {
-                                Ok(v) => match serde_json::to_string(v) {
-                                    Ok(s) => Ok(s),
-                                    Err(e) => Err(e.to_string()),
-                                },
-                                Err(e) => Err(e.to_string()),
-                            };
-
-                            let (value, error) = match as_str {
-                                Ok(s) => (Some(s), None),
-                                Err(e) => (None, Some(e)),
-                            };
-
-                            WasmParam {
-                                name: k.to_string(),
-                                value,
-                                error,
-                            }
-                        })
-                        .collect()),
-                    Err(e) => Err(e.to_string()),
-                };
-
-                let (mut params, error) = match params {
-                    Ok(p) => (p, None),
-                    Err(e) => (Vec::new(), Some(e)),
-                };
-
-                // Any missing params should be set to an error
-                let _ = tc.function().inputs().right().map(|func_params| {
-                    for (param_name, t) in func_params {
-                        if !params.iter().any(|p| p.name.cmp(param_name).is_eq())
-                            && !t.is_optional()
-                        {
-                            params.insert(
-                                0,
-                                WasmParam {
-                                    name: param_name.to_string(),
-                                    value: None,
-                                    error: Some("Missing parameter".to_string()),
-                                },
-                            );
-                        }
-                    }
-                });
-
-                let wasm_span = match tc.span() {
-                    Some(span) => span.into(),
-                    None => WasmSpan::default(),
-                };
-
-                WasmTestCase {
-                    name: tc.test_case().name.clone(),
-                    inputs: params,
-                    error,
-                    span: wasm_span,
-                    parent_functions: tc
-                        .test_case()
-                        .functions
-                        .iter()
-                        .map(|f| {
-                            let (start, end) = f
-                                .attributes
-                                .span
-                                .as_ref()
-                                .map_or((0, 0), |f| (f.start, f.end));
-                            WasmParentFunction {
-                                start,
-                                end,
-                                name: f.elem.name().to_string(),
-                            }
-                        })
-                        .collect(),
-                }
-            })
-            .collect()
-    }
-
-    #[wasm_bindgen]
-    pub fn get_testcase_from_position(
-        &self,
-        parent_function: WasmFunction,
-        cursor_idx: usize,
-    ) -> Option<WasmTestCase> {
-        let testcases = parent_function.test_cases;
-        for testcase in testcases {
-            let span = testcase.clone().span;
-
-            if span.file_path.as_str() == (parent_function.span.file_path)
-                && ((span.start + 1)..=(span.end + 1)).contains(&cursor_idx)
-            {
-                return Some(testcase);
-            }
-        }
-        None
-    }
-}
-// Define a new struct to store the important information
-#[wasm_bindgen(getter_with_clone, inspectable)]
-#[derive(Serialize, Deserialize, Debug)]
-pub struct SerializableOrchestratorNode {
-    pub provider: String,
-}
-
-impl From<&OrchestratorNode> for SerializableOrchestratorNode {
-    fn from(node: &OrchestratorNode) -> Self {
-        SerializableOrchestratorNode {
-            provider: node.provider.to_string(),
-        }
-    }
 }
 // Define a new struct to store the important information
 #[wasm_bindgen(getter_with_clone, inspectable)]
diff --git a/integ-tests/baml_src/test-files/strategies/fallback.baml b/integ-tests/baml_src/test-files/strategies/fallback.baml
index 9378e3f9c..a90af694f 100644
--- a/integ-tests/baml_src/test-files/strategies/fallback.baml
+++ b/integ-tests/baml_src/test-files/strategies/fallback.baml
@@ -16,6 +16,8 @@ client FallbackClient {
       FaultyClient,
       RetryClientConstant,
       GPT35
+      Gemini
+
     ]
   }
 }
diff --git a/integ-tests/python/baml_client/inlinedbaml.py b/integ-tests/python/baml_client/inlinedbaml.py
index 375084dcc..5c4dfb64a 100644
--- a/integ-tests/python/baml_client/inlinedbaml.py
+++ b/integ-tests/python/baml_client/inlinedbaml.py
@@ -66,7 +66,7 @@
   "test-files/functions/prompts/with-chat-messages.baml": "\nfunction PromptTestOpenAIChat(input: string) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAIChatNoSystem(input: string) -> string {\n client GPT35\n prompt #\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChat(input: string) -> string {\n client Claude\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChatNoSystem(input: string) -> string {\n client Claude\n prompt #\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\ntest TestSystemAndNonSystemChat1 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"cats\"\n }\n}\n\ntest TestSystemAndNonSystemChat2 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"lion\"\n }\n}",
   "test-files/functions/v2/basic.baml": "\n\nfunction ExtractResume2(resume: string) -> Resume {\n client GPT4\n prompt #\"\n {{ _.role('system') }}\n\n Extract the following information from the resume:\n\n Resume:\n <<<<\n {{ resume }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}\n\n\nclass WithReasoning {\n value string\n reasoning string @description(#\"\n Why the value is a good fit.\n \"#)\n}\n\n\nclass SearchParams {\n dateRange int? @description(#\"\n In ISO duration format, e.g. P1Y2M10D.\n \"#)\n location string[]\n jobTitle WithReasoning? @description(#\"\n An exact job title, not a general category.\n \"#)\n company WithReasoning? @description(#\"\n The exact name of the company, not a product or service.\n \"#)\n description WithReasoning[] @description(#\"\n Any specific projects or features the user is looking for.\n \"#)\n tags (Tag | string)[]\n}\n\nenum Tag {\n Security\n AI\n Blockchain\n}\n\nfunction GetQuery(query: string) -> SearchParams {\n client GPT4\n prompt #\"\n Extract the following information from the query:\n\n Query:\n <<<<\n {{ query }}\n <<<<\n\n OUTPUT_JSON_SCHEMA:\n {{ ctx.output_format }}\n\n Before OUTPUT_JSON_SCHEMA, list 5 intentions the user may have.\n --- EXAMPLES ---\n 1. \n 2. \n 3. \n 4. \n 5. \n\n {\n ... // OUTPUT_JSON_SCHEMA\n }\n \"#\n}\n\nclass RaysData {\n dataType DataType\n value Resume | Event\n}\n\nenum DataType {\n Resume\n Event\n}\n\nclass Event {\n title string\n date string\n location string\n description string\n}\n\nfunction GetDataType(text: string) -> RaysData {\n client GPT4\n prompt #\"\n Extract the relevant info.\n\n Text:\n <<<<\n {{ text }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}",
@description(#\"\n An exact job title, not a general category.\n \"#)\n company WithReasoning? @description(#\"\n The exact name of the company, not a product or service.\n \"#)\n description WithReasoning[] @description(#\"\n Any specific projects or features the user is looking for.\n \"#)\n tags (Tag | string)[]\n}\n\nenum Tag {\n Security\n AI\n Blockchain\n}\n\nfunction GetQuery(query: string) -> SearchParams {\n client GPT4\n prompt #\"\n Extract the following information from the query:\n\n Query:\n <<<<\n {{ query }}\n <<<<\n\n OUTPUT_JSON_SCHEMA:\n {{ ctx.output_format }}\n\n Before OUTPUT_JSON_SCHEMA, list 5 intentions the user may have.\n --- EXAMPLES ---\n 1. \n 2. \n 3. \n 4. \n 5. \n\n {\n ... // OUTPUT_JSON_SCHEMA\n }\n \"#\n}\n\nclass RaysData {\n dataType DataType\n value Resume | Event\n}\n\nenum DataType {\n Resume\n Event\n}\n\nclass Event {\n title string\n date string\n location string\n description string\n}\n\nfunction GetDataType(text: string) -> RaysData {\n client GPT4\n prompt #\"\n Extract the relevant info.\n\n Text:\n <<<<\n {{ text }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}", "test-files/providers/providers.baml": "function TestAnthropic(input: string) -> string {\n client Claude\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAI(input: string) -> string {\n client GPT35\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOpenAILegacyProvider(input: string) -> string {\n client GPT35LegacyProvider\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestAzure(input: string) -> string {\n client GPT35Azure\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOllama(input: string) -> string {\n client Ollama\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestGemini(input: string) -> string {\n client Gemini\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\nfunction TestVertex(input: string) -> string {\n client Vertex\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n\n}\n\nfunction TestAws(input: string) -> string {\n client AwsBedrock\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\n\ntest TestProvider {\n functions [TestAnthropic, TestVertex, PromptTestOpenAI, TestAzure, TestOllama, TestGemini, TestAws]\n args {\n input \"Donkey kong and peanut butter\"\n }\n}\n\n\n\n", - "test-files/strategies/fallback.baml": "\nclient FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}", + "test-files/strategies/fallback.baml": "\nclient FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n Gemini\n\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}", "test-files/strategies/retry.baml": "\nretry_policy Exponential {\n 
max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Constant {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient RetryClientConstant {\n provider openai\n retry_policy Constant\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blah\"\n }\n}\n\nclient RetryClientExponential {\n provider openai\n retry_policy Exponential\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blahh\"\n }\n}\n\nfunction TestRetryConstant() -> string {\n client RetryClientConstant\n prompt #\"\n Say a haiku\n \"#\n}\n\nfunction TestRetryExponential() -> string {\n client RetryClientExponential\n prompt #\"\n Say a haiku\n \"#\n}\n", "test-files/strategies/roundrobin.baml": "", "test-files/testing_pipeline/output-format.baml": "class Recipe {\n ingredients map\n}\n\nclass Quantity {\n amount int | float\n unit string?\n}\n\nfunction AaaSamOutputFormat(recipe: string) -> Recipe {\n client GPT35\n prompt #\"\n Return this value back to me: {{recipe}}\n\n {{ctx.output_format(map_style='angle')}}\n \"#\n}\n\ntest MyOutput {\n functions [AaaSamOutputFormat]\n args {\n recipe #\"\n Here's a simple recipe for beef stew:\nIngredients:\n\n2 lbs beef chuck, cut into 1-inch cubes\n2 tbsp vegetable oil\n1 onion, diced\n3 carrots, sliced\n2 celery stalks, chopped\n2 potatoes, cubed\n3 cloves garlic, minced\n4 cups beef broth\n1 can (14.5 oz) diced tomatoes\n1 tbsp Worcestershire sauce\n1 tsp dried thyme\n1 bay leaf\nSalt and pepper to taste\n\nInstructions:\n\nSeason beef with salt and pepper. Heat oil in a large pot over medium-high heat. Brown the beef in batches, then set aside.\nIn the same pot, sauté onion, carrots, and celery until softened, about 5 minutes.\nAdd garlic and cook for another minute.\nReturn beef to the pot. Add broth, tomatoes, Worcestershire sauce, thyme, and bay leaf.\nBring to a boil, then reduce heat and simmer covered for 1 hour.\nAdd potatoes and continue simmering for 30-45 minutes, until beef and potatoes are tender.\nRemove bay leaf, adjust seasoning if needed, and serve hot.\n\nWould you like any additional information or variations on this recipe?\n \"#\n }\n}", diff --git a/integ-tests/ruby/baml_client/inlined.rb b/integ-tests/ruby/baml_client/inlined.rb index f8603eb49..e4004f1fd 100644 --- a/integ-tests/ruby/baml_client/inlined.rb +++ b/integ-tests/ruby/baml_client/inlined.rb @@ -66,7 +66,7 @@ module Inlined "test-files/functions/prompts/with-chat-messages.baml" => "\nfunction PromptTestOpenAIChat(input: string) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAIChatNoSystem(input: string) -> string {\n client GPT35\n prompt #\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChat(input: string) -> string {\n client Claude\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChatNoSystem(input: string) -> string {\n client Claude\n prompt 
#\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\ntest TestSystemAndNonSystemChat1 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"cats\"\n }\n}\n\ntest TestSystemAndNonSystemChat2 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"lion\"\n }\n}", "test-files/functions/v2/basic.baml" => "\n\nfunction ExtractResume2(resume: string) -> Resume {\n client GPT4\n prompt #\"\n {{ _.role('system') }}\n\n Extract the following information from the resume:\n\n Resume:\n <<<<\n {{ resume }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}\n\n\nclass WithReasoning {\n value string\n reasoning string @description(#\"\n Why the value is a good fit.\n \"#)\n}\n\n\nclass SearchParams {\n dateRange int? @description(#\"\n In ISO duration format, e.g. P1Y2M10D.\n \"#)\n location string[]\n jobTitle WithReasoning? @description(#\"\n An exact job title, not a general category.\n \"#)\n company WithReasoning? @description(#\"\n The exact name of the company, not a product or service.\n \"#)\n description WithReasoning[] @description(#\"\n Any specific projects or features the user is looking for.\n \"#)\n tags (Tag | string)[]\n}\n\nenum Tag {\n Security\n AI\n Blockchain\n}\n\nfunction GetQuery(query: string) -> SearchParams {\n client GPT4\n prompt #\"\n Extract the following information from the query:\n\n Query:\n <<<<\n {{ query }}\n <<<<\n\n OUTPUT_JSON_SCHEMA:\n {{ ctx.output_format }}\n\n Before OUTPUT_JSON_SCHEMA, list 5 intentions the user may have.\n --- EXAMPLES ---\n 1. \n 2. \n 3. \n 4. \n 5. \n\n {\n ... 
// OUTPUT_JSON_SCHEMA\n }\n \"#\n}\n\nclass RaysData {\n dataType DataType\n value Resume | Event\n}\n\nenum DataType {\n Resume\n Event\n}\n\nclass Event {\n title string\n date string\n location string\n description string\n}\n\nfunction GetDataType(text: string) -> RaysData {\n client GPT4\n prompt #\"\n Extract the relevant info.\n\n Text:\n <<<<\n {{ text }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}", "test-files/providers/providers.baml" => "function TestAnthropic(input: string) -> string {\n client Claude\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAI(input: string) -> string {\n client GPT35\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOpenAILegacyProvider(input: string) -> string {\n client GPT35LegacyProvider\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestAzure(input: string) -> string {\n client GPT35Azure\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOllama(input: string) -> string {\n client Ollama\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestGemini(input: string) -> string {\n client Gemini\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\nfunction TestVertex(input: string) -> string {\n client Vertex\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n\n}\n\nfunction TestAws(input: string) -> string {\n client AwsBedrock\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\n\ntest TestProvider {\n functions [TestAnthropic, TestVertex, PromptTestOpenAI, TestAzure, TestOllama, TestGemini, TestAws]\n args {\n input \"Donkey kong and peanut butter\"\n }\n}\n\n\n\n", - "test-files/strategies/fallback.baml" => "\nclient FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}", + "test-files/strategies/fallback.baml" => "\nclient FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n Gemini\n\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}", "test-files/strategies/retry.baml" => "\nretry_policy Exponential {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Constant {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient RetryClientConstant {\n provider openai\n retry_policy Constant\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blah\"\n }\n}\n\nclient RetryClientExponential {\n provider openai\n retry_policy Exponential\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blahh\"\n }\n}\n\nfunction TestRetryConstant() -> string {\n client RetryClientConstant\n prompt #\"\n Say a haiku\n \"#\n}\n\nfunction TestRetryExponential() -> string {\n client RetryClientExponential\n prompt #\"\n Say a haiku\n \"#\n}\n", "test-files/strategies/roundrobin.baml" => "", 
"test-files/testing_pipeline/output-format.baml" => "class Recipe {\n ingredients map\n}\n\nclass Quantity {\n amount int | float\n unit string?\n}\n\nfunction AaaSamOutputFormat(recipe: string) -> Recipe {\n client GPT35\n prompt #\"\n Return this value back to me: {{recipe}}\n\n {{ctx.output_format(map_style='angle')}}\n \"#\n}\n\ntest MyOutput {\n functions [AaaSamOutputFormat]\n args {\n recipe #\"\n Here's a simple recipe for beef stew:\nIngredients:\n\n2 lbs beef chuck, cut into 1-inch cubes\n2 tbsp vegetable oil\n1 onion, diced\n3 carrots, sliced\n2 celery stalks, chopped\n2 potatoes, cubed\n3 cloves garlic, minced\n4 cups beef broth\n1 can (14.5 oz) diced tomatoes\n1 tbsp Worcestershire sauce\n1 tsp dried thyme\n1 bay leaf\nSalt and pepper to taste\n\nInstructions:\n\nSeason beef with salt and pepper. Heat oil in a large pot over medium-high heat. Brown the beef in batches, then set aside.\nIn the same pot, sauté onion, carrots, and celery until softened, about 5 minutes.\nAdd garlic and cook for another minute.\nReturn beef to the pot. Add broth, tomatoes, Worcestershire sauce, thyme, and bay leaf.\nBring to a boil, then reduce heat and simmer covered for 1 hour.\nAdd potatoes and continue simmering for 30-45 minutes, until beef and potatoes are tender.\nRemove bay leaf, adjust seasoning if needed, and serve hot.\n\nWould you like any additional information or variations on this recipe?\n \"#\n }\n}", diff --git a/integ-tests/typescript/baml_client/inlinedbaml.ts b/integ-tests/typescript/baml_client/inlinedbaml.ts index 08353ef6f..06ade94ee 100644 --- a/integ-tests/typescript/baml_client/inlinedbaml.ts +++ b/integ-tests/typescript/baml_client/inlinedbaml.ts @@ -67,7 +67,7 @@ const fileMap = { "test-files/functions/prompts/with-chat-messages.baml": "\nfunction PromptTestOpenAIChat(input: string) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAIChatNoSystem(input: string) -> string {\n client GPT35\n prompt #\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChat(input: string) -> string {\n client Claude\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChatNoSystem(input: string) -> string {\n client Claude\n prompt #\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\ntest TestSystemAndNonSystemChat1 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"cats\"\n }\n}\n\ntest TestSystemAndNonSystemChat2 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"lion\"\n }\n}", 
"test-files/functions/v2/basic.baml": "\n\nfunction ExtractResume2(resume: string) -> Resume {\n client GPT4\n prompt #\"\n {{ _.role('system') }}\n\n Extract the following information from the resume:\n\n Resume:\n <<<<\n {{ resume }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}\n\n\nclass WithReasoning {\n value string\n reasoning string @description(#\"\n Why the value is a good fit.\n \"#)\n}\n\n\nclass SearchParams {\n dateRange int? @description(#\"\n In ISO duration format, e.g. P1Y2M10D.\n \"#)\n location string[]\n jobTitle WithReasoning? @description(#\"\n An exact job title, not a general category.\n \"#)\n company WithReasoning? @description(#\"\n The exact name of the company, not a product or service.\n \"#)\n description WithReasoning[] @description(#\"\n Any specific projects or features the user is looking for.\n \"#)\n tags (Tag | string)[]\n}\n\nenum Tag {\n Security\n AI\n Blockchain\n}\n\nfunction GetQuery(query: string) -> SearchParams {\n client GPT4\n prompt #\"\n Extract the following information from the query:\n\n Query:\n <<<<\n {{ query }}\n <<<<\n\n OUTPUT_JSON_SCHEMA:\n {{ ctx.output_format }}\n\n Before OUTPUT_JSON_SCHEMA, list 5 intentions the user may have.\n --- EXAMPLES ---\n 1. \n 2. \n 3. \n 4. \n 5. \n\n {\n ... // OUTPUT_JSON_SCHEMA\n }\n \"#\n}\n\nclass RaysData {\n dataType DataType\n value Resume | Event\n}\n\nenum DataType {\n Resume\n Event\n}\n\nclass Event {\n title string\n date string\n location string\n description string\n}\n\nfunction GetDataType(text: string) -> RaysData {\n client GPT4\n prompt #\"\n Extract the relevant info.\n\n Text:\n <<<<\n {{ text }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}", "test-files/providers/providers.baml": "function TestAnthropic(input: string) -> string {\n client Claude\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAI(input: string) -> string {\n client GPT35\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOpenAILegacyProvider(input: string) -> string {\n client GPT35LegacyProvider\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestAzure(input: string) -> string {\n client GPT35Azure\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOllama(input: string) -> string {\n client Ollama\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestGemini(input: string) -> string {\n client Gemini\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\nfunction TestVertex(input: string) -> string {\n client Vertex\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n\n}\n\nfunction TestAws(input: string) -> string {\n client AwsBedrock\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\n\ntest TestProvider {\n functions [TestAnthropic, TestVertex, PromptTestOpenAI, TestAzure, TestOllama, TestGemini, TestAws]\n args {\n input \"Donkey kong and peanut butter\"\n }\n}\n\n\n\n", - "test-files/strategies/fallback.baml": "\nclient FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}", + 
"test-files/strategies/fallback.baml": "\nclient FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n Gemini\n\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}", "test-files/strategies/retry.baml": "\nretry_policy Exponential {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Constant {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient RetryClientConstant {\n provider openai\n retry_policy Constant\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blah\"\n }\n}\n\nclient RetryClientExponential {\n provider openai\n retry_policy Exponential\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blahh\"\n }\n}\n\nfunction TestRetryConstant() -> string {\n client RetryClientConstant\n prompt #\"\n Say a haiku\n \"#\n}\n\nfunction TestRetryExponential() -> string {\n client RetryClientExponential\n prompt #\"\n Say a haiku\n \"#\n}\n", "test-files/strategies/roundrobin.baml": "", "test-files/testing_pipeline/output-format.baml": "class Recipe {\n ingredients map\n}\n\nclass Quantity {\n amount int | float\n unit string?\n}\n\nfunction AaaSamOutputFormat(recipe: string) -> Recipe {\n client GPT35\n prompt #\"\n Return this value back to me: {{recipe}}\n\n {{ctx.output_format(map_style='angle')}}\n \"#\n}\n\ntest MyOutput {\n functions [AaaSamOutputFormat]\n args {\n recipe #\"\n Here's a simple recipe for beef stew:\nIngredients:\n\n2 lbs beef chuck, cut into 1-inch cubes\n2 tbsp vegetable oil\n1 onion, diced\n3 carrots, sliced\n2 celery stalks, chopped\n2 potatoes, cubed\n3 cloves garlic, minced\n4 cups beef broth\n1 can (14.5 oz) diced tomatoes\n1 tbsp Worcestershire sauce\n1 tsp dried thyme\n1 bay leaf\nSalt and pepper to taste\n\nInstructions:\n\nSeason beef with salt and pepper. Heat oil in a large pot over medium-high heat. Brown the beef in batches, then set aside.\nIn the same pot, sauté onion, carrots, and celery until softened, about 5 minutes.\nAdd garlic and cook for another minute.\nReturn beef to the pot. Add broth, tomatoes, Worcestershire sauce, thyme, and bay leaf.\nBring to a boil, then reduce heat and simmer covered for 1 hour.\nAdd potatoes and continue simmering for 30-45 minutes, until beef and potatoes are tender.\nRemove bay leaf, adjust seasoning if needed, and serve hot.\n\nWould you like any additional information or variations on this recipe?\n \"#\n }\n}", diff --git a/typescript/playground-common/src/baml_wasm_web/EventListener.tsx b/typescript/playground-common/src/baml_wasm_web/EventListener.tsx index d07109245..96050fd9c 100644 --- a/typescript/playground-common/src/baml_wasm_web/EventListener.tsx +++ b/typescript/playground-common/src/baml_wasm_web/EventListener.tsx @@ -16,7 +16,6 @@ import type { WasmScope, } from '@gloo-ai/baml-schema-wasm-web/baml_schema_build' import { vscode } from '../utils/vscode' -// import { v4 as uuid } from 'uuid' import { useRunHooks } from './test_uis/testHooks' // const wasm = await import("@gloo-ai/baml-schema-wasm-web/baml_schema_build"); // const { WasmProject, WasmRuntime, WasmRuntimeContext, version: RuntimeVersion } = wasm;