Skip to content

Commit

Permalink
fixed build
Browse files Browse the repository at this point in the history
  • Loading branch information
anish-palakurthi committed Jul 25, 2024
1 parent 2dead0d commit f631b15
Show file tree
Hide file tree
Showing 6 changed files with 5 additions and 151 deletions.
147 changes: 0 additions & 147 deletions engine/baml-schema-wasm/src/runtime_wasm/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1145,31 +1145,6 @@ impl WasmRuntime {
None
}

/// Find the test case in `file_name` whose source span contains `cursor_idx`,
/// then return the parent function (if any) whose own range covers the cursor.
///
/// Spans are compared 1-based and end-inclusive to match editor cursors.
/// Returns `None` when no test case span matches, or when the matching test
/// case has no parent function covering the cursor.
#[wasm_bindgen]
pub fn get_function_of_testcase(
    &self,
    file_name: &str,
    cursor_idx: usize,
) -> Option<WasmParentFunction> {
    for tc in self.list_testcases() {
        let span = &tc.span;
        let cursor_in_span = span.file_path.as_str().ends_with(file_name)
            && ((span.start + 1)..=(span.end + 1)).contains(&cursor_idx);

        if cursor_in_span {
            // Stop at the first matching test case, even if the lookup below
            // yields no parent function.
            return tc
                .parent_functions
                .iter()
                .find(|f| f.start <= cursor_idx && cursor_idx <= f.end)
                .cloned();
        }
    }
    None
}

#[wasm_bindgen]
pub fn list_testcases(&self) -> Vec<WasmTestCase> {
self.runtime
Expand Down Expand Up @@ -1302,128 +1277,6 @@ impl WasmRuntime {
}
None
}

/// Collect every test case known to the runtime, resolving parameters against
/// the current environment variables.
///
/// Each returned `WasmTestCase` carries:
/// - the test's parameter values serialized to JSON strings, with
///   resolution/serialization failures captured per parameter (or as a
///   test-level `error` when the whole parameter set failed to resolve),
/// - explicit "Missing parameter" placeholder entries (inserted at the front)
///   for required function inputs the test does not supply,
/// - the test's source span (`WasmSpan::default()` when unknown),
/// - the functions the test belongs to, with their source ranges
///   ((0, 0) when a function has no span).
#[wasm_bindgen]
pub fn list_testcases(&self) -> Vec<WasmTestCase> {
    self.runtime
        .internal()
        .ir()
        .walk_tests()
        .map(|tc| {
            let params = match tc.test_case_params(&self.runtime.env_vars()) {
                Ok(params) => Ok(params
                    .iter()
                    .map(|(k, v)| {
                        // Serialize each resolved value; keep the error text
                        // when resolution or serialization fails.
                        let as_str = match v {
                            Ok(v) => serde_json::to_string(v).map_err(|e| e.to_string()),
                            Err(e) => Err(e.to_string()),
                        };

                        let (value, error) = match as_str {
                            Ok(s) => (Some(s), None),
                            Err(e) => (None, Some(e)),
                        };

                        WasmParam {
                            name: k.to_string(),
                            value,
                            error,
                        }
                    })
                    .collect()),
                Err(e) => Err(e.to_string()),
            };

            let (mut params, error) = match params {
                Ok(p) => (p, None),
                Err(e) => (Vec::new(), Some(e)),
            };

            // Required function inputs the test does not supply become
            // explicit error placeholders at the front of the list.
            // (Previously written as a side-effecting `.map()` whose result
            // was discarded via `let _`; `if let` states the intent directly.)
            if let Some(func_params) = tc.function().inputs().right() {
                for (param_name, t) in func_params {
                    if !t.is_optional() && !params.iter().any(|p| p.name == *param_name) {
                        params.insert(
                            0,
                            WasmParam {
                                name: param_name.to_string(),
                                value: None,
                                error: Some("Missing parameter".to_string()),
                            },
                        );
                    }
                }
            }

            let wasm_span = match tc.span() {
                Some(span) => span.into(),
                None => WasmSpan::default(),
            };

            WasmTestCase {
                name: tc.test_case().name.clone(),
                inputs: params,
                error,
                span: wasm_span,
                parent_functions: tc
                    .test_case()
                    .functions
                    .iter()
                    .map(|f| {
                        // Functions without a span fall back to (0, 0).
                        let (start, end) = f
                            .attributes
                            .span
                            .as_ref()
                            .map_or((0, 0), |s| (s.start, s.end));
                        WasmParentFunction {
                            start,
                            end,
                            name: f.elem.name().to_string(),
                        }
                    })
                    .collect(),
            }
        })
        .collect()
}

/// Return the test case from `parent_function`'s own test list whose span
/// contains `cursor_idx`, restricted to test cases declared in the same file
/// as the function.
///
/// Spans are compared 1-based and end-inclusive to match editor cursors.
/// Fix: the original cloned the entire `WasmTestCase` on every iteration
/// (`testcase.clone().span`) just to read its span; we now borrow the span
/// during the search and move only the matching case out via `find`.
#[wasm_bindgen]
pub fn get_testcase_from_position(
    &self,
    parent_function: WasmFunction,
    cursor_idx: usize,
) -> Option<WasmTestCase> {
    let file_path = parent_function.span.file_path;
    parent_function.test_cases.into_iter().find(|testcase| {
        let span = &testcase.span;
        span.file_path.as_str() == file_path
            && ((span.start + 1)..=(span.end + 1)).contains(&cursor_idx)
    })
}
}
/// Serializable projection of an `OrchestratorNode`, keeping only the fields
/// that need to cross the wasm boundary.
#[wasm_bindgen(getter_with_clone, inspectable)]
#[derive(Serialize, Deserialize, Debug)]
pub struct SerializableOrchestratorNode {
    // Provider identifier taken from the node (see the `From` impl below,
    // which fills it via `node.provider.to_string()`).
    pub provider: String,
}

impl From<&OrchestratorNode> for SerializableOrchestratorNode {
fn from(node: &OrchestratorNode) -> Self {
SerializableOrchestratorNode {
provider: node.provider.to_string(),
}
}
}
// Define a new struct to store the important information
#[wasm_bindgen(getter_with_clone, inspectable)]
Expand Down
2 changes: 2 additions & 0 deletions integ-tests/baml_src/test-files/strategies/fallback.baml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ client<llm> FallbackClient {
FaultyClient,
RetryClientConstant,
GPT35
Gemini

]
}
}
Expand Down
2 changes: 1 addition & 1 deletion integ-tests/python/baml_client/inlinedbaml.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@
"test-files/functions/prompts/with-chat-messages.baml": "\nfunction PromptTestOpenAIChat(input: string) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAIChatNoSystem(input: string) -> string {\n client GPT35\n prompt #\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChat(input: string) -> string {\n client Claude\n prompt #\"\n {{ _.role(\"system\") }}\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestClaudeChatNoSystem(input: string) -> string {\n client Claude\n prompt #\"\n You are an assistant that always responds in a very excited way with emojis and also outputs this word 4 times after giving a response: {{ input }}\n \n {{ _.role(\"user\") }}\n Tell me a haiku about {{ input }}\n \"#\n}\n\ntest TestSystemAndNonSystemChat1 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"cats\"\n }\n}\n\ntest TestSystemAndNonSystemChat2 {\n functions [PromptTestClaude, PromptTestOpenAI, PromptTestOpenAIChat, PromptTestOpenAIChatNoSystem, PromptTestClaudeChat, PromptTestClaudeChatNoSystem]\n args {\n input \"lion\"\n }\n}",
"test-files/functions/v2/basic.baml": "\n\nfunction ExtractResume2(resume: string) -> Resume {\n client GPT4\n prompt #\"\n {{ _.role('system') }}\n\n Extract the following information from the resume:\n\n Resume:\n <<<<\n {{ resume }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}\n\n\nclass WithReasoning {\n value string\n reasoning string @description(#\"\n Why the value is a good fit.\n \"#)\n}\n\n\nclass SearchParams {\n dateRange int? @description(#\"\n In ISO duration format, e.g. P1Y2M10D.\n \"#)\n location string[]\n jobTitle WithReasoning? @description(#\"\n An exact job title, not a general category.\n \"#)\n company WithReasoning? @description(#\"\n The exact name of the company, not a product or service.\n \"#)\n description WithReasoning[] @description(#\"\n Any specific projects or features the user is looking for.\n \"#)\n tags (Tag | string)[]\n}\n\nenum Tag {\n Security\n AI\n Blockchain\n}\n\nfunction GetQuery(query: string) -> SearchParams {\n client GPT4\n prompt #\"\n Extract the following information from the query:\n\n Query:\n <<<<\n {{ query }}\n <<<<\n\n OUTPUT_JSON_SCHEMA:\n {{ ctx.output_format }}\n\n Before OUTPUT_JSON_SCHEMA, list 5 intentions the user may have.\n --- EXAMPLES ---\n 1. <intent>\n 2. <intent>\n 3. <intent>\n 4. <intent>\n 5. <intent>\n\n {\n ... // OUTPUT_JSON_SCHEMA\n }\n \"#\n}\n\nclass RaysData {\n dataType DataType\n value Resume | Event\n}\n\nenum DataType {\n Resume\n Event\n}\n\nclass Event {\n title string\n date string\n location string\n description string\n}\n\nfunction GetDataType(text: string) -> RaysData {\n client GPT4\n prompt #\"\n Extract the relevant info.\n\n Text:\n <<<<\n {{ text }}\n <<<<\n\n Output JSON schema:\n {{ ctx.output_format }}\n\n JSON:\n \"#\n}",
"test-files/providers/providers.baml": "function TestAnthropic(input: string) -> string {\n client Claude\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction PromptTestOpenAI(input: string) -> string {\n client GPT35\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOpenAILegacyProvider(input: string) -> string {\n client GPT35LegacyProvider\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestAzure(input: string) -> string {\n client GPT35Azure\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestOllama(input: string) -> string {\n client Ollama\n prompt #\"\n Write a nice haiku about {{ input }}\n \"#\n}\n\nfunction TestGemini(input: string) -> string {\n client Gemini\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\nfunction TestVertex(input: string) -> string {\n client Vertex\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n\n}\n\nfunction TestAws(input: string) -> string {\n client AwsBedrock\n prompt #\"\n Write a nice short story about {{ input }}\n \"#\n}\n\n\ntest TestProvider {\n functions [TestAnthropic, TestVertex, PromptTestOpenAI, TestAzure, TestOllama, TestGemini, TestAws]\n args {\n input \"Donkey kong and peanut butter\"\n }\n}\n\n\n\n",
"test-files/strategies/fallback.baml": "\nclient<llm> FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient<llm> FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}",
"test-files/strategies/fallback.baml": "\nclient<llm> FaultyClient {\n provider openai\n options {\n model unknown-model\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient<llm> FallbackClient {\n provider fallback\n options {\n // first 2 clients are expected to fail.\n strategy [\n FaultyClient,\n RetryClientConstant,\n GPT35\n Gemini\n\n ]\n }\n}\n\nfunction TestFallbackClient() -> string {\n client FallbackClient\n // TODO make it return the client name instead\n prompt #\"\n Say a haiku about mexico.\n \"#\n}",
"test-files/strategies/retry.baml": "\nretry_policy Exponential {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Constant {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient<llm> RetryClientConstant {\n provider openai\n retry_policy Constant\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blah\"\n }\n}\n\nclient<llm> RetryClientExponential {\n provider openai\n retry_policy Exponential\n options {\n model \"gpt-3.5-turbo\"\n api_key \"blahh\"\n }\n}\n\nfunction TestRetryConstant() -> string {\n client RetryClientConstant\n prompt #\"\n Say a haiku\n \"#\n}\n\nfunction TestRetryExponential() -> string {\n client RetryClientExponential\n prompt #\"\n Say a haiku\n \"#\n}\n",
"test-files/strategies/roundrobin.baml": "",
"test-files/testing_pipeline/output-format.baml": "class Recipe {\n ingredients map<string, Quantity>\n}\n\nclass Quantity {\n amount int | float\n unit string?\n}\n\nfunction AaaSamOutputFormat(recipe: string) -> Recipe {\n client GPT35\n prompt #\"\n Return this value back to me: {{recipe}}\n\n {{ctx.output_format(map_style='angle')}}\n \"#\n}\n\ntest MyOutput {\n functions [AaaSamOutputFormat]\n args {\n recipe #\"\n Here's a simple recipe for beef stew:\nIngredients:\n\n2 lbs beef chuck, cut into 1-inch cubes\n2 tbsp vegetable oil\n1 onion, diced\n3 carrots, sliced\n2 celery stalks, chopped\n2 potatoes, cubed\n3 cloves garlic, minced\n4 cups beef broth\n1 can (14.5 oz) diced tomatoes\n1 tbsp Worcestershire sauce\n1 tsp dried thyme\n1 bay leaf\nSalt and pepper to taste\n\nInstructions:\n\nSeason beef with salt and pepper. Heat oil in a large pot over medium-high heat. Brown the beef in batches, then set aside.\nIn the same pot, sauté onion, carrots, and celery until softened, about 5 minutes.\nAdd garlic and cook for another minute.\nReturn beef to the pot. Add broth, tomatoes, Worcestershire sauce, thyme, and bay leaf.\nBring to a boil, then reduce heat and simmer covered for 1 hour.\nAdd potatoes and continue simmering for 30-45 minutes, until beef and potatoes are tender.\nRemove bay leaf, adjust seasoning if needed, and serve hot.\n\nWould you like any additional information or variations on this recipe?\n \"#\n }\n}",
Expand Down
Loading

0 comments on commit f631b15

Please sign in to comment.