Skip to content

Commit

Permalink
release 0.0.10 with anthropic support
Browse files Browse the repository at this point in the history
  • Loading branch information
jrhizor committed Mar 11, 2024
1 parent bb2c981 commit 936366a
Show file tree
Hide file tree
Showing 6 changed files with 190 additions and 23 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ All of these issues led me to create my own lightweight library.
| TypeScript library |||
| OpenAI generation support |||
| Cohere generation support |||
| Anthropic generation support |||
| Emphasis on typed LLM outputs |||
| Easily composable multi-step LLM workflows |||
| Convenient API for single chat completions |||
Expand Down
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "elelem",
"version": "0.0.9",
"version": "0.0.10",
"description": "Simple, opinionated, JSON-typed, and traced LLM framework for TypeScript.",
"main": "dist/index.js",
"types": "dist/index.d.ts",
Expand Down Expand Up @@ -30,6 +30,7 @@
"homepage": "https://github.com/jrhizor/elelem#readme",
"dependencies": {
"@anatine/zod-mock": "^3.13.3",
"@anthropic-ai/sdk": "^0.17.1",
"@faker-js/faker": "^8.2.0",
"@opentelemetry/api": "^1.6.0",
"@types/object-hash": "^3.0.5",
Expand Down
47 changes: 47 additions & 0 deletions src/elelem.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import {
LangchainJsonSchemaFormatter,
} from "./formatters";
import { ConsoleSpanExporter } from "@opentelemetry/sdk-trace-node";
import Anthropic from "@anthropic-ai/sdk";

const sdk = new opentelemetry.NodeSDK({
serviceName: "elelem-test",
Expand All @@ -33,9 +34,14 @@ const cohere = new CohereClient({
token: process.env.COHERE_API_KEY || "",
});

// Anthropic client for the e2e tests.
// NOTE(review): the original read only `process.env.ANTHROPIC`, which is
// inconsistent with the other clients in this file (COHERE_API_KEY above) and
// with the SDK's conventional `ANTHROPIC_API_KEY`. Keep the old variable for
// backward compatibility and fall back to the standard one, defaulting to ""
// like the cohere client does.
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC || process.env.ANTHROPIC_API_KEY || "",
});

const llm = elelem.init({
openai: openAiClient,
cohere: cohere,
anthropic: anthropic,
cache: { redis: redisClient },
});

Expand Down Expand Up @@ -452,3 +458,44 @@ describe("action", () => {
expect(counter).toBe(1);
});
});

// End-to-end test for the Anthropic backend: runs a two-step session
// (country -> capital, capital -> city facts) and checks the typed output.
// "capitol" spelling follows the schemas used by the other tests in this file.
describe("anthropic", () => {
  test("e2e example", async () => {
    const { result, usage } = await llm.session(
      "e2e-example",
      // Session-level default model; per-call options below only set
      // max_tokens/temperature and inherit the model from here.
      { anthropic: { model: "claude-3-opus-20240229" } },
      async (c) => {
        // Step 1: country name -> capital city, parsed against
        // capitolResponseSchema via the formatter.
        const { result: capitol } = await c.anthropic(
          "capitol",
          { max_tokens: 100, temperature: 0 },
          `What is the capitol of the country provided?`,
          "USA",
          capitolResponseSchema,
          JsonSchemaAndExampleFormatter,
        );
        console.log("capitol", capitol);

        // Step 2: feed the extracted capital back in to get founding year
        // and a population estimate, parsed against cityResponseSchema.
        const { result: cityDescription } = await c.anthropic(
          "city-description",
          {
            max_tokens: 100,
            temperature: 0,
          },
          `For the given capitol city, return the founding year and an estimate of the population of the city.`,
          capitol.capitol,
          cityResponseSchema,
          JsonSchemaAndExampleFormatter,
        );
        console.log("cityDescription", cityDescription);

        return cityDescription;
      },
    );

    console.log(result);
    console.log(usage);

    // Washington, D.C. — temperature 0 keeps these deterministic enough to pin.
    expect(result.foundingYear).toBe("1790");
    expect(result.populationEstimate).toBeGreaterThan(500000);
  }, 20000); // generous timeout: two live API round-trips
});
134 changes: 112 additions & 22 deletions src/elelem.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ import { ChatCompletionCreateParamsNonStreaming } from "openai/resources/chat/co
import { ZodType } from "zod";
import {GenerateRequest} from "cohere-ai/api";
import {CohereClient} from "cohere-ai";
import Anthropic from "@anthropic-ai/sdk";
import {MessageCreateParamsNonStreaming} from "@anthropic-ai/sdk/resources";

function getTracer() {
return trace.getTracer("elelem", "0.0.1");
Expand Down Expand Up @@ -130,6 +132,48 @@ const callCohereApi = async (
});
};

/**
 * Sends a single non-streaming message request to the Anthropic API inside a
 * traced span, recording both prompts and the raw text response as span
 * attributes.
 *
 * @param anthropic - configured Anthropic SDK client
 * @param systemPromptWithFormat - system prompt, already suffixed with the
 *   formatter's output-format instructions
 * @param userPrompt - the user-turn content
 * @param modelOptions - model/sampling options; `messages` and `system` are
 *   supplied here and therefore excluded from the type
 * @returns the text of the first content block of the response
 * @throws Error when the API returns no content block or a null text value
 */
const callAnthropicApi = async (
  anthropic: Anthropic,
  systemPromptWithFormat: string,
  userPrompt: string,
  modelOptions: Omit<MessageCreateParamsNonStreaming, "messages" | "system">,
): Promise<string> => {
  // Span was previously named `openai-call` (copy-paste from the OpenAI
  // helper), which mislabeled these calls in traces.
  return await getTracer().startActiveSpan(`anthropic-call`, async (span) => {
    span.setAttribute("anthropic.prompt.system", systemPromptWithFormat);
    span.setAttribute("anthropic.prompt.user", userPrompt);

    try {
      const chat = await anthropic.messages.create({
        ...modelOptions,
        system: systemPromptWithFormat,
        messages: [{ role: "user", content: userPrompt }],
      });

      // Anthropic returns an array of content blocks; guard against an empty
      // or missing array before indexing into it.
      const firstBlock = chat?.content?.[0];
      if (firstBlock === undefined) {
        throw new Error("No chat response from api!");
      }

      const response = firstBlock.text;
      if (response === null || response === undefined) {
        throw new Error("Null response from api!");
      }

      // todo: add usage tracking for anthropic

      span.setAttribute("anthropic.response", response);
      return response;
    } finally {
      // Guarantee the span is closed even when the request or response
      // validation throws (the original leaked the span on API errors).
      span.end();
    }
  });
};

async function withRetries<T>(
spanName: string,
operation: (span: Span, parentSpan: Span) => Promise<T>,
Expand Down Expand Up @@ -364,7 +408,7 @@ async function generate<T, ModelOpt extends object>(

export const elelem: Elelem = {
init: (config: ElelemConfig) => {
const { backoffOptions, cache: cacheConfig, openai, cohere } = config;
const { backoffOptions, cache: cacheConfig, openai, cohere, anthropic } = config;

const cache: ElelemCache = getCache(cacheConfig || {});

Expand Down Expand Up @@ -434,29 +478,29 @@ export const elelem: Elelem = {
);
},
cohere: async (
chatId,
modelOptions,
systemPrompt,
userPrompt,
schema,
formatter: ElelemFormatter,
chatId,
modelOptions,
systemPrompt,
userPrompt,
schema,
formatter: ElelemFormatter,
) => {
if (cohere === undefined) {
throw new Error("You must configure Cohere!");
}

const apiCaller = async (
systemPromptWithFormat: string,
userPrompt: string,
combinedOptions: Omit<GenerateRequest, "prompt"> & {
max_tokens: number;
},
systemPromptWithFormat: string,
userPrompt: string,
combinedOptions: Omit<GenerateRequest, "prompt"> & {
max_tokens: number;
},
): Promise<string> => {
return await callCohereApi(
cohere,
systemPromptWithFormat,
userPrompt,
combinedOptions,
cohere,
systemPromptWithFormat,
userPrompt,
combinedOptions,
);
};

Expand All @@ -472,15 +516,61 @@ export const elelem: Elelem = {
const prefixedUserPrompt = `\nInput: ${userPrompt}`;

return await generate(
chatId,
combinedOptions,
systemPrompt,
prefixedUserPrompt,
schema,
formatter,
backoffOptions,
cache,
apiCaller,
);
},
anthropic: async (
chatId,
combinedOptions,
modelOptions,
systemPrompt,
prefixedUserPrompt,
userPrompt,
schema,
formatter,
backoffOptions,
cache,
apiCaller,
formatter: ElelemFormatter,
) => {
if (
anthropic === undefined
) {
throw new Error("You must configure Anthropic!");
}

const apiCaller = async (
systemPromptWithFormat: string,
userPrompt: string,
combinedOptions: Omit<MessageCreateParamsNonStreaming, "messages" | "system">,
): Promise<string> => {
return await callAnthropicApi(
anthropic,
systemPromptWithFormat,
userPrompt,
combinedOptions,
);
};

const combinedOptions: Omit<MessageCreateParamsNonStreaming, "messages" | "system"> = {
model: "claude-3-opus-20240229",
max_tokens: 200,
...defaultModelOptions.anthropic,
...modelOptions,
};

return await generate(
chatId,
combinedOptions,
systemPrompt,
userPrompt,
schema,
formatter,
backoffOptions,
cache,
apiCaller,
);
},
action: async <AC extends object, T>(
Expand Down
13 changes: 13 additions & 0 deletions src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ import { CompletionUsage } from "openai/resources";
import { Span } from "@opentelemetry/api";
import {GenerateRequest, GenerationFinalResponse} from "cohere-ai/api";
import {CohereClient} from "cohere-ai";
import Anthropic from '@anthropic-ai/sdk';
import {MessageCreateParamsNonStreaming} from "@anthropic-ai/sdk/resources";

export interface ElelemCache {
// keys will be hashed using object-hash
Expand Down Expand Up @@ -37,6 +39,7 @@ export interface ElelemConfig {
cache?: ElelemCacheConfig;
openai?: OpenAI;
cohere?: CohereClient;
anthropic?: Anthropic;
}

export interface Elelem {
Expand All @@ -48,6 +51,7 @@ export type ElelemFormatter = <T>(schema: ZodType<T>) => string;
export interface ElelemModelOptions {
openai?: Omit<ChatCompletionCreateParamsNonStreaming, "messages">;
cohere?: Partial<Omit<GenerateRequest, "prompt">>;
anthropic?: Partial<Omit<MessageCreateParamsNonStreaming, "messages" | "system">>;
}

export interface PartialElelemModelOptions {
Expand Down Expand Up @@ -84,6 +88,15 @@ export interface ElelemContext {
formatter: ElelemFormatter,
) => Promise<{ result: T; usage: ElelemUsage }>;

  /**
   * Runs a single Anthropic chat completion within this session, formats the
   * system prompt with the schema instructions produced by `formatter`, and
   * parses the model's reply against `schema`.
   *
   * @param chatId - identifier for this call, used for tracing/caching
   * @param modelOptions - Anthropic options; `messages` and `system` are
   *   managed internally and merged with the session-level defaults
   * @param systemPrompt - system prompt (format instructions are appended)
   * @param userPrompt - user-turn content
   * @param schema - zod schema the response text must parse into
   * @param formatter - renders the schema as prompt-embeddable instructions
   * @returns the schema-typed result plus token usage for the call
   */
  anthropic: <T>(
    chatId: string,
    modelOptions: Partial<Omit<MessageCreateParamsNonStreaming, "messages" | "system">>,
    systemPrompt: string,
    userPrompt: string,
    schema: ZodType<T>,
    formatter: ElelemFormatter,
  ) => Promise<{ result: T; usage: ElelemUsage }>;

action: <AC extends object, T>(
actionId: string,
actionContext: AC,
Expand Down
15 changes: 15 additions & 0 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,21 @@
dependencies:
randexp "^0.5.3"

"@anthropic-ai/sdk@^0.17.1":
version "0.17.1"
resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.17.1.tgz#31f712f3a47091cf493e1a70f6ddae1ec9ad6afb"
integrity sha512-ke/JGfaa4sc1PB58L4B9hXI/BlJphXc696+cVX8Z8gQt51l++a9umZTN/7UymV8Dcat6KKYNQE8P8yeeyAldHg==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
digest-fetch "^1.3.0"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"
web-streams-polyfill "^3.2.1"

"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.22.13":
version "7.22.13"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e"
Expand Down

0 comments on commit 936366a

Please sign in to comment.