diff --git a/docs/assets/images/docs_latest/vscode-output-dropdown.png b/docs/assets/images/docs_latest/vscode-output-dropdown.png new file mode 100644 index 000000000..f24a4669f Binary files /dev/null and b/docs/assets/images/docs_latest/vscode-output-dropdown.png differ diff --git a/docs/assets/images/docs_latest/vscode-reload-window.png b/docs/assets/images/docs_latest/vscode-reload-window.png new file mode 100644 index 000000000..950bbb048 Binary files /dev/null and b/docs/assets/images/docs_latest/vscode-reload-window.png differ diff --git a/docs/docs.yml b/docs/docs.yml index b0c6a8c02..0f75448cd 100644 --- a/docs/docs.yml +++ b/docs/docs.yml @@ -8,7 +8,6 @@ instances: repo: baml branch: canary - title: BAML Documentation navigation: @@ -25,8 +24,6 @@ navigation: path: docs/get-started/quickstart/python.mdx - page: TypeScript path: docs/get-started/quickstart/typescript.mdx - - page: Ruby - path: docs/get-started/quickstart/ruby.mdx - page: Any Language (OpenAPI) path: docs/get-started/quickstart/openapi.mdx - page: VSCode @@ -100,6 +97,8 @@ navigation: path: docs/snippets/clients/providers/vllm.mdx - page: LMStudio path: docs/snippets/clients/providers/lmstudio.mdx + - page: KeywordsAI + path: docs/snippets/clients/providers/keywordsai.mdx - section: provider strategies contents: - page: fallback @@ -150,7 +149,7 @@ navigation: path: docs/calling-baml/dynamic-types.mdx - page: Client Registry path: docs/calling-baml/client-registry.mdx - - section: BAML with Python/TS/Ruby + - section: BAML with Python/TS/OpenAPI contents: - page: Generate the BAML Client path: docs/calling-baml/generate-baml-client.mdx diff --git a/docs/docs/baml-nextjs/baml-nextjs.mdx b/docs/docs/baml-nextjs/baml-nextjs.mdx index 159148b62..f035aa193 100644 --- a/docs/docs/baml-nextjs/baml-nextjs.mdx +++ b/docs/docs/baml-nextjs/baml-nextjs.mdx @@ -20,94 +20,97 @@ You will need to use Server Actions, from the App Router, for this tutorial. 
You - Install the VSCode extension and Save a baml file to generate the client (or use `npx baml-cli generate`). -### Create streamable baml server actions +### Create some helper utilities to stream BAML functions Let's add some helpers to export our baml functions as streamable server actions. See the last line in this file, where we export the `extractResume` function. -In `app/actions/streamable_objects.tsx` add the following code: +In `app/utils/streamableObject.tsx` add the following code: ```typescript -"use server"; -import { createStreamableValue, StreamableValue } from "ai/rsc"; -import { b, Resume } from "@/baml_client"; +import { createStreamableValue, StreamableValue as BaseStreamableValue } from "ai/rsc"; import { BamlStream } from "@boundaryml/baml"; +import { b } from "@/baml_client"; // You can change the path of this to wherever your baml_client is located. -const MAX_ERROR_LENGTH = 3000; -const TRUNCATION_MARKER = "[ERROR_LOG_TRUNCATED]"; -function truncateError(error: string): string { - if (error.length <= MAX_ERROR_LENGTH) return error; - const halfLength = Math.floor( - (MAX_ERROR_LENGTH - TRUNCATION_MARKER.length) / 2 - ); - return ( - error.slice(0, halfLength) + TRUNCATION_MARKER + error.slice(-halfLength) - ); -} - -type BamlStreamReturnType = T extends BamlStream ? P : never; - -type StreamFunction = (...args: any[]) => BamlStream; +// ------------------------------ +// Helper functions +// ------------------------------ +/** + * Type alias for defining a StreamableValue based on a BamlStream. + * It captures either a partial or final result depending on the stream state. + */ +type StreamableValue> = + | { partial: T extends BamlStream ? StreamRet : never } + | { final: T extends BamlStream ? Ret : never }; -async function streamHelper( - streamFunction: (...args: any[]) => BamlStream, - ...args: Parameters +/** + * Helper function to manage and handle a BamlStream. 
+ * It consumes the stream, updates the streamable value for each partial event, + * and finalizes the stream when complete. + * + * @param bamlStream - The BamlStream to be processed. + * @returns A promise that resolves with an object containing the BaseStreamableValue. + */ +export async function streamHelper>( + bamlStream: T, ): Promise<{ - object: StreamableValue>; + object: BaseStreamableValue>; }> { - const stream = createStreamableValue(); + const stream = createStreamableValue>(); + // Asynchronous function to process the BamlStream events (async () => { try { - const bamlStream = streamFunction(...args); + // Iterate through the stream and update the stream value with partial data for await (const event of bamlStream) { - console.log("event", event); - if (event) { - stream.update(event as T); - } + stream.update({ partial: event }); } + + // Obtain the final response once all events are processed const response = await bamlStream.getFinalResponse(); - stream.update(response as T); - stream.done(); + stream.done({ final: response }); } catch (err) { - const errorMsg = truncateError((err as Error).message); - console.log("error", errorMsg); - stream.error(errorMsg); + // Handle any errors during stream processing + stream.error(err); } })(); return { object: stream.value }; } -const streamableFunctions = { - extractResume: b.stream.ExtractResume, - extractUnstructuredResume: b.stream.ExtractResumeNoStructure, - analyzeBook: b.stream.AnalyzeBooks, - answerQuestion: b.stream.AnswerQuestion, - getRecipe: b.stream.GetRecipe, -} as const; - -type StreamableFunctionName = keyof typeof streamableFunctions; - -function createStreamableFunction( - functionName: T -): (...args: Parameters<(typeof streamableFunctions)[T]>) => Promise<{ - object: StreamableValue< - Partial>> - >; +/** + * Utility function to create a streamable function from a BamlStream-producing function. + * This function returns an asynchronous function that manages the streaming process. 
+ * + * @param func - A function that produces a BamlStream when called. + * @returns An asynchronous function that returns a BaseStreamableValue for the stream. + */ +export function makeStreamable< + BamlStreamFunc extends (...args: any) => BamlStream, +>( + func: BamlStreamFunc +): (...args: Parameters) => Promise<{ + object: BaseStreamableValue>>; }> { - return async (...args) => - // need to bind to b.stream since we lose context here. - streamHelper( - streamableFunctions[functionName].bind(b.stream) as any, - ...args - ); + return async (...args) => { + const boundFunc = func.bind(b.stream); + const stream = boundFunc(...args); + return streamHelper(stream); + }; } -export const extractResume = createStreamableFunction("extractResume"); ``` +### Export your BAML functions to streamable server actions -### Create a hook to use the streamable functions +In `app/actions/extract.tsx` add the following code: +```typescript +import { makeStreamable } from "../_baml_utils/streamableObjects"; + + +export const extractResume = makeStreamable(b.stream.ExtractResume); +``` + +### Create a hook to use the streamable functions in React Components This hook will work like [react-query](https://react-query.tanstack.com/), but for BAML functions. It will give you partial data, the loading status, and whether the stream was completed. @@ -116,23 +119,28 @@ In `app/_hooks/useStream.ts` add: import { useState, useEffect } from "react"; import { readStreamableValue, StreamableValue } from "ai/rsc"; +/** + * A hook that streams data from a server action. The server action must return a StreamableValue. + * See the example actiimport { useState, useEffect } from "react"; +import { readStreamableValue, StreamableValue } from "ai/rsc"; + /** * A hook that streams data from a server action. The server action must return a StreamableValue. 
* See the example action in app/actions/streamable_objects.tsx * **/ -export function useStream( - serverAction: (...args: P) => Promise<{ object: StreamableValue, any> }> +export function useStream( + serverAction: (...args: P) => Promise<{ object: StreamableValue<{ partial: PartialRet } | { final: Ret }, any> }> ) { const [isLoading, setIsLoading] = useState(false); const [isComplete, setIsComplete] = useState(false); const [isError, setIsError] = useState(false); const [error, setError] = useState(null); - const [partialData, setPartialData] = useState | undefined>(undefined); // Initialize data state - const [data, setData] = useState(undefined); // full non-partial data + const [partialData, setPartialData] = useState(undefined); // Initialize data state + const [streamResult, setData] = useState(undefined); // full non-partial data const mutate = async ( ...params: Parameters - ): Promise => { + ): Promise => { console.log("mutate", params); setIsLoading(true); setIsError(false); @@ -142,7 +150,6 @@ export function useStream( const { object } = await serverAction(...params); const asyncIterable = readStreamableValue(object); - let streamedData: Partial | undefined; for await (const value of asyncIterable) { if (value !== undefined) { @@ -151,16 +158,18 @@ export function useStream( // options.onData(value as T); // } console.log("value", value); - streamedData = value; - setPartialData(streamedData); // Update data state with the latest value + if ("partial" in value) { + setPartialData(value.partial); // Update data state with the latest value + } else if ("final" in value) { + setData(value.final); // Update data state with the latest value + setIsComplete(true); + return value.final; + } } } - - setIsComplete(true); - setData(streamedData as T); - // If it completes, it means it's the full data. - return streamedData as T; + // // If it completes, it means it's the full data. 
+ // return streamedData; } catch (err) { console.log("error", err); @@ -173,8 +182,9 @@ export function useStream( }; // If you use the "data" property, your component will re-render when the data gets updated. - return { data, partialData, isLoading, isComplete, isError, error, mutate }; + return { data: streamResult, partialData, isLoading, isComplete, isError, error, mutate }; } + ``` @@ -193,11 +203,13 @@ import { Resume } from "@/baml_client"; export default function Home() { // you can also rename these fields by using ":", like how we renamed partialData to "partialResume" - const { data, partialData: partialResume, isLoading, isError, error, mutate } = useStream(extractResume); + // `mutate` is a function that will start the stream. It takes in the same arguments as the BAML function. + const { data: completedData, partialData: partialResume, isLoading, isError, error, mutate } = useStream(extractResume); return (

BoundaryML Next.js Example

+ {isLoading &&

Loading...

} {isError &&

Error: {error?.message}

} diff --git a/docs/docs/calling-baml/calling-functions.mdx b/docs/docs/calling-baml/calling-functions.mdx index 0772bb40a..ad969c5d3 100644 --- a/docs/docs/calling-baml/calling-functions.mdx +++ b/docs/docs/calling-baml/calling-functions.mdx @@ -83,28 +83,6 @@ if (require.main === module) { ``` - - -BAML will generate `Baml.Client.ClassifyMessage()` for you, which you can use like so: - -```ruby main.rb -require_relative "baml_client/client" - -$b = Baml.Client - -def main - category = $b.ClassifyMessage(input: "I want to cancel my order") - puts category - category == Baml::Types::Category::CancelOrder -end - -if __FILE__ == $0 - puts main -end - -``` - - If you're using an OpenAPI-generated client, every BAML function will generate diff --git a/docs/docs/calling-baml/client-registry.mdx b/docs/docs/calling-baml/client-registry.mdx index f4b5f1a83..16f82078c 100644 --- a/docs/docs/calling-baml/client-registry.mdx +++ b/docs/docs/calling-baml/client-registry.mdx @@ -2,7 +2,7 @@ title: Client Registry slug: docs/calling-baml/client-registry --- -{/* TODO: use fern Group elements instead of CodeBlock elements for Python/TS/Ruby */} +{/* TODO: use fern Group elements instead of CodeBlock elements for Python/TS/OpenAPI */} If you need to modify the model / parameters for an LLM client at runtime, you can modify the `ClientRegistry` for any specified function. 
@@ -52,39 +52,8 @@ async function run() { ``` - - -```ruby -require_relative "baml_client/client" - -def run - cr = Baml::ClientRegistry.new - - # Creates a new client - cr.add_llm_client( - name: 'MyAmazingClient', - provider: 'openai', - options: { - model: 'gpt-4o', - temperature: 0.7, - api_key: ENV['OPENAI_API_KEY'] - } - ) - - # Sets MyAmazingClient as the primary client - cr.set_primary('MyAmazingClient') - - # ExtractResume will now use MyAmazingClient as the calling client - res = Baml.Client.extract_resume(input: '...', baml_options: { client_registry: cr }) -end - -# Call the asynchronous function -run -``` - - -Dynamic types are not yet supported when used via OpenAPI. +Client registries are not yet supported when used via OpenAPI. Please let us know if you want this feature, either via [Discord] or [GitHub][openapi-feedback-github-issue]. diff --git a/docs/docs/calling-baml/concurrent-calls.mdx b/docs/docs/calling-baml/concurrent-calls.mdx index 5e6dec1d7..eb1a2411c 100644 --- a/docs/docs/calling-baml/concurrent-calls.mdx +++ b/docs/docs/calling-baml/concurrent-calls.mdx @@ -33,6 +33,7 @@ function ClassifyMessage(input: string) -> Category { + You can make concurrent `b.ClassifyMessage()` calls like so: @@ -77,11 +78,10 @@ if (require.main === module) { ``` - - -BAML Ruby (beta) does not currently support async/concurrent calls. - -Please [contact us](/contact) if this is something you need. + +Please reference the concurrency docs for your language of choice. +We'll add examples for how to do this soon, though! 
+ \ No newline at end of file diff --git a/docs/docs/calling-baml/dynamic-types.mdx b/docs/docs/calling-baml/dynamic-types.mdx index 2a79df2fe..bf3c340e6 100644 --- a/docs/docs/calling-baml/dynamic-types.mdx +++ b/docs/docs/calling-baml/dynamic-types.mdx @@ -2,9 +2,9 @@ slug: docs/calling-baml/dynamic-types --- -Sometimes you have a **output schemas that change at runtime** -- for example if -you have a list of Categories that you need to classify that come from a -database, or your schema is user-provided. +Sometimes you have **output schemas that change at runtime** -- for example if +the list of `Categories` you need to classify comes from a database, or your +users can customize a prompt schema. **Dynamic types are types that can be modified at runtime**, which means you can change the output schema of a function at runtime. @@ -12,95 +12,78 @@ change the output schema of a function at runtime. Here are the steps to make this work: 1. Add `@@dynamic` to the class or enum definition to mark it as dynamic -```rust baml -enum Category { - VALUE1 // normal static enum values that don't change - VALUE2 - @@dynamic // this enum can have more values added at runtime -} - -function DynamicCategorizer(input: string) -> Category { - client GPT4 - prompt #" - Given a string, classify it into a category - {{ input }} - - {{ ctx.output_format }} - "# -} - -``` + ```rust baml + enum Category { + VALUE1 // normal static enum values that don't change + VALUE2 + @@dynamic // this enum can have more values added at runtime + } + + function DynamicCategorizer(input: string) -> Category { + client GPT4 + prompt #" + Given a string, classify it into a category + {{ input }} + + {{ ctx.output_format }} + "# + } + + ``` 2. Create a TypeBuilder and modify the existing type. All dynamic types you define in BAML will be available as properties of `TypeBuilder`. 
Think of the typebuilder as a registry of modified runtime types that the baml function will read from when building the output schema in the prompt. - - - -```python -from baml_client.type_builder import TypeBuilder -from baml_client import b - -async def run(): - tb = TypeBuilder() - tb.Category.add_value('VALUE3') - tb.Category.add_value('VALUE4') - # Pass the typebuilder in the baml_options argument -- the last argument of the function. - res = await b.DynamicCategorizer("some input", { "tb": tb }) - # Now res can be VALUE1, VALUE2, VALUE3, or VALUE4 - print(res) - -``` - - - -```typescript -import TypeBuilder from '../baml_client/type_builder' -import { - b -} from '../baml_client' - -async function run() { - const tb = new TypeBuilder() - tb.Category.addValue('VALUE3') - tb.Category.addValue('VALUE4') - const res = await b.DynamicCategorizer("some input", { tb: tb }) - // Now res can be VALUE1, VALUE2, VALUE3, or VALUE4 - console.log(res) -} -``` - - - -```ruby -require_relative '../baml_client' - -def run - tb = Baml::TypeBuilder.new - tb.Category.add_value('VALUE3') - tb.Category.add_value('VALUE4') - res = Baml.Client.dynamic_categorizer(input: "some input", baml_options: {tb: tb}) - # Now res can be VALUE1, VALUE2, VALUE3, or VALUE4 - puts res -end -``` - - - -Dynamic types are not yet supported when used via OpenAPI. - -Please let us know if you want this feature, either via [Discord] or [GitHub][openapi-feedback-github-issue]. - -[Discord]: https://discord.gg/BTNBeXGuaS -[openapi-feedback-github-issue]: https://github.com/BoundaryML/baml/issues/892 - - - + + + + ```python + from baml_client.type_builder import TypeBuilder + from baml_client import b + + async def run(): + tb = TypeBuilder() + tb.Category.add_value('VALUE3') + tb.Category.add_value('VALUE4') + # Pass the typebuilder in the baml_options argument -- the last argument of the function. 
+ res = await b.DynamicCategorizer("some input", { "tb": tb }) + # Now res can be VALUE1, VALUE2, VALUE3, or VALUE4 + print(res) + + ``` + + + + ```typescript + import TypeBuilder from '../baml_client/type_builder' + import { + b + } from '../baml_client' + + async function run() { + const tb = new TypeBuilder() + tb.Category.addValue('VALUE3') + tb.Category.addValue('VALUE4') + const res = await b.DynamicCategorizer("some input", { tb: tb }) + // Now res can be VALUE1, VALUE2, VALUE3, or VALUE4 + console.log(res) + } + ``` + + + + Dynamic types are not yet supported when used via OpenAPI. + + Please let us know if you want this feature, either via [Discord] or [GitHub][openapi-feedback-github-issue]. + + + ### Dynamic BAML Classes -Existing BAML classes marked with @@dynamic will be available as properties of `TypeBuilder`. + +Existing BAML classes marked with `@@dynamic` will be available as properties of `TypeBuilder`. ```rust BAML class User { @@ -122,7 +105,8 @@ function DynamicUserCreator(user_info: string) -> User { Modify the `User` schema at runtime: - + + ```python Python from baml_client.type_builder import TypeBuilder @@ -137,8 +121,11 @@ async def run(): print(res) ``` + + + -```typescript TypeScript +```typescript import TypeBuilder from '../baml_client/type_builder' import { b @@ -153,29 +140,23 @@ async function run() { console.log(res) } ``` + -```ruby Ruby -require_relative 'baml_client/client' + -def run - tb = Baml::TypeBuilder.new - tb.User.add_property('email', tb.string) - tb.User.add_property('address', tb.string) - - res = Baml.Client.dynamic_user_creator(input: "some user info", baml_options: {tb: tb}) - # Now res can have email and address fields - puts res -end -``` - +Dynamic types are not yet supported when used via OpenAPI. -### Creating new dynamic classes or enums not in BAML -Here we create a new `Hobbies` enum, and a new class called `Address`. 
+Please let us know if you want this feature, either via [Discord] or [GitHub][openapi-feedback-github-issue]. + + - +### Creating new dynamic classes or enums not in BAML +Here we create a new `Hobbies` enum, and a new class called `Address`. -```python Python + + +```python from baml_client.type_builder import TypeBuilder from baml_client import b @@ -195,8 +176,11 @@ async def run(): print(res) ``` + + + -```typescript TypeScript +```typescript import TypeBuilder from '../baml_client/type_builder' import { b } from '../baml_client' @@ -217,49 +201,46 @@ async function run() { console.log(res) } ``` + -```ruby Ruby -require_relative 'baml_client/client' + -def run - tb = Baml::TypeBuilder.new - hobbies_enum = tb.add_enum('Hobbies') - hobbies_enum.add_value('Soccer') - hobbies_enum.add_value('Reading') +Dynamic types are not yet supported when used via OpenAPI. - address_class = tb.add_class('Address') - address_class.add_property('street', tb.string) +Please let us know if you want this feature, either via [Discord] or [GitHub][openapi-feedback-github-issue]. 
- tb.User.add_property('hobby', hobbies_enum.type.optional) - tb.User.add_property('address', address_class.type.optional) - - res = Baml::Client.dynamic_user_creator(input: "some user info", baml_options: { tb: tb }) - # Now res might have the hobby property, which can be Soccer or Reading - puts res -end -``` - + + ### Adding descriptions to dynamic types - + + +```python +from baml_client.type_builder import TypeBuilder +from baml_client import b -```python Python -tb = TypeBuilder() -tb.User.add_property("email", tb.string()).description("The user's email") +async def run(): + tb = TypeBuilder() + tb.User.add_property("email", tb.string()).description("The user's email") ``` + -```typescript TypeScript + +```typescript const tb = new TypeBuilder() tb.User.addProperty("email", tb.string()).description("The user's email") ``` + -```ruby Ruby -tb = Baml::TypeBuilder.new -tb.User.add_property("email", tb.string).description("The user's email") -``` + + +Dynamic types are not yet supported when used via OpenAPI. + +Please let us know if you want this feature, either via [Discord] or [GitHub][openapi-feedback-github-issue]. - + + ### Building dynamic types from JSON schema @@ -267,9 +248,10 @@ We have a working implementation of this, but are waiting for a concrete use cas Please chime in on [the GitHub issue](https://github.com/BoundaryML/baml/issues/771) if this is something you'd like to use. - - -```python Python + + +```python +import pydantic import pydantic from baml_client import b @@ -285,8 +267,11 @@ res = await b.ExtractPeople( {"tb": tb}, ) ``` + -```typescript TypeScript + +```typescript +import 'z' from zod import 'z' from zod import 'zodToJsonSchema' from zod-to-json-schema import { b } from '../baml_client' @@ -308,15 +293,16 @@ const res = await b.ExtractPeople( ) ``` -```ruby Ruby -tb = Baml::TypeBuilder.new -tb.unstable_features.add_json_schema(...) + -res = Baml::Client.extract_people( - input: "My name is Harrison. 
My hair is black and I'm 6 feet tall. I'm pretty good around the hoop. I like giraffes.", - baml_options: { tb: tb } -) + -puts res -``` - +Dynamic types are not yet supported when used via OpenAPI. + +Please let us know if you want this feature, either via [Discord] or [GitHub][openapi-feedback-github-issue]. + + + + +[Discord]: https://discord.gg/BTNBeXGuaS +[openapi-feedback-github-issue]: https://github.com/BoundaryML/baml/issues/892 \ No newline at end of file diff --git a/docs/docs/calling-baml/exceptions.mdx b/docs/docs/calling-baml/exceptions.mdx index ae666abe9..1cee2a1e8 100644 --- a/docs/docs/calling-baml/exceptions.mdx +++ b/docs/docs/calling-baml/exceptions.mdx @@ -21,8 +21,8 @@ from baml_py.errors import BamlError, BamlInvalidArgumentError, BamlClientError, // "BamlError: BamlClientError: BamlClientHttpError:" ``` -```ruby Ruby -Not available yet +```text OpenAPI +We do not generate error types in OpenAPI. ``` @@ -44,6 +44,8 @@ Subclass of `BamlError`. Raised when one or multiple arguments to a function are invalid. +When using BAML-over-HTTP a.k.a. OpenAPI, this is `400 Bad Request`. + ### BamlClientError Subclass of `BamlError`. @@ -54,11 +56,13 @@ Raised when a client fails to return a valid response. In the case of aggregate clients like `fallback` or those with `retry_policy`, only the last client's error is raised. +When using BAML-over-HTTP a.k.a. OpenAPI, this is `502 Bad Gateway`. + #### BamlClientHttpError Subclass of `BamlClientError`. -Raised when the HTTP request made by a client fails with a non-200 status code. +Raised when BAML successfully makes an HTTP request to an LLM provider, but the provider returns a non-200 status code. The raw text from the LLM that failed to parse into the expected return type of a function. + +When using BAML-over-HTTP a.k.a. OpenAPI, this is `500 Internal Server Error` (we expect to use a more specific status code here soon). 
diff --git a/docs/docs/calling-baml/generate-baml-client.mdx b/docs/docs/calling-baml/generate-baml-client.mdx index fa5f0b0ec..42afedaa9 100644 --- a/docs/docs/calling-baml/generate-baml-client.mdx +++ b/docs/docs/calling-baml/generate-baml-client.mdx @@ -16,50 +16,60 @@ you save a BAML file. Otherwise, you can generate the client manually: [BAML extension]: https://marketplace.visualstudio.com/items?itemName=Boundary.baml-extension - + + -```bash Python + +```bash pipx pipx run baml-cli generate --from path/to/baml_src +``` -# If using your local installation, venv or conda: +```bash pip pip install baml-py baml-cli generate --from path/to/baml_src +``` -# If using poetry: +```bash poetry poetry add baml-py poetry run baml-cli generate --from path/to/baml_src +``` -# If using pipenv: +```bash pipenv pipenv install baml-py pipenv run baml-cli generate --from path/to/baml_src ``` + + -```bash TypeScript + + +```bash npx npx @boundaryml/baml generate --from path/to/baml_src +``` -# If using npm: +```bash npm npm install @boundaryml/baml npm run baml-cli generate --from path/to/baml_src +``` -# If using pnpm: +```bash pnpm pnpm install @boundaryml/baml pnpm run baml-cli generate --from path/to/baml_src +``` -# If using pnpm: +```bash yarn yarn add @boundaryml/baml yarn run baml-cli generate --from path/to/baml_src ``` + + -```bash Ruby (beta) -bundle add baml -bundle exec baml-cli generate --from path/to/baml_src -``` - + ```bash OpenAPI npx @boundaryml/baml-cli generate --from path/to/baml_src ``` - - + + ## Best Practices @@ -76,7 +86,7 @@ generate code for each of them. 
```baml Python generator target { - // Valid values: "python/pydantic", "typescript", "ruby/sorbet" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "python/pydantic" // Where the generated code will be saved (relative to baml_src/) @@ -94,7 +104,7 @@ generator target { ```baml TypeScript generator target { - // Valid values: "python/pydantic", "typescript", "ruby/sorbet" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "typescript" // Where the generated code will be saved (relative to baml_src/) @@ -110,22 +120,9 @@ generator target { } ``` -```baml Ruby (beta) -generator target { - // Valid values: "python/pydantic", "typescript", "ruby/sorbet" - output_type "ruby/sorbet" - - // Where the generated code will be saved (relative to baml_src/) - output_dir "../" - - // Version of runtime to generate code for (should match installed `baml` package version) - version "0.54.0" -} -``` - ```baml OpenAPI generator target { - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "rest/openapi" // Where the generated code will be saved (relative to baml_src/) @@ -170,7 +167,7 @@ jobs: steps: - uses: actions/checkout@v4 - # Install your Python/Node/Ruby (beta) dependencies here + # Install your Python/Node/etc dependencies here - name: Generate BAML client run: baml-cli generate --from baml_src diff --git a/docs/docs/calling-baml/multi-modal.mdx b/docs/docs/calling-baml/multi-modal.mdx index 90af56e51..a7fd7e91f 100644 --- a/docs/docs/calling-baml/multi-modal.mdx +++ b/docs/docs/calling-baml/multi-modal.mdx @@ -47,8 +47,8 @@ import { Image } from "@boundaryml/baml" ``` -```ruby Ruby (beta) -we're working on it! +```bash OpenAPI +# Use the generated BamlImage type. ``` @@ -94,7 +94,8 @@ import { Audio } from "@boundaryml/baml" ``` -```ruby Ruby (beta) -we're working on it! 
+```bash OpenAPI +# Use the generated BamlAudio type. ``` + diff --git a/docs/docs/calling-baml/set-env-vars.mdx b/docs/docs/calling-baml/set-env-vars.mdx index 7f6d0ceb9..f2bc7ecf5 100644 --- a/docs/docs/calling-baml/set-env-vars.mdx +++ b/docs/docs/calling-baml/set-env-vars.mdx @@ -52,14 +52,12 @@ dotenv.config() import { b } from 'baml-client' ``` -```ruby Ruby (beta) -require 'dotenv/load' +```text OpenAPI +Make sure to set the environment variables wherever you run `baml-cli dev` or `baml-cli serve`. -# Wait to import the BAML client until after loading environment variables -require 'baml_client' +See the [deployment instructions](/docs/get-started/deploying/openapi) to learn more. ``` - @@ -69,7 +67,6 @@ Environment variables are primarily used in clients to propagate authorization credentials, such as API keys, like so: ```baml - client GPT4o { provider baml-openai-chat options { diff --git a/docs/docs/calling-baml/streaming.mdx b/docs/docs/calling-baml/streaming.mdx index f03b6e291..ce5a012a8 100644 --- a/docs/docs/calling-baml/streaming.mdx +++ b/docs/docs/calling-baml/streaming.mdx @@ -163,74 +163,9 @@ if (require.main === module) { ``` - -BAML will generate `Baml.Client.stream.ExtractReceiptInfo()` for you, -which you can use like so: - -```ruby main.rb -require_relative "baml_client/client" - -$b = Baml.Client - -# Using both iteration and get_final_response() from a stream -def example1(receipt) - stream = $b.stream.ExtractReceiptInfo(receipt) - - stream.each do |partial| - puts "partial: #{partial.items&.length} items" - end - - final = stream.get_final_response - puts "final: #{final.items.length} items" -end - -# Using only iteration of a stream -def example2(receipt) - $b.stream.ExtractReceiptInfo(receipt).each do |partial| - puts "partial: #{partial.items&.length} items" - end -end - -# Using only get_final_response() of a stream -# -# In this case, you should just use BamlClient.ExtractReceiptInfo(receipt) instead, -# which is faster and more 
efficient. -def example3(receipt) - final = $b.stream.ExtractReceiptInfo(receipt).get_final_response - puts "final: #{final.items.length} items" -end - -receipt = <<~RECEIPT - 04/14/2024 1:05 pm - - Ticket: 220000082489 - Register: Shop Counter - Employee: Connor - Customer: Sam - Item # Price - Guide leash (1 Pair) uni UNI - 1 $34.95 - The Index Town Walls - 1 $35.00 - Boot Punch - 3 $60.00 - Subtotal $129.95 - Tax ($129.95 @ 9%) $11.70 - Total Tax $11.70 - Total $141.65 -RECEIPT - -if __FILE__ == $0 - example1(receipt) - example2(receipt) - example3(receipt) -end -``` - - -Streaming is not yet supported via OpenAPI, but it will be coming soon! +Streaming is not yet supported via OpenAPI, but it's coming soon! \ No newline at end of file diff --git a/docs/docs/get-started/debugging/vscode-playground.mdx b/docs/docs/get-started/debugging/vscode-playground.mdx index 8ef7736af..431fca540 100644 --- a/docs/docs/get-started/debugging/vscode-playground.mdx +++ b/docs/docs/get-started/debugging/vscode-playground.mdx @@ -3,15 +3,34 @@ slug: docs/get-started/debugging/vscode-playground --- ## General debugging strategy + - Check [Discord (#announcements channel)](https://discord.gg/BTNBeXGuaS) / [Github](https://github.com/BoundaryML/baml/issues) for any known issues - Close the playground and reopen it - Try reloading the entire window by pressing `Ctrl + Shift + P` or `Cmd + Shift + P` and typing `Developer: Reload Window` -- Ensure your VSCode Extension for BAML is up-to-date (It should should it its up-to-date in the Extensions tab in VSCode) - -- If nothing works, please file an issue on [Github](https://github.com/BoundaryML/baml/issues), ideally with a screenshot of the error and the steps to reproduce it. 
+ +- Ensure your VSCode Extension for BAML is up-to-date (Cursor users must [update manually][cursor-ext-update]) + + + +- Check the BAML extension logs + + - Open the `Output` panel in VSCode (press `Ctrl + Shift + U` or `Cmd + Shift + U`) + + + + - Find the dropdown currently showing `Main`, and select `BAML Language Server` from the dropdown + + - Share these logs with us when you reach out to us + +If you continue to have issues, please [let us know on Discord](https://discord.gg/BTNBeXGuaS) or [file an issue on Github](https://github.com/BoundaryML/baml/issues). + +[cursor-ext-update]: https://www.cursor.com/how-to-install-extension + +{/* placeholder split */} ## Common Issues + ### No code lens in BAML files This can happen in two cases: @@ -19,8 +38,9 @@ This can happen in two cases: 2. BAML extension is broken. Please try the tools above! -### BAML extension is not working +{/* placeholder split */} +## BAML extension is not working ### Tests hanging We've seen sparse repros of this, but closing the playground and reopening it should fix it. diff --git a/docs/docs/get-started/quickstart/ruby.mdx b/docs/docs/get-started/quickstart/ruby.mdx deleted file mode 100644 index e649b38ee..000000000 --- a/docs/docs/get-started/quickstart/ruby.mdx +++ /dev/null @@ -1,77 +0,0 @@ ---- -slug: docs/get-started/quickstart/ruby ---- - -Here's a sample repository: https://github.com/BoundaryML/baml-examples/tree/main/ruby-example - -To set up BAML in ruby do the following: - - - ### Install BAML VSCode Extension - https://marketplace.visualstudio.com/items?itemName=boundary.baml-extension - - - syntax highlighting - - testing playground - - prompt previews - - - ### Install baml - ```bash - bundle init - bundle add baml sorbet-runtime - ``` - - ### Add some starter code - This will give you some starter BAML code in a `baml_src` directory. 
- - ```bash - bundle exec baml-cli init - ``` - - - ### Generate Ruby code from .baml files - - This command will help you convert `.baml` files to `.rb` files. Everytime you modify your `.baml` files, - you must re-run this command, and regenerate the `baml_client` folder. - - - Our [VSCode extension](https://marketplace.visualstudio.com/items?itemName=Boundary.baml-extension) automatically runs this command when you save a BAML file. - - - ```bash - bundle exec baml-cli generate - ``` - - - ### Use a baml function in Ruby! - If `baml_client` doesn't exist, make sure to run the previous step! - - ```ruby main.rb - require_relative "baml_client/client" - - def example(raw_resume) - # r is an instance of Baml::Types::Resume, defined in baml_client/types - r = Baml.Client.ExtractResume(resume: raw_resume) - - puts "ExtractResume response:" - puts r.inspect - end - - def example_stream(raw_resume) - stream = Baml.Client.stream.ExtractResume(resume: raw_resume) - - stream.each do |msg| - # msg is an instance of Baml::PartialTypes::Resume - # defined in baml_client/partial_types - puts msg.inspect - end - - stream.get_final_response - end - - example 'Grace Hopper created COBOL' - example_stream 'Grace Hopper created COBOL' - ``` - - - diff --git a/docs/docs/get-started/what-is-baml.mdx b/docs/docs/get-started/what-is-baml.mdx index 40d552f3d..467a4d67b 100644 --- a/docs/docs/get-started/what-is-baml.mdx +++ b/docs/docs/get-started/what-is-baml.mdx @@ -23,7 +23,7 @@ Share your creations and ask questions in our [Discord](https://discord.gg/BTNBe ## Features ### Language features -- **Python / Typescript / Ruby support**: Plug-and-play BAML with other languages +- **Python / Typescript / OpenAPI / HTTP support**: use BAML with any language or tech stack - **JSON correction**: BAML fixes bad JSON returned by LLMs (e.g. unquoted keys, newlines, comments, extra quotes, and more) - **Wide model support**: Ollama, Openai, Anthropic, Gemini. 
Tested on small models like Llama2 - **Streaming**: Stream structured partial outputs diff --git a/docs/docs/snippets/clients/overview.mdx b/docs/docs/snippets/clients/overview.mdx index 64ba4d081..42aaca7de 100644 --- a/docs/docs/snippets/clients/overview.mdx +++ b/docs/docs/snippets/clients/overview.mdx @@ -40,7 +40,7 @@ variable, or you want to point `base_url` to a different endpoint, you should us the latter form. -If you want to specify which client to use at runtime, in your Python/TS/Ruby code, +If you want to specify which client to use at runtime, in your Python/TS code, you can use the [client registry](/docs/calling-baml/client-registry) to do so. This can come in handy if you're trying to, say, send 10% of your requests to a diff --git a/docs/docs/snippets/clients/providers/keywordsai.mdx b/docs/docs/snippets/clients/providers/keywordsai.mdx new file mode 100644 index 000000000..d3382e8b0 --- /dev/null +++ b/docs/docs/snippets/clients/providers/keywordsai.mdx @@ -0,0 +1,7 @@ +--- +title: Keywords AI +slug: docs/snippets/clients/providers/keywordsai +--- +Keywords AI is a proxying layer that allows you to route requests to hundreds of models. + +Follow the [Keywords AI + BAML Installation Guide](https://docs.keywordsai.co/integration/development-frameworks/baml) to get started! \ No newline at end of file diff --git a/docs/docs/snippets/functions/classification.mdx b/docs/docs/snippets/functions/classification.mdx index f7dd0b521..f2eadf5df 100644 --- a/docs/docs/snippets/functions/classification.mdx +++ b/docs/docs/snippets/functions/classification.mdx @@ -59,20 +59,10 @@ import { Category } from 'baml_client/types' ... 
 const result = await b.ClassifyMessage("I want to cancel my order")
 assert(result === Category.Cancel)
+```
 
-```ruby ruby
-require_relative "baml_client/client"
-
-$b = Baml.Client
-
-def main
-  category = $b.ClassifyMessage(input: "I want to cancel my order")
-  puts category == Baml::Types::Category::CancelOrder
-end
-
-if __FILE__ == $0
-  main
-end
+```bash OpenAPI
+curl localhost:2024/call/ClassifyMessage -d '{"input": "I want to cancel my order"}'
 ```
 
 
diff --git a/docs/docs/snippets/functions/extraction.mdx b/docs/docs/snippets/functions/extraction.mdx
index 5f7c45c81..2b59422df 100644
--- a/docs/docs/snippets/functions/extraction.mdx
+++ b/docs/docs/snippets/functions/extraction.mdx
@@ -66,14 +66,11 @@ import { Category } from 'baml_client/types'
 
 const result_from_image = await b.DescribeCharacter(Image.fromUrl("http://..."))
 ```
 
-```ruby ruby
-require_relative "baml_client/client"
-
-$b = Baml.Client
-
-# images are not supported in Ruby
-def example
-  stream = $b.DescribeCharacter("Bob the builder wears overalls")
-end
+```bash OpenAPI
+curl localhost:2024/call/DescribeCharacter -d '{
+  "image_or_paragraph": {
+    "url": "http://..."
+  }
+}'
 ```
\ No newline at end of file
diff --git a/docs/docs/snippets/functions/overview.mdx b/docs/docs/snippets/functions/overview.mdx
index c7990162a..0e2b99d80 100644
--- a/docs/docs/snippets/functions/overview.mdx
+++ b/docs/docs/snippets/functions/overview.mdx
@@ -46,7 +46,7 @@ from baml_client import b
 from baml_client.types import Resume
 
 async def main():
-resume_text = """Jason Doe\nPython, Rust\nUniversity of California, Berkeley, B.S.\nin Computer Science, 2020\nAlso an expert in Tableau, SQL, and C++\n"""
+    resume_text = """Jason Doe\nPython, Rust\nUniversity of California, Berkeley, B.S.\nin Computer Science, 2020\nAlso an expert in Tableau, SQL, and C++\n"""
 
     # this function comes from the autogenerated "baml_client".
     # It calls the LLM you specified and handles the parsing.
@@ -72,19 +72,10 @@ async function main() {
 }
 ```
 
-```ruby ruby
-
-require_relative "baml_client/client"
-b = Baml.Client
-
-# Note this is not async
-res = b.TestFnNamedArgsSingleClass(
-  myArg: Baml::Types::Resume.new(
-    key: "key",
-    key_two: true,
-    key_three: 52,
-  )
-)
+```bash OpenAPI
+curl localhost:2024/call/AnalyzeResume -d '{
+  "resume_text": "Jason Doe\nPython, Rust\nUniversity of California, Berkeley, B.S.\nin Computer Science, 2020\nAlso an expert in Tableau, SQL, and C++\n"
+}'
 ```
 
 
@@ -103,7 +94,7 @@ from baml_client import b
 
 ````
 
-```typescript typescript
+```typescript TypeScript
 import { Resume, b } from "baml_client"
 
 ...
@@ -113,16 +104,13 @@ import { Resume, b } from "baml_client"
   })
 ````
 
-```ruby Ruby
-require_relative "baml_client/client"
-b = Baml.Client
-...
-res = b.AnalyzeResume(
-  myArg: Baml::Types::Resume.new(
-    name: "key",
-    education: [...]
-  )
-)
+```bash OpenAPI
+curl localhost:2024/call/AnalyzeResume -d '{
+  "input": {
+    "name": "Mark",
+    "education": [...]
+  }
+}'
 ```
 
 
diff --git a/docs/docs/snippets/supported-types.mdx b/docs/docs/snippets/supported-types.mdx
index 2181c258c..793cbe7cd 100644
--- a/docs/docs/snippets/supported-types.mdx
+++ b/docs/docs/snippets/supported-types.mdx
@@ -70,25 +70,6 @@ import { Image } from "@boundaryml/baml"
 )
 ```
 
-```ruby Ruby
-require_relative "baml_client/client"
-
-b = Baml.Client
-Image = Baml::Image
-
-def test_image_input
-  # from URL
-  res = b.TestImageInput(
-    img: Image.from_url("https://upload.wikimedia.org/wikipedia/en/4/4d/Shrek_%28character%29.png")
-  )
-
-  # Base64 image
-  image_b64 = "iVBORw0K...."
- res = b.TestImageInput( - img: Image.from_base64("image/png", image_b64) - ) -end -``` ### `audio` @@ -144,27 +125,6 @@ import { Audio } from "@boundaryml/baml" ``` -```ruby Ruby -require_relative "baml_client/client" - -b = Baml.Client -Audio = Baml::Audio - -def test_audio_input - # from URL - res = b.TestAudioInput( - audio: Audio.from_url( - "https://actions.google.com/sounds/v1/emergency/beeper_emergency_call.ogg" - ) - ) - - # Base64 image - audio_b64 = "iVBORw0K...." - res = b.TestAudioInput( - audio: Audio.from_base64("audio/mp3", audio_b64) - ) -end -``` ## Composite/Structured Types diff --git a/engine/.gitignore b/engine/.gitignore index 7804beb73..d22a97c54 100644 --- a/engine/.gitignore +++ b/engine/.gitignore @@ -8,6 +8,16 @@ target/ # in VSCode settings. target-rust-analyzer/ +# NOTE(sam): I use this on my own laptop, so that `cargo build` doesn't have +# to fight rust-analyzer for the build directory lock. +# +# To get the same, add this to your VSCode settings: +# +# "rust-analyzer.cargo.targetDir": "./target-rust-analyzer", +# +# I'm not turning it on for everyone, because this is expensive compute/storage wise. +target-rust-analyzer/ + # These are backup files generated by rustfmt **/*.rs.bk diff --git a/engine/baml-runtime/src/cli/init.rs b/engine/baml-runtime/src/cli/init.rs index 78975481d..9149f412e 100644 --- a/engine/baml-runtime/src/cli/init.rs +++ b/engine/baml-runtime/src/cli/init.rs @@ -149,7 +149,7 @@ fn generate_main_baml_content( "{cmd} --additional-properties gemName=baml_client", ), Some("rust") => format!( - "{cmd} --additional-properties packageName=baml-client", + "{cmd} --additional-properties packageName=baml-client,avoidBoxedModels=true", ), _ => cmd, }; @@ -187,7 +187,7 @@ fn generate_main_baml_content( // your choice. You can have multiple generators if you use multiple languages. // Just ensure that the output_dir is different for each generator. 
generator target {{ - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "{output_type}" // Where the generated code will be saved (relative to baml_src/) @@ -225,7 +225,7 @@ mod tests { // your choice. You can have multiple generators if you use multiple languages. // Just ensure that the output_dir is different for each generator. generator target {{ - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "python/pydantic" // Where the generated code will be saved (relative to baml_src/) @@ -254,7 +254,7 @@ generator target {{ // your choice. You can have multiple generators if you use multiple languages. // Just ensure that the output_dir is different for each generator. generator target {{ - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "typescript" // Where the generated code will be saved (relative to baml_src/) @@ -283,7 +283,7 @@ generator target {{ // your choice. You can have multiple generators if you use multiple languages. // Just ensure that the output_dir is different for each generator. generator target {{ - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "ruby/sorbet" // Where the generated code will be saved (relative to baml_src/) @@ -308,7 +308,7 @@ generator target {{ // your choice. You can have multiple generators if you use multiple languages. // Just ensure that the output_dir is different for each generator. 
generator target {{ - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "rest/openapi" // Where the generated code will be saved (relative to baml_src/) @@ -337,7 +337,7 @@ generator target {{ // your choice. You can have multiple generators if you use multiple languages. // Just ensure that the output_dir is different for each generator. generator target {{ - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "rest/openapi" // Where the generated code will be saved (relative to baml_src/) @@ -366,7 +366,7 @@ generator target {{ // your choice. You can have multiple generators if you use multiple languages. // Just ensure that the output_dir is different for each generator. generator target {{ - // Valid values: "python/pydantic", "typescript", "ruby/sorbet", "rest/openapi" + // Valid values: "python/pydantic", "typescript", "rest/openapi" output_type "rest/openapi" // Where the generated code will be saved (relative to baml_src/) diff --git a/typescript/vscode-ext/packages/README.md b/typescript/vscode-ext/packages/README.md index 248584f1d..9011667b8 100644 --- a/typescript/vscode-ext/packages/README.md +++ b/typescript/vscode-ext/packages/README.md @@ -1,48 +1,30 @@ -# Baml Language VS Code Extension +# BAML Language VSCode Extension -This VS Code extension provides support for the Baml language used to define LLM functions, test them in the integrated LLM Playground and build agentic workflows. +Provides integrated support for BAML, an expressive language for structured text generation. -### General features +Please check out our [documentation] for the most up-to-date information. -1. **Syntax Highlighting**: Provides enhanced readability and coding experience by highlighting the Baml language syntax for any file with the `.baml` extension. -2. 
**Dynamic playground**: Run and test your prompts in real-time. -3. **Build typed clients in several languages**: Command +S a baml file to build a baml client to call your functions in Python or TS. +To provide feedback, please join our [Discord], file an issue on [Github], or +any [other support channel](https://docs.boundaryml.com/contact). -## Usage - -1. **Install BAML dependency**: - -- python: `pip install baml-py` -- typescript: `npm install @boundaryml/baml` -- ruby: `bundle init && bundle add baml sorbet-runtime` - -2. **Create a baml_src directory with a main.baml file and you're all set!** +[Discord]: https://discord.gg/BTNBeXGuaS +[Github]: https://github.com/BoundaryML/baml/issues - Or you can try our `init` script to get an example directory setup for you: +### Features -```bash Python -# If using your local installation, venv or conda: -pip install baml-py -baml-cli generate init -``` +- **Syntax Highlighting**: a cornerstone of any programming language +- **Playground**: preview your prompt in real-time as you edit your prompts +- **Code Lenses**: press "▶ Open Playground" or "▶ Run Test" from any BAML file +- **100% local**: your prompts and data stay on your machine +- **Statically typed templates**: get compile-time errors for prompt template expansion +- **Integrated support for any programming language**: we auto-generate native bindings for Python and TypeScript, and expose an HTTP/RESTful interface with built-in OpenAPI support for all other languages -```bash TypeScript -# If using npm: -npm install @boundaryml/baml -npm run baml-cli init -``` - -```bash Ruby -bundle add baml -bundle exec baml-cli init -``` - -3. **Add your own api keys in the playground (settings icon) to test your functions** +## Usage -4. See more examples at \*\*[promptfiddle.com](promptfiddle.com) +To get started, choose the relevant quickstart guide in our [documentation]. 
-## Documentation +**Add your own API keys in the playground (settings icon) to test your functions** -See our [documentation](https://docs.boundaryml.com) +Check out more examples at [promptfiddle.com](https://promptfiddle.com) -For any issues, feature requests, or contributions, please reach out at contact@boundaryml.com +[documentation]: https://docs.boundaryml.com