Skip to content

Commit

Permalink
Updated examples
Browse files Browse the repository at this point in the history
  • Loading branch information
ddebowczyk committed Aug 7, 2024
1 parent b400236 commit cca0a4e
Show file tree
Hide file tree
Showing 13 changed files with 204 additions and 70 deletions.
17 changes: 13 additions & 4 deletions docs/cookbook/examples/api_support/anthropic.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -51,19 +51,28 @@ $instructor = (new Instructor)->withClient($client);
$user = $instructor->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
responseModel: User::class,
model: 'claude-3-haiku-20240307',
mode: Mode::Tools,
examples: [[
'input' => 'Ive got email Frank - their developer. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => null, 'name' => 'Frank', 'role' => 'developer', 'hobbies' => ['playing drums'],],
'input' => 'Ive got email Frank - their developer, who\'s 30. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => 30, 'name' => 'Frank', 'username' => '[email protected]', 'role' => 'developer', 'hobbies' => ['playing drums'],],
]],
model: 'claude-3-haiku-20240307',
mode: Mode::Json,
);

print("Completed response model:\n\n");

dump($user);

assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
30 changes: 21 additions & 9 deletions docs/cookbook/examples/api_support/azure_openai.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ $loader = require 'vendor/autoload.php';
$loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/');

use Cognesy\Instructor\Clients\Azure\AzureClient;
use Cognesy\Instructor\Enums\Mode;
use Cognesy\Instructor\Instructor;
use Cognesy\Instructor\Utils\Env;

Expand All @@ -37,30 +38,41 @@ class User {
}

/// Custom client parameters: base URI
$resourceName = Env::get('AZURE_OPENAI_RESOURCE_NAME'); // set your own value/source

$client = (new AzureClient(
apiKey: Env::get('AZURE_OPENAI_API_KEY'),
resourceName: 'instructor-dev', // set your own value/source
deploymentId: 'gpt-35-turbo-16k', // set your own value/source
apiVersion: '2024-02-01',
apiKey: Env::get('AZURE_OPENAI_API_KEY'), // set your own value/source
resourceName: Env::get('AZURE_OPENAI_RESOURCE_NAME'), // set your own value/source
deploymentId: Env::get('AZURE_OPENAI_DEPLOYMENT_NAME'), // set your own value/source
apiVersion: Env::get('AZURE_OPENAI_API_VERSION'), // set your own value/source
));

/// Get Instructor with the default client component overridden with your own
$instructor = (new Instructor)->withClient($client);

// Call with your model name and preferred execution mode
$user = $instructor->respond(
messages: "Our user Jason is 25 years old.",
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
responseModel: User::class,
model: 'gpt-35-turbo-16k', // set your own value/source
//options: ['stream' => true ]
examples: [[
'input' => 'Ive got email Frank - their developer, who\'s 30. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => 30, 'name' => 'Frank', 'username' => '[email protected]', 'role' => 'developer', 'hobbies' => ['playing drums'],],
]],
model: 'gpt-4o-mini', // set your own value/source
mode: Mode::Json,
);

print("Completed response model:\n\n");
dump($user);

assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
23 changes: 16 additions & 7 deletions docs/cookbook/examples/api_support/cohere.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ Instructor supports Cohere API - you can find the details on how to configure
the client in the example below.

Mode compatibility:
- Mode::MdJson - supported, recommended
- Mode::Json - not supported by Cohere
- Mode::MdJson - supported, recommended as a fallback from JSON mode
- Mode::Json - supported, recommended
- Mode::Tools - partially supported, not recommended

Reasons Mode::Tools is not recommended:

- Cohere does not support JSON Schema, which only allows to extract very simple data schemas.
- Cohere does not support JSON Schema, which means only very simple, flat data schemas can be extracted.
- Performance of the currently available versions of Cohere models in tools mode for Instructor use case (data extraction) is extremely poor.


Expand Down Expand Up @@ -57,19 +57,28 @@ $instructor = (new Instructor)->withClient($client);
$user = $instructor->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
responseModel: User::class,
model: 'command-r-plus',
mode: Mode::Json,
examples: [[
'input' => 'Ive got email Frank - their developer. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => null, 'name' => 'Frank', 'role' => 'developer', 'hobbies' => ['playing drums'],],
'input' => 'Ive got email Frank - their developer, who\'s 30. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => 30, 'name' => 'Frank', 'username' => '[email protected]', 'role' => 'developer', 'hobbies' => ['playing drums'],],
]],
model: 'command-r-plus',
mode: Mode::Json,
);

print("Completed response model:\n\n");

dump($user);

assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
14 changes: 13 additions & 1 deletion docs/cookbook/examples/api_support/fireworks.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -56,15 +56,27 @@ $user = $instructor
->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
responseModel: User::class,
examples: [[
'input' => 'Ive got email Frank - their developer, who\'s 30. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => 30, 'name' => 'Frank', 'username' => '[email protected]', 'role' => 'developer', 'hobbies' => ['playing drums'],],
]],
model: 'accounts/fireworks/models/mixtral-8x7b-instruct',
mode: Mode::Json,
//options: ['stream' => true ]
);

print("Completed response model:\n\n");
dump($user);

assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
28 changes: 20 additions & 8 deletions docs/cookbook/examples/api_support/google_gemini.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,14 @@ docname: 'google_gemini'

## Overview

Google offers Gemini models which perform well in benchmarks.
Here's how you can use Instructor with Gemini API.
Google offers Gemini models which perform well in benchmarks.

Supported modes:
- Mode::MdJson - fallback mode
- Mode::Json - recommended
- Mode::Tools - supported

## Example
Here's how you can use Instructor with Gemini API.

```php
<?php
Expand Down Expand Up @@ -51,19 +54,28 @@ $user = $instructor
->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
responseModel: User::class,
model: 'gemini-1.5-flash',
//options: ['stream' => true],
examples: [[
'input' => 'Ive got email Frank - their developer. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => null, 'name' => 'Frank', 'role' => 'developer', 'hobbies' => ['playing drums'],],
'input' => 'Ive got email Frank - their developer, who\'s 30. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => 30, 'name' => 'Frank', 'username' => '[email protected]', 'role' => 'developer', 'hobbies' => ['playing drums'],],
]],
mode: Mode::MdJson,
model: 'gemini-1.5-flash',
//options: ['stream' => true],
mode: Mode::Json,
);

print("Completed response model:\n\n");
dump($user);

assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
13 changes: 10 additions & 3 deletions docs/cookbook/examples/api_support/groq.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,13 @@ docname: 'groq'

Groq is an LLM provider offering very fast inference thanks to their
custom hardware. They provide several models - Llama2, Mixtral and Gemma.
Here's how you can use Instructor with Groq API.

Supported modes depend on the specific model, but generally include:
- Mode::MdJson - fallback mode
- Mode::Json - recommended
- Mode::Tools - supported

Here's how you can use Instructor with Groq API.

## Example

Expand Down Expand Up @@ -54,8 +59,8 @@ $user = $instructor
responseModel: User::class,
prompt: 'Parse the user data to JSON, respond using following JSON Schema: <|json_schema|>',
examples: [[
'input' => 'Ive got email Frank - their developer. Asked to connect via Twitter @frankch. Btw, he plays on drums!',
'output' => ['name' => 'Frank', 'role' => 'developer', 'hobbies' => ['playing drums'], 'username' => 'frankch', 'age' => null],
'input' => 'Ive got email Frank - their developer, who\'s 30. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => 30, 'name' => 'Frank', 'username' => '[email protected]', 'role' => 'developer', 'hobbies' => ['playing drums'],],
],[
'input' => 'We have a meeting with John, our new user. He is 30 years old - check his profile: @jx90.',
'output' => ['name' => 'John', 'role' => 'admin', 'hobbies' => [], 'username' => 'jx90', 'age' => 30],
Expand All @@ -74,10 +79,12 @@ assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
25 changes: 20 additions & 5 deletions docs/cookbook/examples/api_support/mistralai.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,9 @@ Please note that the larger Mistral models support Mode::Json, which is much mor
reliable than Mode::MdJson.

Mode compatibility:
- Mode::Tools - Mistral-Small / Mistral-Medium / Mistral-Large
- Mode::Json - Mistral-Small / Mistral-Medium / Mistral-Large
- Mode::MdJson - Mistral 7B / Mixtral 8x7B

- Mode::Tools - supported (Mistral-Small / Mistral-Medium / Mistral-Large)
- Mode::Json - recommended (Mistral-Small / Mistral-Medium / Mistral-Large)
- Mode::MdJson - fallback mode (Mistral 7B / Mixtral 8x7B)

## Example

Expand Down Expand Up @@ -61,14 +60,30 @@ $user = $instructor
->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
responseModel: User::class,
model: 'open-mixtral-8x7b',
examples: [[
'input' => 'Ive got email Frank - their developer, who\'s 30. He asked to come back to him [email protected]. Btw, he plays on drums!',
'output' => ['age' => 30, 'name' => 'Frank', 'username' => '[email protected]', 'role' => 'developer', 'hobbies' => ['playing drums'],],
],[
'input' => 'We have a meeting with John, our new user. He is 30 years old - check his profile: @jx90.',
'output' => ['name' => 'John', 'role' => 'admin', 'hobbies' => [], 'username' => 'jx90', 'age' => 30],
]],
model: 'mistral-small-latest', //'open-mixtral-8x7b',
mode: Mode::Json,
);

print("Completed response model:\n\n");
dump($user);

assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
38 changes: 25 additions & 13 deletions docs/cookbook/examples/api_support/ollama.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,14 @@ docname: 'ollama'

## Overview

You can use Instructor with local Ollama instance. Please note that, at least currently,
OS models do not perform on par with OpenAI (GPT-3.5 or GPT-4) model.
You can use Instructor with a local Ollama instance.

Please note that, at least currently, open-source models do not perform on par with OpenAI models (GPT-3.5 or GPT-4) for complex data schemas.

Supported modes:
- Mode::MdJson - fallback mode
- Mode::Json - recommended
- Mode::Tools - supported

## Example

Expand Down Expand Up @@ -44,16 +49,14 @@ $client = new OllamaClient();
$instructor = (new Instructor)->withClient($client);

// Listen to events to print request/response data
$instructor->onEvent(RequestSentToLLM::class, function($event) {
print("Request sent to LLM:\n\n");

dump($event->request);
});
$instructor->onEvent(ResponseReceivedFromLLM::class, function($event) {
print("Received response from LLM:\n\n");

dump($event->response);
});
//$instructor->onEvent(RequestSentToLLM::class, function($event) {
// print("Request sent to LLM:\n\n");
// dump($event->request);
//});
//$instructor->onEvent(ResponseReceivedFromLLM::class, function($event) {
// print("Received response from LLM:\n\n");
// dump($event->response);
//});

$user = $instructor->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
Expand All @@ -66,7 +69,7 @@ $user = $instructor->respond(
'input' => 'We have a meeting with John, our new user. He is 30 years old - check his profile: @jx90.',
'output' => ['name' => 'John', 'role' => 'admin', 'hobbies' => [], 'username' => 'jx90', 'age' => 30],
]],
model: 'qwen2',
model: 'gemma2:2b',
mode: Mode::Json,
);

Expand All @@ -76,6 +79,15 @@ print("Completed response model:\n\n");
dump($user);

assert(isset($user->name));
assert(isset($user->role));
assert(isset($user->age));
assert(isset($user->hobbies));
assert(isset($user->username));
assert(is_array($user->hobbies));
assert(count($user->hobbies) > 0);
assert($user->role === UserType::Admin);
assert($user->age === 25);
assert($user->name === 'Jason');
assert(in_array($user->username, ['jxnlco', '@jxnlco']));
?>
```
Loading

0 comments on commit cca0a4e

Please sign in to comment.