-
Notifications
You must be signed in to change notification settings - Fork 358
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
e2e: init llamaindex e2e test (#697)
- Loading branch information
Showing
18 changed files
with
407 additions
and
6 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
logs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
# LlamaIndexTS Core E2E Tests

## Overview

We are using the Node.js Test Runner to run E2E tests for LlamaIndexTS Core.

It supports the following features:

- Run tests in parallel
- Pure Node.js environment
- Switch between mock and real LLM API
- Customizable logic

## Usage

- Run with mock register:

```shell
node --import tsx --import ./mock-register.js --test ./node/basic.e2e.ts
```

- Run without mock register:

```shell
node --import tsx --test ./node/basic.e2e.ts
```

- Run with a specific test:

```shell
node --import tsx --import ./mock-register.js --test-name-pattern=agent --test ./node/basic.e2e.ts
```

- Run with debug logs:

```shell
CONSOLA_LEVEL=5 node --import tsx --import ./mock-register.js --test-name-pattern=agent --test ./node/basic.e2e.ts
```
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
import { faker } from "@faker-js/faker"; | ||
import type { | ||
ChatResponse, | ||
ChatResponseChunk, | ||
CompletionResponse, | ||
LLM, | ||
LLMChatParamsNonStreaming, | ||
LLMChatParamsStreaming, | ||
LLMCompletionParamsNonStreaming, | ||
LLMCompletionParamsStreaming, | ||
} from "llamaindex/llm/types"; | ||
|
||
export function getOpenAISession() { | ||
return {}; | ||
} | ||
|
||
export function isFunctionCallingModel() { | ||
return true; | ||
} | ||
|
||
export class OpenAI implements LLM { | ||
get metadata() { | ||
return { | ||
model: "mock-model", | ||
temperature: 0.1, | ||
topP: 1, | ||
contextWindow: 2048, | ||
tokenizer: undefined, | ||
isFunctionCallingModel: true, | ||
}; | ||
} | ||
chat( | ||
params: LLMChatParamsStreaming<Record<string, unknown>>, | ||
): Promise<AsyncIterable<ChatResponseChunk>>; | ||
chat( | ||
params: LLMChatParamsNonStreaming<Record<string, unknown>>, | ||
): Promise<ChatResponse>; | ||
chat( | ||
params: | ||
| LLMChatParamsStreaming<Record<string, unknown>> | ||
| LLMChatParamsNonStreaming<Record<string, unknown>>, | ||
): unknown { | ||
if (params.stream) { | ||
return { | ||
[Symbol.asyncIterator]: async function* () { | ||
yield { | ||
delta: faker.word.words(), | ||
} satisfies ChatResponseChunk; | ||
}, | ||
}; | ||
} | ||
return { | ||
message: { | ||
content: faker.lorem.paragraph(), | ||
role: "assistant", | ||
}, | ||
} satisfies ChatResponse; | ||
} | ||
complete( | ||
params: LLMCompletionParamsStreaming, | ||
): Promise<AsyncIterable<CompletionResponse>>; | ||
complete( | ||
params: LLMCompletionParamsNonStreaming, | ||
): Promise<CompletionResponse>; | ||
async complete(params: unknown): Promise<unknown> { | ||
throw new Error("Method not implemented."); | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
/**
 * Node ESM loader hook: redirects imports of built package modules
 * (../dist/*.js) to TypeScript fixture files under ./fixtures whenever a
 * fixture with the matching relative path exists on disk.
 */
import { stat } from "node:fs/promises";
import { join, relative } from "node:path";
import { fileURLToPath, pathToFileURL } from "node:url";

const packageDistDir = fileURLToPath(new URL("../dist", import.meta.url));
const fixturesDir = fileURLToPath(new URL("./fixtures", import.meta.url));

export async function resolve(specifier, context, nextResolve) {
  const resolved = await nextResolve(specifier, context);
  // Never touch Node built-ins.
  if (resolved.format === "builtin" || resolved.url.startsWith("node:")) {
    return resolved;
  }
  // Map dist/foo.js -> foo.ts and locate it relative to the dist dir.
  const sourcePath = fileURLToPath(resolved.url).replace(/\.js$/, ".ts");
  const relativePath = relative(packageDistDir, sourcePath);
  // Outside the package dist tree: leave the resolution untouched.
  if (relativePath.startsWith(".") || relativePath.startsWith("/")) {
    return resolved;
  }
  const fixtureUrl = pathToFileURL(join(fixturesDir, relativePath)).toString();
  // Use the fixture only when it exists as a regular file; a missing
  // fixture (ENOENT) is expected, any other fs error is re-thrown.
  let hasFixture;
  try {
    hasFixture = (await stat(fileURLToPath(fixtureUrl))).isFile();
  } catch (err) {
    if (err.code !== "ENOENT") {
      throw err;
    }
    hasFixture = false;
  }
  if (!hasFixture) {
    return resolved;
  }
  return {
    url: fixtureUrl,
    format: "module",
  };
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
// Installs the fixture-substituting loader hooks (./mock-module.js) into
// Node's ESM resolution pipeline; must be --import'ed before test modules.
import { register } from "node:module";

// The hook module path is resolved relative to this file's URL.
register("./mock-module.js", import.meta.url);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,139 @@ | ||
/* eslint-disable @typescript-eslint/no-floating-promises */ | ||
import { consola } from "consola"; | ||
import { | ||
OpenAI, | ||
OpenAIAgent, | ||
Settings, | ||
type LLM, | ||
type LLMEndEvent, | ||
type LLMStartEvent, | ||
} from "llamaindex"; | ||
import { ok } from "node:assert"; | ||
import type { WriteStream } from "node:fs"; | ||
import { createWriteStream } from "node:fs"; | ||
import { mkdir } from "node:fs/promises"; | ||
import { join } from "node:path"; | ||
import { after, before, beforeEach, describe, test } from "node:test"; | ||
import { inspect } from "node:util"; | ||
|
||
let llm: LLM; | ||
let fsStream: WriteStream; | ||
before(async () => { | ||
const logUrl = new URL( | ||
join( | ||
"..", | ||
"logs", | ||
`basic.e2e.${new Date().toISOString().replace(/:/g, "-").replace(/\./g, "-")}.log`, | ||
), | ||
import.meta.url, | ||
); | ||
await mkdir(new URL(".", logUrl), { recursive: true }); | ||
fsStream = createWriteStream(logUrl, { | ||
encoding: "utf-8", | ||
}); | ||
}); | ||
|
||
after(() => { | ||
fsStream.end(); | ||
}); | ||
|
||
beforeEach((s) => { | ||
fsStream.write("start: " + s.name + "\n"); | ||
}); | ||
|
||
const llmEventStartHandler = (event: LLMStartEvent) => { | ||
const { payload } = event.detail; | ||
fsStream.write( | ||
"llmEventStart: " + | ||
inspect(payload, { | ||
depth: Infinity, | ||
}) + | ||
"\n", | ||
); | ||
}; | ||
|
||
const llmEventEndHandler = (event: LLMEndEvent) => { | ||
const { payload } = event.detail; | ||
fsStream.write( | ||
"llmEventEnd: " + | ||
inspect(payload, { | ||
depth: Infinity, | ||
}) + | ||
"\n", | ||
); | ||
}; | ||
|
||
before(() => { | ||
Settings.llm = new OpenAI({ | ||
model: "gpt-3.5-turbo", | ||
}); | ||
llm = Settings.llm; | ||
Settings.callbackManager.on("llm-start", llmEventStartHandler); | ||
Settings.callbackManager.on("llm-end", llmEventEndHandler); | ||
}); | ||
|
||
after(() => { | ||
Settings.callbackManager.off("llm-start", llmEventStartHandler); | ||
Settings.callbackManager.off("llm-end", llmEventEndHandler); | ||
}); | ||
|
||
describe("llm", () => { | ||
test("llm.chat", async () => { | ||
const response = await llm.chat({ | ||
messages: [ | ||
{ | ||
content: "Hello", | ||
role: "user", | ||
}, | ||
], | ||
}); | ||
consola.debug("response:", response); | ||
ok(typeof response.message.content === "string"); | ||
}); | ||
|
||
test("stream llm.chat", async () => { | ||
const iter = await llm.chat({ | ||
stream: true, | ||
messages: [ | ||
{ | ||
content: "hello", | ||
role: "user", | ||
}, | ||
], | ||
}); | ||
for await (const chunk of iter) { | ||
consola.debug("chunk:", chunk); | ||
ok(typeof chunk.delta === "string"); | ||
} | ||
}); | ||
}); | ||
|
||
describe("agent", () => { | ||
test("agent.chat", async () => { | ||
const agent = new OpenAIAgent({ | ||
tools: [ | ||
{ | ||
call: async () => { | ||
return "35 degrees and sunny in San Francisco"; | ||
}, | ||
metadata: { | ||
name: "Weather", | ||
description: "Get the weather", | ||
parameters: { | ||
type: "object", | ||
properties: { | ||
location: { type: "string" }, | ||
}, | ||
required: ["location"], | ||
}, | ||
}, | ||
}, | ||
], | ||
}); | ||
const result = await agent.chat({ | ||
message: "What is the weather in San Francisco?", | ||
}); | ||
consola.debug("response:", result.response); | ||
ok(typeof result.response === "string"); | ||
}); | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
{
  "name": "@llamaindex/core-e2e",
  "private": true,
  "version": "0.0.2",
  "type": "module",
  "scripts": {
    "e2e": "node --import tsx --import ./mock-register.js --test ./node/*.e2e.ts",
    "e2e:nomock": "node --import tsx --test ./node/*.e2e.ts"
  },
  "devDependencies": {
    "@faker-js/faker": "^8.4.1",
    "consola": "^3.2.3",
    "llamaindex": "workspace:*",
    "tsx": "^4.7.2"
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
{
  "extends": "../../../tsconfig.json",
  "compilerOptions": {
    "outDir": "./lib",
    "module": "node16",
    "moduleResolution": "node16",
    "target": "ESNext"
  },
  "include": [
    "./**/*.ts",
    "./mock-module.js",
    "./mock-register.js",
    "./fixtures"
  ],
  "references": [
    {
      "path": "../../core/tsconfig.json"
    },
    {
      "path": "../../env/tsconfig.json"
    }
  ]
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.