Skip to content

Commit

Permalink
Feature/seq agents (#2798)
Browse files Browse the repository at this point in the history
* update build functions

* sequential agents

* update langchain to 0.2, added sequential agent nodes

* add marketplace templates

* update how-to wordings

* Merge branch 'main' into feature/Seq-Agents

# Conflicts:
#	pnpm-lock.yaml

* update deprecated functions and add new sequential nodes

* add marketplace templates

* update marketplace templates, add structured output to llm node

* add multi agents template

* update llm node with bindmodels

* update cypress version

* update templates sticky note wordings

* update tool node to include human in loop action

* update structured outputs error from models

* update cohere package to resolve google genai pipeThrough bug

* update mistral package version, added message reconstruction before invoking seq agent

* add HITL to agent

* update state messages restructuring

* update load and split methods for s3 directory
  • Loading branch information
HenryHengZJ authored Jul 22, 2024
1 parent 34d0e43 commit bca4de0
Show file tree
Hide file tree
Showing 152 changed files with 55,111 additions and 35,040 deletions.
8 changes: 4 additions & 4 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
"@babel/preset-env": "^7.19.4",
"@babel/preset-typescript": "7.18.6",
"@types/express": "^4.17.13",
"@typescript-eslint/typescript-estree": "^5.39.0",
"@typescript-eslint/typescript-estree": "^7.13.1",
"eslint": "^8.24.0",
"eslint-config-prettier": "^8.3.0",
"eslint-config-react-app": "^7.0.1",
Expand All @@ -50,7 +50,7 @@
"rimraf": "^3.0.2",
"run-script-os": "^1.1.6",
"turbo": "1.10.16",
"typescript": "^4.8.4"
"typescript": "^5.4.5"
},
"pnpm": {
"onlyBuiltDependencies": [
Expand All @@ -63,8 +63,8 @@
"pnpm": ">=9"
},
"resolutions": {
"@qdrant/openapi-typescript-fetch": "1.2.1",
"@google/generative-ai": "^0.7.0",
"@qdrant/openapi-typescript-fetch": "1.2.6",
"@google/generative-ai": "^0.15.0",
"openai": "4.51.0"
},
"eslintIgnore": [
Expand Down
8 changes: 4 additions & 4 deletions packages/components/nodes/chains/ApiChain/OpenAPIChain.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import { ChatOpenAI } from '@langchain/openai'
import { APIChain, createOpenAPIChain } from 'langchain/chains'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { checkInputs, Moderation, streamResponse } from '../../moderation/Moderation'
import { formatResponse } from '../../outputparsers/OutputParserHelpers'
import { getFileFromStorage } from '../../../src'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'

class OpenApiChain_Chains implements INode {
label: string
Expand All @@ -29,9 +29,9 @@ class OpenApiChain_Chains implements INode {
this.baseClasses = [this.type, ...getBaseClasses(APIChain)]
this.inputs = [
{
label: 'ChatOpenAI Model',
label: 'Chat Model',
name: 'model',
type: 'ChatOpenAI'
type: 'BaseChatModel'
},
{
label: 'YAML Link',
Expand Down Expand Up @@ -96,7 +96,7 @@ class OpenApiChain_Chains implements INode {
}

const initChain = async (nodeData: INodeData, options: ICommonObject) => {
const model = nodeData.inputs?.model as ChatOpenAI
const model = nodeData.inputs?.model as BaseChatModel
const headers = nodeData.inputs?.headers as string
const yamlLink = nodeData.inputs?.yamlLink as string
const yamlFileBase64 = nodeData.inputs?.yamlFile as string
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import { BaseCache } from '@langchain/core/caches'
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
import { BaseBedrockInput } from '@langchain/community/dist/utils/bedrock'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BedrockChat } from './FlowiseAWSChatBedrock'
import { getModels, getRegions, MODEL_TYPE } from '../../../src/modelLoader'
import { BedrockChatFields } from '@langchain/community/chat_models/bedrock'

/**
* @author Michael Connor <[email protected]>
Expand Down Expand Up @@ -116,7 +115,7 @@ class AWSChatBedrock_ChatModels implements INode {
const cache = nodeData.inputs?.cache as BaseCache
const streaming = nodeData.inputs?.streaming as boolean

const obj: BaseBedrockInput & BaseChatModelParams = {
const obj: BedrockChatFields = {
region: iRegion,
model: customModel ? customModel : iModel,
maxTokens: parseInt(iMax_tokens_to_sample, 10),
Expand Down Expand Up @@ -154,7 +153,7 @@ class AWSChatBedrock_ChatModels implements INode {
}

const amazonBedrock = new BedrockChat(nodeData.id, obj)
if (obj.model.includes('anthropic.claude-3')) amazonBedrock.setMultiModalOption(multiModalOption)
if (obj.model?.includes('anthropic.claude-3')) amazonBedrock.setMultiModalOption(multiModalOption)
return amazonBedrock
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
import { BedrockChat as LCBedrockChat } from '@langchain/community/chat_models/bedrock'
import { BaseBedrockInput } from '@langchain/community/dist/utils/bedrock'
import { BedrockChatFields, BedrockChat as LCBedrockChat } from '@langchain/community/chat_models/bedrock'
import { IVisionChatModal, IMultiModalOption } from '../../../src'

export class BedrockChat extends LCBedrockChat implements IVisionChatModal {
Expand All @@ -9,16 +7,16 @@ export class BedrockChat extends LCBedrockChat implements IVisionChatModal {
multiModalOption: IMultiModalOption
id: string

constructor(id: string, fields: BaseBedrockInput & BaseChatModelParams) {
constructor(id: string, fields: BedrockChatFields) {
super(fields)
this.id = id
this.configuredModel = fields?.model || ''
this.configuredMaxToken = fields?.maxTokens
}

revertToOriginalModel(): void {
super.model = this.configuredModel
super.maxTokens = this.configuredMaxToken
this.model = this.configuredModel
this.maxTokens = this.configuredMaxToken
}

setMultiModalOption(multiModalOption: IMultiModalOption): void {
Expand All @@ -27,8 +25,8 @@ export class BedrockChat extends LCBedrockChat implements IVisionChatModal {

setVisionModel(): void {
if (!this.model.startsWith('claude-3')) {
super.model = 'anthropic.claude-3-haiku-20240307-v1:0'
super.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 1024
this.model = 'anthropic.claude-3-haiku-20240307-v1:0'
this.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 1024
}
}
}
57 changes: 0 additions & 57 deletions packages/components/nodes/chatmodels/Bittensor/Bittensor.ts

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ export class ChatAnthropic extends LangchainChatAnthropic implements IVisionChat
}

revertToOriginalModel(): void {
super.modelName = this.configuredModel
super.maxTokens = this.configuredMaxToken
this.modelName = this.configuredModel
this.maxTokens = this.configuredMaxToken
}

setMultiModalOption(multiModalOption: IMultiModalOption): void {
Expand All @@ -26,8 +26,8 @@ export class ChatAnthropic extends LangchainChatAnthropic implements IVisionChat

setVisionModel(): void {
if (!this.modelName.startsWith('claude-3')) {
super.modelName = 'claude-3-haiku-20240307'
super.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 2048
this.modelName = 'claude-3-haiku-20240307'
this.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 2048
}
}
}
Loading

0 comments on commit bca4de0

Please sign in to comment.