From 2afe9278d8eec52f22ce25a6446b6a0a4b800e05 Mon Sep 17 00:00:00 2001
From: sharath <29162020+tnfssc@users.noreply.github.com>
Date: Sun, 2 Jun 2024 21:52:06 +0530
Subject: [PATCH 1/2] chore(community/webllm): upgraded @mlc/web-llm dependency and updated its ChatModel

---
 libs/langchain-community/package.json         |  4 ++--
 .../src/chat_models/webllm.ts                 | 20 ++++++------------
 yarn.lock                                     | 21 +++++++++++++------
 3 files changed, 23 insertions(+), 22 deletions(-)

diff --git a/libs/langchain-community/package.json b/libs/langchain-community/package.json
index ee6b54930c36..e4e93b6d5e78 100644
--- a/libs/langchain-community/package.json
+++ b/libs/langchain-community/package.json
@@ -86,7 +86,7 @@
     "@langchain/scripts": "~0.0.14",
     "@layerup/layerup-security": "^1.5.12",
     "@mendable/firecrawl-js": "^0.0.13",
-    "@mlc-ai/web-llm": "^0.2.35",
+    "@mlc-ai/web-llm": "^0.2.40",
     "@mozilla/readability": "^0.4.4",
     "@neondatabase/serverless": "^0.9.1",
     "@notionhq/client": "^2.2.10",
@@ -245,7 +245,7 @@
     "@huggingface/inference": "^2.6.4",
     "@layerup/layerup-security": "^1.5.12",
     "@mendable/firecrawl-js": "^0.0.13",
-    "@mlc-ai/web-llm": "^0.2.35",
+    "@mlc-ai/web-llm": "^0.2.40",
     "@mozilla/readability": "*",
     "@neondatabase/serverless": "*",
     "@notionhq/client": "^2.2.10",
diff --git a/libs/langchain-community/src/chat_models/webllm.ts b/libs/langchain-community/src/chat_models/webllm.ts
index e56d10f1f40f..86eaf7e29997 100644
--- a/libs/langchain-community/src/chat_models/webllm.ts
+++ b/libs/langchain-community/src/chat_models/webllm.ts
@@ -43,7 +43,7 @@ export interface WebLLMCallOptions extends BaseLanguageModelCallOptions {}
 export class ChatWebLLM extends SimpleChatModel {
   static inputs: WebLLMInputs;
 
-  protected engine: webllm.EngineInterface;
+  protected engine: webllm.MLCEngine;
 
   appConfig?: webllm.AppConfig;
 
@@ -63,6 +63,7 @@ export class ChatWebLLM extends SimpleChatModel {
     this.chatOptions = inputs.chatOptions;
     this.model = inputs.model;
     this.temperature = inputs.temperature;
+    this.engine = new webllm.MLCEngine();
   }
 
   _llmType() {
@@ -70,12 +71,10 @@ export class ChatWebLLM extends SimpleChatModel {
   }
 
   async initialize(progressCallback?: webllm.InitProgressCallback) {
-    this.engine = new webllm.Engine();
     if (progressCallback !== undefined) {
       this.engine.setInitProgressCallback(progressCallback);
     }
     await this.reload(this.model, this.chatOptions, this.appConfig);
-    this.engine.setInitProgressCallback(() => {});
   }
 
   async reload(
@@ -83,11 +82,7 @@ export class ChatWebLLM extends SimpleChatModel {
     newAppConfig?: webllm.AppConfig,
     newChatOpts?: webllm.ChatOptions
   ) {
-    if (this.engine !== undefined) {
-      await this.engine.reload(modelId, newAppConfig, newChatOpts);
-    } else {
-      throw new Error("Initialize model before reloading.");
-    }
+    await this.engine.reload(modelId, newChatOpts, newAppConfig);
   }
 
   async *_streamResponseChunks(
@@ -95,8 +90,6 @@ export class ChatWebLLM extends SimpleChatModel {
     options: this["ParsedCallOptions"],
     runManager?: CallbackManagerForLLMRun
   ): AsyncGenerator<ChatGenerationChunk> {
-    await this.initialize();
-
     const messagesInput: ChatCompletionMessageParam[] = messages.map(
       (message) => {
         if (typeof message.content !== "string") {
@@ -124,14 +117,13 @@ export class ChatWebLLM extends SimpleChatModel {
       }
     );
 
-    const stream = this.engine.chatCompletionAsyncChunkGenerator(
+    const stream = await this.engine.chat.completions.create(
       {
         stream: true,
         messages: messagesInput,
         stop: options.stop,
         logprobs: true,
-      },
-      {}
+      }
     );
     for await (const chunk of stream) {
       // Last chunk has undefined content
@@ -146,7 +138,7 @@ export class ChatWebLLM extends SimpleChatModel {
           },
         }),
       });
-      await runManager?.handleLLMNewToken(text ?? "");
+      await runManager?.handleLLMNewToken(text);
     }
   }
 
diff --git a/yarn.lock b/yarn.lock
index 3ab88496cd0e..75650f8d3482 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9099,7 +9099,7 @@ __metadata:
     "@langchain/scripts": ~0.0.14
     "@layerup/layerup-security": ^1.5.12
     "@mendable/firecrawl-js": ^0.0.13
-    "@mlc-ai/web-llm": ^0.2.35
+    "@mlc-ai/web-llm": ^0.2.40
     "@mozilla/readability": ^0.4.4
     "@neondatabase/serverless": ^0.9.1
     "@notionhq/client": ^2.2.10
@@ -9266,7 +9266,7 @@ __metadata:
     "@huggingface/inference": ^2.6.4
     "@layerup/layerup-security": ^1.5.12
    "@mendable/firecrawl-js": ^0.0.13
-    "@mlc-ai/web-llm": ^0.2.35
+    "@mlc-ai/web-llm": ^0.2.40
     "@mozilla/readability": "*"
     "@neondatabase/serverless": "*"
     "@notionhq/client": ^2.2.10
@@ -10445,10 +10445,12 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@mlc-ai/web-llm@npm:^0.2.35":
-  version: 0.2.35
-  resolution: "@mlc-ai/web-llm@npm:0.2.35"
-  checksum: 03c1d1847340f88474e1eeed7a91cc09e29299a1216e378385ffe5479c203d39a8656d98c9187864322453a91f046b874d7073662ab04033527079d9bb29bee3
+"@mlc-ai/web-llm@npm:^0.2.40":
+  version: 0.2.40
+  resolution: "@mlc-ai/web-llm@npm:0.2.40"
+  dependencies:
+    loglevel: ^1.9.1
+  checksum: 44d46178f7b7f899893ee8096fd4188b8c343589a10428c52f87b1b7e708f7a94b2b6315c8a6f8075f14d6d92aebfd8afc7f6d049a2ef60f8b8dc950b98a82e2
   languageName: node
   linkType: hard
 
@@ -28431,6 +28433,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"loglevel@npm:^1.9.1":
+  version: 1.9.1
+  resolution: "loglevel@npm:1.9.1"
+  checksum: e1c8586108c4d566122e91f8a79c8df728920e3a714875affa5120566761a24077ec8ec9e5fc388b022e39fc411ec6e090cde1b5775871241b045139771eeb06
+  languageName: node
+  linkType: hard
+
 "long@npm:*, long@npm:^5.2.1, long@npm:~5.2.3":
   version: 5.2.3
   resolution: "long@npm:5.2.3"

From fb633bf814b126fc2162c49646485bbd9f99aef2 Mon Sep 17 00:00:00 2001
From: jacoblee93
Date: Tue, 4 Jun 2024 14:34:47 -0700
Subject: [PATCH 2/2] Format

---
 libs/langchain-community/src/chat_models/webllm.ts | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/libs/langchain-community/src/chat_models/webllm.ts b/libs/langchain-community/src/chat_models/webllm.ts
index 86eaf7e29997..e3ecda6bd1c1 100644
--- a/libs/langchain-community/src/chat_models/webllm.ts
+++ b/libs/langchain-community/src/chat_models/webllm.ts
@@ -117,14 +117,12 @@ export class ChatWebLLM extends SimpleChatModel {
       }
     );
 
-    const stream = await this.engine.chat.completions.create(
-      {
-        stream: true,
-        messages: messagesInput,
-        stop: options.stop,
-        logprobs: true,
-      }
-    );
+    const stream = await this.engine.chat.completions.create({
+      stream: true,
+      messages: messagesInput,
+      stop: options.stop,
+      logprobs: true,
+    });
     for await (const chunk of stream) {
       // Last chunk has undefined content
       const text = chunk.choices[0].delta.content ?? "";
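Editor's note (not part of the patches above): the substance of the 0.2.35 -> 0.2.40 upgrade is that `webllm.Engine` becomes `webllm.MLCEngine`, `engine.reload()` swaps its trailing parameters to `(modelId, chatOpts, appConfig)`, and chunked completions move from `chatCompletionAsyncChunkGenerator()` to the OpenAI-style `engine.chat.completions.create({ stream: true, ... })`. Below is a minimal sketch of the post-upgrade web-llm surface, mirroring what the patched `_streamResponseChunks` does; the model ID is illustrative, and any prebuilt ID listed in `webllm.prebuiltAppConfig` should work:

```typescript
import * as webllm from "@mlc-ai/web-llm";

async function demo() {
  // Constructing the engine is now cheap and synchronous;
  // fetching weights happens later, in reload().
  const engine = new webllm.MLCEngine();
  engine.setInitProgressCallback((report) => console.log(report.text));

  // Argument order per the patched reload(): modelId, then ChatOptions, then AppConfig.
  await engine.reload("Phi-3-mini-4k-instruct-q4f16_1");

  // Streaming now goes through the OpenAI-compatible chat.completions API.
  const chunks = await engine.chat.completions.create({
    stream: true,
    messages: [{ role: "user", content: "What is 2 + 2?" }],
  });
  for await (const chunk of chunks) {
    // The last chunk can carry undefined content, hence the fallback.
    console.log(chunk.choices[0].delta.content ?? "");
  }
}

demo();
```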
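Relatedly, because patch 1 removes the implicit `await this.initialize()` from `_streamResponseChunks`, callers are now responsible for loading weights before invoking the model. A usage sketch against the patched class, assuming it is consumed through the standard `@langchain/community/chat_models/webllm` entrypoint in a browser module context (the prompt, callback, and model ID are again illustrative):

```typescript
import { ChatWebLLM } from "@langchain/community/chat_models/webllm";
import { HumanMessage } from "@langchain/core/messages";

async function run() {
  const model = new ChatWebLLM({
    model: "Phi-3-mini-4k-instruct-q4f16_1", // illustrative prebuilt model ID
    chatOptions: { temperature: 0.5 },
  });

  // Required before the first call, since streaming no longer
  // re-initializes the engine on every request.
  await model.initialize((report) => console.log(report.text));

  // .stream() comes from the chat model base class and drives the
  // patched _streamResponseChunks() above.
  const stream = await model.stream([new HumanMessage("Say hello!")]);
  for await (const chunk of stream) {
    console.log(chunk.content);
  }
}

run();
```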