fix: type and log
linonetwo committed Apr 13, 2024
1 parent b350570 commit 16949cd
Showing 3 changed files with 15 additions and 7 deletions.
9 changes: 7 additions & 2 deletions src/services/languageModel/index.ts
@@ -22,7 +22,6 @@ import { IPreferenceService } from '@services/preferences/interface';
import serviceIdentifier from '@services/serviceIdentifier';
import { IWindowService } from '@services/windows/interface';
import { WindowNames } from '@services/windows/WindowProperties';
import { NoBinaryFoundError } from 'node-llama-cpp';
import { ILanguageModelAPIResponse, ILanguageModelService, IRunLLAmaOptions, LanguageModelRunner } from './interface';
import { LLMWorker } from './llmWorker/index';

@@ -150,6 +149,12 @@ export class LanguageModel implements ILanguageModelService {
let observable;
if (options.loadModelOnly === true) {
observable = worker.loadLLama({ ...config, modelPath }, conversationID);
observable.subscribe({
complete: () => {
this.updateModelLoaded({ [runner]: true });
this.updateModelLoadProgress({ [runner]: 1 });
},
});
} else {
// load and run model
const texts = { timeout: i18n.t('LanguageModel.GenerationTimeout'), disposed: i18n.t('LanguageModel.ModelDisposed') };
@@ -197,7 +202,7 @@
this.updateModelLoaded({ [runner]: false });
this.updateModelLoadProgress({ [runner]: 0 });
const message = `${(error as Error).message} ${(error as Error).stack ?? 'no stack'}`;
if (error instanceof NoBinaryFoundError) {
if (message.includes('NoBinaryFound')) {
void this.nativeService.showElectronMessageBox({
title: i18n.t('LanguageModel.NoBinaryFoundError'),
message,
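
Note on the hunks above: the added subscription marks the runner as loaded and sets its progress to 1 once the load-only observable completes, and the catch block now matches the error by message text instead of `instanceof NoBinaryFoundError` (whose import is dropped). A minimal sketch, with illustrative names rather than the project's actual worker wiring, of why a message check is the more robust test when the error originates in a worker: re-throwing or structured-cloning rebuilds it as a plain Error, so class identity is lost while the message text survives.

// Hypothetical illustration only; not the project's code.
class NoBinaryFoundError extends Error {}

function rethrowAcrossBoundary(error: unknown): Error {
  // Simulates an error being serialized across a worker/IPC boundary
  // and rebuilt as a plain Error on the other side.
  return new Error((error as Error).message);
}

const original = new NoBinaryFoundError('NoBinaryFound: no prebuilt binary for this platform');
const received = rethrowAcrossBoundary(original);

console.log(received instanceof NoBinaryFoundError); // false – class identity is gone
console.log(received.message.includes('NoBinaryFound')); // true – message text survives
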
2 changes: 1 addition & 1 deletion src/services/languageModel/interface.ts
@@ -1,6 +1,6 @@
import { LanguageModelChannel } from '@/constants/channels';
import { ProxyPropertyType } from 'electron-ipc-cat/common';
import { LLamaChatPromptOptions, LlamaModelOptions } from 'node-llama-cpp';
import type { LLamaChatPromptOptions, LlamaModelOptions } from 'node-llama-cpp';
import type { Observable } from 'rxjs';

export enum LanguageModelRunner {
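
A hedged note on the one-line change above: `import type` is erased during compilation, so this interface module no longer pulls node-llama-cpp (and its native bindings) into the emitted JavaScript of whichever process imports it. The interface below is a made-up example, not the project's real one, just to show the shape of a type-only usage.

// Purely illustrative sketch; IExampleLoadOptions is a hypothetical name.
import type { LlamaModelOptions } from 'node-llama-cpp'; // erased from the emitted JavaScript

export interface IExampleLoadOptions {
  conversationID: string;
  loadConfig: Partial<LlamaModelOptions> & Pick<LlamaModelOptions, 'modelPath'>;
}
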
11 changes: 7 additions & 4 deletions src/services/languageModel/llmWorker/llamaCpp.ts
@@ -2,7 +2,7 @@ import { LLAMA_PREBUILT_BINS_DIRECTORY } from './preload';

import debounce from 'lodash/debounce';
import { getLlama, Llama, LlamaChatSession, LlamaContext, LlamaContextSequence, LlamaModel, LlamaModelOptions } from 'node-llama-cpp';
import { Observable } from 'rxjs';
import { Observable, Subscriber } from 'rxjs';
import { ILanguageModelWorkerResponse, IRunLLAmaOptions } from '../interface';
import { DEFAULT_TIMEOUT_DURATION } from './constants';

@@ -19,7 +19,7 @@ export function loadLLamaAndModel(
// subscriber.next({ message: 'async importing library', ...loggerCommonMeta });
return new Observable<ILanguageModelWorkerResponse>((subscriber) => {
async function loadLLamaAndModelIIFE() {
subscriber.next({ message: 'library loaded, new LLM now', ...loggerCommonMeta });
subscriber.next({ message: `library loaded, new LLM now with LLAMA_PREBUILT_BINS_DIRECTORY ${LLAMA_PREBUILT_BINS_DIRECTORY}`, ...loggerCommonMeta });
try {
llamaInstance = await getLlama({
skipDownload: true,
@@ -42,7 +42,7 @@ ...loadConfigOverwrite,
...loadConfigOverwrite,
};
modelInstance = await llamaInstance.loadModel(loadConfig);
subscriber.next({ message: 'instance loaded', ...loggerCommonMeta });
subscriber.next({ message: 'loadLLamaAndModel instance loaded', ...loggerCommonMeta });
subscriber.complete();
} catch (error) {
console.error(error);
@@ -58,9 +58,11 @@ async function waitLoadLLamaAndModel(
async function waitLoadLLamaAndModel(
loadConfigOverwrite: Partial<LlamaModelOptions> & Pick<LlamaModelOptions, 'modelPath'>,
conversationID: string,
subscriber: Subscriber<ILanguageModelWorkerResponse>,
): Promise<LlamaModel> {
return await new Promise((resolve, reject) => {
loadLLamaAndModel(loadConfigOverwrite, conversationID).subscribe({
next: subscriber.next.bind(subscriber),
complete: () => {
resolve(modelInstance!);
},
@@ -96,7 +98,8 @@ export function runLLama(
void (async function runLLamaObservableIIFE() {
try {
if (modelInstance === undefined) {
modelInstance = await waitLoadLLamaAndModel(loadConfig, conversationID);
subscriber.next({ message: `waitLoadLLamaAndModel with LLAMA_PREBUILT_BINS_DIRECTORY ${LLAMA_PREBUILT_BINS_DIRECTORY}`, ...loggerCommonMeta });
modelInstance = await waitLoadLLamaAndModel(loadConfig, conversationID, subscriber);
} else {
// tell UI we have model loaded already.
subscriber.next({
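
Note on the llamaCpp.ts hunks above: the new `subscriber` parameter threads the outer observable's subscriber into `waitLoadLLamaAndModel`, and `next: subscriber.next.bind(subscriber)` forwards the inner load observable's progress messages upstream instead of dropping them. A minimal sketch of that forwarding pattern with illustrative names (not the project's actual code):

import { Observable, Subscriber } from 'rxjs';

interface IProgressMessage {
  message: string;
}

// Inner observable that emits load progress, then completes.
function innerLoad(): Observable<IProgressMessage> {
  return new Observable((subscriber) => {
    subscriber.next({ message: 'loading model' });
    subscriber.next({ message: 'model loaded' });
    subscriber.complete();
  });
}

// Waits for the inner load while forwarding every progress message
// to the outer subscriber by binding `next`.
function waitForLoad(outer: Subscriber<IProgressMessage>): Promise<void> {
  return new Promise((resolve, reject) => {
    innerLoad().subscribe({
      next: outer.next.bind(outer),
      complete: () => resolve(),
      error: (error) => reject(error),
    });
  });
}

function run(): Observable<IProgressMessage> {
  return new Observable((outer) => {
    void (async () => {
      try {
        await waitForLoad(outer);
        outer.next({ message: 'running inference' });
        outer.complete();
      } catch (error) {
        outer.error(error);
      }
    })();
  });
}

run().subscribe({ next: ({ message }) => console.log(message) });
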
