Add LM Studio support (#22)
commit 5fc49231ee
parent 3529627172
committed by GitHub
src/hooks/useLMStudioModels.ts (normal file, 43 lines)
@@ -0,0 +1,43 @@
import { useCallback } from "react";
import { useAtom } from "jotai";
import {
  lmStudioModelsAtom,
  lmStudioModelsLoadingAtom,
  lmStudioModelsErrorAtom,
} from "@/atoms/localModelsAtoms";
import { IpcClient } from "@/ipc/ipc_client";

export function useLocalLMSModels() {
  const [models, setModels] = useAtom(lmStudioModelsAtom);
  const [loading, setLoading] = useAtom(lmStudioModelsLoadingAtom);
  const [error, setError] = useAtom(lmStudioModelsErrorAtom);

  const ipcClient = IpcClient.getInstance();

  /**
   * Load local models from LM Studio via the IPC bridge.
   */
  const loadModels = useCallback(async () => {
    setLoading(true);
    try {
      const modelList = await ipcClient.listLocalLMStudioModels();
      setModels(modelList);
      setError(null);

      return modelList;
    } catch (error) {
      console.error("Error loading local LMStudio models:", error);
      setError(error instanceof Error ? error : new Error(String(error)));
      return [];
    } finally {
      setLoading(false);
    }
  }, [ipcClient, setModels, setError, setLoading]);

  return {
    models,
    loading,
    error,
    loadModels,
  };
}
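
The jotai atoms imported from @/atoms/localModelsAtoms are defined outside this diff. A minimal sketch of what that module could look like, assuming the IPC call resolves to a list of objects with a modelName field (the real type in the repository may differ):

// Hypothetical sketch of src/atoms/localModelsAtoms.ts; not part of this commit.
import { atom } from "jotai";

// Assumed shape of a locally served model entry.
export interface LocalModel {
  modelName: string;
}

export const lmStudioModelsAtom = atom<LocalModel[]>([]);
export const lmStudioModelsLoadingAtom = atom<boolean>(false);
export const lmStudioModelsErrorAtom = atom<Error | null>(null);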
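
The hook only calls ipcClient.listLocalLMStudioModels(); the main-process side is not shown in this commit. A rough sketch of what such a handler might do, assuming LM Studio's local server is running on its default port (1234) and exposing the OpenAI-compatible GET /v1/models endpoint; the channel name and return shape here are assumptions:

// Hypothetical main-process handler; channel name and model shape are assumptions.
import { ipcMain } from "electron";

interface LocalModel {
  modelName: string;
}

ipcMain.handle("local-models:list-lmstudio", async (): Promise<LocalModel[]> => {
  // LM Studio serves an OpenAI-compatible API on localhost:1234 by default.
  const response = await fetch("http://localhost:1234/v1/models");
  if (!response.ok) {
    throw new Error(`LM Studio server returned HTTP ${response.status}`);
  }
  const body = (await response.json()) as { data: { id: string }[] };
  return body.data.map((entry) => ({ modelName: entry.id }));
});

If the server is not running, the fetch rejects, the hook's catch branch records the error, and the UI can surface it through the error field.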
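
For completeness, a hypothetical renderer component showing how the hook is consumed; the component name and the modelName field are illustrative, not part of this commit:

// Hypothetical usage in a .tsx renderer component.
import { useEffect } from "react";
import { useLocalLMSModels } from "@/hooks/useLMStudioModels";

export function LMStudioModelList() {
  const { models, loading, error, loadModels } = useLocalLMSModels();

  // Ask the main process for the model list once on mount.
  useEffect(() => {
    loadModels();
  }, [loadModels]);

  if (loading) return <p>Loading LM Studio models...</p>;
  if (error) return <p>Could not reach LM Studio: {error.message}</p>;

  return (
    <ul>
      {models.map((model) => (
        <li key={model.modelName}>{model.modelName}</li>
      ))}
    </ul>
  );
}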