Skip to content

Commit 1dfc26f

Browse files
feat: show available models list in settings (#83)
* create utils/models
* add availableModels prop
* fetch models on startup and key change
* remove default model list
* consider model fetch for loading spinner
* clearer description
* remove accidental debug delay
* Update src/components/App.tsx
* Update src/components/App.tsx
* Update src/components/App.tsx
* distinguish isAnythingSaving and isAnythingLoading

---------

Co-authored-by: t11s <[email protected]>
1 parent 54ebe4c commit 1dfc26f

File tree

4 files changed

+82
-6
lines changed

4 files changed

+82
-6
lines changed

src/components/App.tsx

+50-2
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ import {
4444
} from "../utils/fluxNode";
4545
import { useLocalStorage } from "../utils/lstore";
4646
import { mod } from "../utils/mod";
47+
import { getAvailableChatModels } from "../utils/models";
4748
import { generateNodeId, generateStreamId } from "../utils/nodeId";
4849
import { messagesFromLineage, promptFromLineage } from "../utils/prompt";
4950
import { getQueryParam, resetURL } from "../utils/qparams";
@@ -864,11 +865,57 @@ function App() {
864865

865866
const [apiKey, setApiKey] = useLocalStorage<string>(API_KEY_LOCAL_STORAGE_KEY);
866867

867-
const isAnythingLoading = isSavingReactFlow || isSavingSettings;
868+
const [availableModels, setAvailableModels] = useState<string[] | null>(null);
869+
870+
// modelsLoadCounter lets us discard the results of the requests if a concurrent newer one was made.
871+
const modelsLoadCounter = useRef(0);
872+
useEffect(() => {
873+
if (isValidAPIKey(apiKey)) {
874+
const modelsLoadIndex = modelsLoadCounter.current + 1;
875+
modelsLoadCounter.current = modelsLoadIndex;
876+
877+
setAvailableModels(null);
878+
879+
(async () => {
880+
let modelList: string[] = [];
881+
try {
882+
modelList = await getAvailableChatModels(apiKey!);
883+
} catch (e) {
884+
toast({
885+
title: "Failed to load model list!",
886+
status: "error",
887+
...TOAST_CONFIG,
888+
});
889+
}
890+
if (modelsLoadIndex !== modelsLoadCounter.current) return;
891+
892+
if (modelList.length === 0) modelList.push(settings.model);
893+
894+
setAvailableModels(modelList);
895+
896+
if (!modelList.includes(settings.model)) {
897+
const oldModel = settings.model;
898+
const newModel = modelList.includes(DEFAULT_SETTINGS.model) ? DEFAULT_SETTINGS.model : modelList[0];
899+
900+
setSettings((settings) => ({ ...settings, model: newModel }));
901+
902+
toast({
903+
title: `Model "${oldModel}" no longer available!`,
904+
description: `Switched to "${newModel}"`,
905+
status: "warning",
906+
...TOAST_CONFIG,
907+
});
908+
}
909+
})();
910+
}
911+
}, [apiKey]);
912+
913+
const isAnythingSaving = isSavingReactFlow || isSavingSettings;
914+
const isAnythingLoading = isAnythingSaving || (availableModels === null);
868915

869916
useBeforeunload((event: BeforeUnloadEvent) => {
870917
// Prevent leaving the page before saving.
871-
if (isAnythingLoading) event.preventDefault();
918+
if (isAnythingSaving) event.preventDefault();
872919
});
873920

874921
/*//////////////////////////////////////////////////////////////
@@ -1000,6 +1047,7 @@ function App() {
10001047
onClose={onCloseSettingsModal}
10011048
apiKey={apiKey}
10021049
setApiKey={setApiKey}
1050+
availableModels={availableModels}
10031051
/>
10041052
<Column
10051053
mainAxisAlignment="center"

src/components/modals/SettingsModal.tsx

+4-2
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { MIXPANEL_TOKEN } from "../../main";
22
import { getFluxNodeTypeDarkColor } from "../../utils/color";
3-
import { DEFAULT_SETTINGS, SUPPORTED_MODELS } from "../../utils/constants";
3+
import { DEFAULT_SETTINGS } from "../../utils/constants";
44
import { Settings, FluxNodeType } from "../../utils/types";
55
import { APIKeyInput } from "../utils/APIKeyInput";
66
import { LabeledSelect, LabeledSlider } from "../utils/LabeledInputs";
@@ -26,13 +26,15 @@ export const SettingsModal = memo(function SettingsModal({
2626
setSettings,
2727
apiKey,
2828
setApiKey,
29+
availableModels
2930
}: {
3031
isOpen: boolean;
3132
onClose: () => void;
3233
settings: Settings;
3334
setSettings: (settings: Settings) => void;
3435
apiKey: string | null;
3536
setApiKey: (apiKey: string) => void;
37+
availableModels: string[] | null;
3638
}) {
3739
const reset = () => {
3840
if (
@@ -78,7 +80,7 @@ export const SettingsModal = memo(function SettingsModal({
7880
<LabeledSelect
7981
label="Model"
8082
value={settings.model}
81-
options={SUPPORTED_MODELS}
83+
options={availableModels || [settings.model]}
8284
setValue={(v: string) => {
8385
setSettings({ ...settings, model: v });
8486

src/utils/constants.ts

-2
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,6 @@ export const REACT_FLOW_NODE_TYPES: Record<
1515
LabelUpdater: LabelUpdaterNode,
1616
};
1717

18-
export const SUPPORTED_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"];
19-
2018
export const DEFAULT_SETTINGS: Settings = {
2119
temp: 1.2,
2220
n: 3,

src/utils/models.ts

+28
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
export function getAvailableModels(apiKey: string): Promise<string[]> {
2+
return new Promise(async (resolve, reject) => {
3+
try {
4+
const response = await fetch("https://api.openai.com/v1/models", {
5+
method: "GET",
6+
headers: {
7+
Authorization: `Bearer ${apiKey}`,
8+
},
9+
})
10+
const data = await response.json();
11+
resolve(data.data.map((model: any) => model.id).sort());
12+
} catch (err) {
13+
reject(err);
14+
}
15+
});
16+
};
17+
18+
export function getAvailableChatModels(apiKey: string): Promise<string[]> {
19+
return new Promise((resolve, reject) => {
20+
getAvailableModels(apiKey)
21+
.then((models) => {
22+
resolve(models.filter((model) => model.startsWith("gpt-")));
23+
})
24+
.catch((err) => {
25+
reject(err);
26+
});
27+
});
28+
};

0 commit comments

Comments
 (0)