Skip to content

Commit

Permalink
[Widgets refactor] typing use enums
Browse files Browse the repository at this point in the history
  • Loading branch information
mishig25 committed Oct 30, 2023
1 parent d0180ef commit 9164314
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 16 deletions.
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
<script lang="ts">
import type { WidgetProps, ModelLoadInfo, LoadState } from "../types";
import { type WidgetProps, type ModelLoadInfo, LoadState, ComputeType } from "../types";
import IconAzureML from "../../../Icons/IconAzureML.svelte";
export let model: WidgetProps["model"];
Expand All @@ -9,19 +8,19 @@
export let modelLoadInfo: ModelLoadInfo | undefined = undefined;
const state = {
Loadable: "This model can be loaded on the Inference API on-demand.",
Loaded: "This model is currently loaded and running on the Inference API.",
TooBig:
[LoadState.Loadable]: "This model can be loaded on the Inference API on-demand.",
[LoadState.Loaded]: "This model is currently loaded and running on the Inference API.",
[LoadState.TooBig]:
"Model is too large to load onto the free Inference API. To try the model, launch it on Inference Endpoints instead.",
error: "⚠️ This model could not be loaded by the inference API. ⚠️",
[LoadState.Error]: "⚠️ This model could not be loaded by the inference API. ⚠️",
} as const;
const azureState = {
Loadable: "This model can be loaded loaded on AzureML Managed Endpoint",
Loaded: "This model is loaded and running on AzureML Managed Endpoint",
TooBig:
[LoadState.Loadable]: "This model can be loaded on AzureML Managed Endpoint",
[LoadState.Loaded]: "This model is loaded and running on AzureML Managed Endpoint",
[LoadState.TooBig]:
"Model is too large to load onto the free Inference API. To try the model, launch it on Inference Endpoints instead.",
error: "⚠️ This model could not be loaded.",
[LoadState.Error]: "⚠️ This model could not be loaded.",
} as const;
function getStatusReport(
Expand All @@ -32,15 +31,15 @@
if (!modelLoadInfo) {
return "Model state unknown";
}
if (modelLoadInfo.compute_type === "cpu" && modelLoadInfo.state === "Loaded" && !isAzure) {
if (modelLoadInfo.compute_type === ComputeType.CPU && modelLoadInfo.state === LoadState.Loaded && !isAzure) {
return `The model is loaded and running on <a class="hover:underline" href="https://huggingface.co/intel" target="_blank">Intel Xeon 3rd Gen Scalable CPU</a>`;
}
return statuses[modelLoadInfo.state];
}
function getComputeTypeMsg(): string {
const computeType = modelLoadInfo?.compute_type ?? "cpu";
if (computeType === "cpu") {
const computeType = modelLoadInfo?.compute_type ?? ComputeType.CPU;
if (computeType === ComputeType.CPU) {
return "Intel Xeon 3rd Gen Scalable cpu";
}
return computeType;
Expand Down
3 changes: 2 additions & 1 deletion js/src/lib/components/InferenceWidget/shared/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import type { ModelData } from "../../../interfaces/Types";
import { randomItem, parseJSON } from "../../../utils/ViewUtils";
import type { WidgetExample } from "./WidgetExample";
import type { ModelLoadInfo, TableData } from "./types";
import { LoadState } from "./types";

type KeysOfUnion<T> = T extends any ? keyof T : never;
export type QueryParam = KeysOfUnion<WidgetExample>;
Expand Down Expand Up @@ -186,7 +187,7 @@ export async function getModelLoadInfo(
return { compute_type, state };
} else {
console.warn(response.status, output.error);
return { state: "error" };
return { state: LoadState.Error };
}
}

Expand Down
12 changes: 10 additions & 2 deletions js/src/lib/components/InferenceWidget/shared/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,17 @@ export interface ExampleRunOpts {
inferenceOpts?: InferenceRunFlags;
}

export type LoadState = "Loadable" | "Loaded" | "TooBig" | "error";
/**
 * Loading status of a model on the Inference API, as reported via
 * `ModelLoadInfo.state` (populated by `getModelLoadInfo` in helpers.ts).
 */
export enum LoadState {
	Loadable = "Loadable",
	Loaded = "Loaded",
	TooBig = "TooBig",
	// NOTE(review): lowercase "error" intentionally differs from the other
	// members' casing — it matches the value previously returned on API
	// failure (`{ state: "error" }`); confirm the wire format before
	// normalizing.
	Error = "error",
}

export type ComputeType = "cpu" | "gpu";
/**
 * Hardware class a model runs on, as carried in
 * `ModelLoadInfo.compute_type`. Values are lowercase, matching the
 * previous `"cpu" | "gpu"` string-literal type they replace.
 */
export enum ComputeType {
	CPU = "cpu",
	GPU = "gpu",
}

export interface ModelLoadInfo {
state: LoadState;
Expand Down

0 comments on commit 9164314

Please sign in to comment.