[Widgets] refactor call api-inference function
mishig25 committed Oct 17, 2023
1 parent a32e426 commit ebfbb0d
Showing 1 changed file with 33 additions and 50 deletions.
js/src/lib/components/InferenceWidget/shared/helpers.ts
@@ -52,16 +52,42 @@ export async function getBlobFromUrl(url: string): Promise<Blob> {
 	return blob;
 }
 
-async function callApi(
+interface Success<T> {
+	computeTime: string;
+	output: T;
+	outputJson: string;
+	response: Response;
+	status: "success";
+}
+
+interface LoadingModel {
+	error: string;
+	estimatedTime: number;
+	status: "loading-model";
+}
+
+interface Error {
+	error: string;
+	status: "error";
+}
+
+interface CacheNotFound {
+	status: "cache not found";
+}
+
+type Result<T> = Success<T> | LoadingModel | Error | CacheNotFound;
+
+export async function getResponse<T>(
 	url: string,
 	repoId: string,
 	requestBody: Record<string, any>,
 	apiToken = "",
+	outputParsingFn: (x: unknown) => T,
 	waitForModel = false, // If true, the server will only respond once the model has been loaded on the inference API,
-	useCache = true,
 	includeCredentials = false,
-	isOnLoadCall = false
-): Promise<Response> {
+	isOnLoadCall = false, // If true, the server will try to answer from cache and not do anything if not
+	useCache = true
+): Promise<Result<T>> {
 	const contentType =
 		"file" in requestBody && "type" in requestBody["file"] ? requestBody["file"]["type"] : "application/json";
 
@@ -80,57 +106,14 @@ async function callApi(
 		headers.set("X-Load-Model", "0");
 	}
 
-	const body: File | string = "file" in requestBody ? requestBody.file : JSON.stringify(requestBody);
+	const reqBody: File | string = "file" in requestBody ? requestBody.file : JSON.stringify(requestBody);
 
-	return await fetch(`${url}/models/${repoId}`, {
+	const response = await fetch(`${url}/models/${repoId}`, {
 		method: "POST",
-		body,
+		body: reqBody,
 		headers,
 		credentials: includeCredentials ? "include" : "same-origin",
 	});
-}
-
-export async function getResponse<T>(
-	url: string,
-	repoId: string,
-	requestBody: Record<string, any>,
-	apiToken = "",
-	outputParsingFn: (x: unknown) => T,
-	waitForModel = false, // If true, the server will only respond once the model has been loaded on the inference API,
-	includeCredentials = false,
-	isOnLoadCall = false, // If true, the server will try to answer from cache and not do anything if not
-	useCache = true
-): Promise<
-	| {
-			computeTime: string;
-			output: T;
-			outputJson: string;
-			response: Response;
-			status: "success";
-	  }
-	| {
-			error: string;
-			estimatedTime: number;
-			status: "loading-model";
-	  }
-	| {
-			error: string;
-			status: "error";
-	  }
-	| {
-			status: "cache not found";
-	  }
-> {
-	const response = await callApi(
-		url,
-		repoId,
-		requestBody,
-		apiToken,
-		waitForModel,
-		useCache,
-		includeCredentials,
-		isOnLoadCall
-	);
-
 	if (response.ok) {
 		// Success
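For context, a minimal sketch of how a widget might consume the refactored getResponse, assuming the elided function body resolves to the Result<T> variants declared above. Only the getResponse signature and the Result<T> union come from the diff; the endpoint URL, model id, request payload, and parseOutput helper are hypothetical illustrations.

	// Hypothetical caller of the getResponse defined in helpers.ts above.
	interface TextClassificationOutput {
		label: string;
		score: number;
	}

	// Assumed parser: coerce the raw API payload into the expected shape,
	// throwing if it does not look like [[{ label, score }, ...]].
	function parseOutput(x: unknown): TextClassificationOutput {
		if (Array.isArray(x) && Array.isArray(x[0]) && x[0].length > 0) {
			return x[0][0] as TextClassificationOutput;
		}
		throw new TypeError("unexpected API output");
	}

	async function runExample(): Promise<void> {
		const result = await getResponse(
			"https://api-inference.huggingface.co", // url (assumed endpoint)
			"distilbert-base-uncased-finetuned-sst-2-english", // repoId (example model)
			{ inputs: "I love this!" }, // requestBody
			"", // apiToken (anonymous call)
			parseOutput // outputParsingFn; T is inferred as TextClassificationOutput
		);

		// The literal `status` field is the discriminant: each case narrows
		// `result` to exactly one variant of Result<T>.
		switch (result.status) {
			case "success":
				console.log(result.output.label, result.output.score, result.computeTime);
				break;
			case "loading-model":
				console.log(`model loading, est. ${result.estimatedTime}s:`, result.error);
				break;
			case "error":
				console.error(result.error);
				break;
			case "cache not found":
				// On-load calls restricted to cached answers can land here; do nothing.
				break;
		}
	}

Compared with the inline anonymous union this commit removes, the named Success/LoadingModel/Error/CacheNotFound interfaces are reusable by callers, and the literal status tags let the compiler check that a switch like the one above handles every variant.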
