separate snippets

hanouticelina committed Nov 15, 2024
1 parent 91fffb0 commit 7cfb641
Showing 3 changed files with 16 additions and 6 deletions.
8 changes: 8 additions & 0 deletions docs/api-inference/tasks/chat-completion.md
@@ -79,6 +79,7 @@ curl 'https://api-inference.huggingface.co/models/google/gemma-2-2b-it/v1/chat/c
</curl>

<python>
With huggingface_hub client:
```py
from huggingface_hub import InferenceClient

@@ -102,6 +103,7 @@ for chunk in stream:
print(chunk.choices[0].delta.content, end="")
```

With openai client:
```py
from openai import OpenAI

@@ -132,6 +134,7 @@ To use the Python client, see `huggingface_hub`'s [package reference](https://hu
</python>

<js>
With huggingface_hub client:
```js
import { HfInference } from "@huggingface/inference"

@@ -159,6 +162,7 @@ for await (const chunk of stream) {
}
```
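
The generated snippet above is truncated by the diff hunks; for reference, a minimal self-contained sketch of the full streaming call with the `@huggingface/inference` client (model name taken from the surrounding hunks, token and `max_tokens` are placeholders) might look like:

```ts
import { HfInference } from "@huggingface/inference";

// Placeholder access token.
const client = new HfInference("hf_***");

// Stream a chat completion from the model referenced in the hunks above.
const stream = client.chatCompletionStream({
  model: "google/gemma-2-2b-it",
  messages: [{ role: "user", content: "What is the capital of France?" }], // example prompt
  max_tokens: 500, // assumed value
});

for await (const chunk of stream) {
  if (chunk.choices && chunk.choices.length > 0) {
    process.stdout.write(chunk.choices[0].delta.content ?? "");
  }
}
```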

With openai client:
```js
import { OpenAI } from "openai"

@@ -233,6 +237,7 @@ curl 'https://api-inference.huggingface.co/models/meta-llama/Llama-3.2-11B-Visio
</curl>

<python>
With huggingface_hub client:
```py
from huggingface_hub import InferenceClient

@@ -267,6 +272,7 @@ for chunk in stream:
print(chunk.choices[0].delta.content, end="")
```

With openai client:
```py
from openai import OpenAI

@@ -308,6 +314,7 @@ To use the Python client, see `huggingface_hub`'s [package reference](https://hu
</python>

<js>
With huggingface_hub client:
```js
import { HfInference } from "@huggingface/inference"

@@ -346,6 +353,7 @@ for await (const chunk of stream) {
}
```
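
Likewise, the conversational image-text-to-text snippet above is cut off at the hunk boundary; a hedged sketch of the complete call, assuming the meta-llama/Llama-3.2-11B-Vision-Instruct model from this section plus a placeholder token and image URL, could be:

```ts
import { HfInference } from "@huggingface/inference";

const client = new HfInference("hf_***"); // placeholder access token

// Multimodal message: an image URL plus a text prompt, streamed back as chunks.
const stream = client.chatCompletionStream({
  model: "meta-llama/Llama-3.2-11B-Vision-Instruct",
  messages: [
    {
      role: "user",
      content: [
        { type: "image_url", image_url: { url: "https://example.com/photo.jpg" } }, // hypothetical image
        { type: "text", text: "Describe this image in one sentence." },
      ],
    },
  ],
  max_tokens: 500, // assumed value
});

for await (const chunk of stream) {
  if (chunk.choices && chunk.choices.length > 0) {
    process.stdout.write(chunk.choices[0].delta.content ?? "");
  }
}
```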

With openai client:
```js
import { OpenAI } from "openai"

2 changes: 2 additions & 0 deletions docs/api-inference/tasks/image-text-to-text.md
@@ -45,6 +45,7 @@ curl https://api-inference.huggingface.co/models/meta-llama/Llama-3.2-11B-Vision
</curl>

<python>
With huggingface_hub client:
```py
import requests

@@ -68,6 +69,7 @@ for chunk in stream:
print(chunk.choices[0].delta.content, end="")
```
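
The snippet above is also truncated; for reference, a rough TypeScript sketch of the same request made directly against the OpenAI-compatible chat completions route (the model id matches the one used in chat-completion.md above; token and image URL are placeholders) could be:

```ts
// Hypothetical sketch: POST to the serverless Inference API's chat completions
// route for the image-text-to-text model documented in this file.
const response = await fetch(
  "https://api-inference.huggingface.co/models/meta-llama/Llama-3.2-11B-Vision-Instruct/v1/chat/completions",
  {
    method: "POST",
    headers: {
      Authorization: "Bearer hf_***", // placeholder user access token
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      model: "meta-llama/Llama-3.2-11B-Vision-Instruct",
      messages: [
        {
          role: "user",
          content: [
            { type: "image_url", image_url: { url: "https://example.com/photo.jpg" } }, // hypothetical image
            { type: "text", text: "Describe this image in one sentence." },
          ],
        },
      ],
      max_tokens: 500, // assumed value
    }),
  },
);

const result = await response.json();
console.log(result.choices[0].message.content);
```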

With openai client:
```py
import requests

12 changes: 6 additions & 6 deletions scripts/api-inference/scripts/generate.ts
@@ -100,22 +100,22 @@ const TASKS_DATA = (await response.json()) as any;
///////////////////////

const formatSnippets = (result: snippets.types.InferenceSnippet | snippets.types.InferenceSnippet[], defaultClient: string, language: string): string => {
-  // For single snippet, return just the content
+  // For single snippet, just wrap with code block
if (!Array.isArray(result) || result.length === 1) {
const snippet = Array.isArray(result) ? result[0] : result;
return `\`\`\`${language}\n${snippet.content}\n\`\`\``;
}

-  // For multiple snippets, wrap each one in its own code block
+  // For multiple snippets, add description and wrap each one
  return result
-    .map(snippet =>
-      `\`\`\`${language}\n${snippet.content}\n\`\`\``
-    )
+    .map(snippet => {
+      const client = snippet.client || defaultClient;
+      return `With ${client} client:\n\`\`\`${language}\n${snippet.content}\n\`\`\``;
+    })
.join('\n\n');
};



const GET_SNIPPET_FN = {
curl: (modelData: any, token: string) => {
const result = snippets.curl.getCurlInferenceSnippet(modelData, token);
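
For illustration, here is a standalone sketch that mirrors the new multi-snippet branch; `InferenceSnippet` is a simplified stand-in for `snippets.types.InferenceSnippet`, and the inputs are made-up examples of what the generator passes for a Python tab:

```ts
// Simplified stand-in type; the real one comes from snippets.types.
type InferenceSnippet = { content: string; client?: string };

// Mirror of the new multi-snippet branch: prefix each snippet with a
// "With <client> client:" line, then wrap it in a fenced code block.
const renderSnippets = (result: InferenceSnippet[], defaultClient: string, language: string): string =>
  result
    .map((snippet) => {
      const client = snippet.client || defaultClient;
      return `With ${client} client:\n\`\`\`${language}\n${snippet.content}\n\`\`\``;
    })
    .join("\n\n");

// Example: two Python snippets produce two labelled blocks, matching the
// "With huggingface_hub client:" / "With openai client:" lines added to the docs above.
console.log(
  renderSnippets(
    [
      { client: "huggingface_hub", content: "from huggingface_hub import InferenceClient" },
      { client: "openai", content: "from openai import OpenAI" },
    ],
    "huggingface_hub",
    "py",
  ),
);
```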
