```ts
// Programmatically interact with the Hub

await createRepo({
  repo: { type: "model", name: "my-user/nlp-model" },
  credentials: { accessToken: HF_TOKEN }
});

await uploadFile({
  repo: "my-user/nlp-model",
  credentials: { accessToken: HF_TOKEN },
  // Can work with native File in browsers
  file: {
    path: "pytorch_model.bin",
    content: new Blob(...)
  }
});

// Use hosted inference

await inference.translation({
  model: 't5-base',
  inputs: 'My name is Wolfgang and I live in Berlin'
})

await inference.textToImage({
  model: 'stabilityai/stable-diffusion-2',
  inputs: 'award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]',
  parameters: {
    negative_prompt: 'blurry',
  }
})

// and much more…
```
This is a collection of JS libraries to interact with the Hugging Face API, with TS types included.
- @huggingface/inference: Use Inference Endpoints (dedicated) and Inference API (serverless) to make calls to 100,000+ Machine Learning models
- @huggingface/hub: Interact with huggingface.co to create or delete repos and commit / download files
- @huggingface/agents: Interact with HF models through a natural language interface
- @huggingface/gguf: A GGUF parser that works on remotely hosted files (see the sketch after this list)
- @huggingface/tasks: The definition files and source-of-truth for the Hub's main primitives like pipeline tasks, model libraries, etc.
- @huggingface/space-header: Use the Space `mini_header` outside Hugging Face
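For instance, here is a minimal sketch of the GGUF parser reading metadata from a remotely hosted file (the model URL is illustrative; only the header is fetched, not the whole file):

```ts
import { gguf } from "@huggingface/gguf";

// Illustrative URL: any remotely hosted .gguf file works the same way.
const url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q2_K.gguf";

// Parses the header via HTTP range requests instead of downloading the file.
const { metadata, tensorInfos } = await gguf(url);
console.log(metadata["general.architecture"]);
console.log(tensorInfos.length, "tensors described");
```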
We use modern features to avoid polyfills and dependencies, so the libraries will only work on modern browsers / Node.js >= 18 / Bun / Deno.
The libraries are still very young; please help us by opening issues!
To install via NPM, you can download the libraries as needed:
```sh
npm install @huggingface/inference
npm install @huggingface/hub
npm install @huggingface/agents
```
Then import the libraries in your code:
```ts
import { HfInference } from "@huggingface/inference";
import { HfAgent } from "@huggingface/agents";
import { createRepo, commit, deleteRepo, listFiles } from "@huggingface/hub";
import type { RepoId, Credentials } from "@huggingface/hub";
```
You can run our packages with vanilla JS, without any bundler, by using a CDN or static hosting. Using ES modules, i.e. `<script type="module">`, you can import the libraries in your code:
```html
<script type="module">
  import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@2.7.0/+esm';
  import { createRepo, commit, deleteRepo, listFiles } from "https://cdn.jsdelivr.net/npm/@huggingface/hub@0.15.1/+esm";
</script>
```
When using Deno, you can import directly from a CDN such as esm.sh, or via `npm:` specifiers:

```ts
// esm.sh
import { HfInference } from "https://esm.sh/@huggingface/inference";
import { HfAgent } from "https://esm.sh/@huggingface/agents";
import { createRepo, commit, deleteRepo, listFiles } from "https://esm.sh/@huggingface/hub";
// or npm:
import { HfInference } from "npm:@huggingface/inference";
import { HfAgent } from "npm:@huggingface/agents";
import { createRepo, commit, deleteRepo, listFiles } from "npm:@huggingface/hub";
```
Get your HF access token in your account settings.
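Avoid hardcoding the token in source; a minimal sketch of reading it from an environment variable in Node.js (the variable name `HF_TOKEN` is just a convention):

```ts
import { HfInference } from "@huggingface/inference";

// Read the token from the environment instead of committing it to source.
const HF_TOKEN = process.env.HF_TOKEN;
if (!HF_TOKEN) {
  throw new Error("Set the HF_TOKEN environment variable to a Hugging Face access token");
}

const inference = new HfInference(HF_TOKEN);
```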
```ts
import { HfInference } from "@huggingface/inference";

const HF_TOKEN = "hf_...";

const inference = new HfInference(HF_TOKEN);

// Chat completion API
const out = await inference.chatCompletion({
  model: "mistralai/Mistral-7B-Instruct-v0.2",
  messages: [{ role: "user", content: "Complete this sentence with words: one plus one is equal to " }],
  max_tokens: 100
});
console.log(out.choices[0].message);

// Streaming chat completion API
for await (const chunk of inference.chatCompletionStream({
  model: "mistralai/Mistral-7B-Instruct-v0.2",
  messages: [{ role: "user", content: "Complete this sentence with words: one plus one is equal to " }],
  max_tokens: 100
})) {
  console.log(chunk.choices[0].delta.content);
}

// You can also omit "model" to use the recommended model for the task
await inference.translation({
  model: 't5-base',
  inputs: 'My name is Wolfgang and I live in Amsterdam'
})

await inference.textToImage({
  model: 'stabilityai/stable-diffusion-2',
  inputs: 'award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]',
  parameters: {
    negative_prompt: 'blurry',
  }
})

await inference.imageToText({
  data: await (await fetch('https://picsum.photos/300/300')).blob(),
  model: 'nlpconnect/vit-gpt2-image-captioning',
})

// Using your own dedicated inference endpoint: https://hf.co/docs/inference-endpoints/
const gpt2 = inference.endpoint('https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2');
const { generated_text } = await gpt2.textGeneration({ inputs: 'The answer to the universe is' });

// Chat completion through an endpoint
const mistral = inference.endpoint("https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2");
const chatOut = await mistral.chatCompletion({
  model: "mistralai/Mistral-7B-Instruct-v0.2",
  messages: [{ role: "user", content: "Complete this sentence with words: one plus one is equal to " }],
  max_tokens: 100,
});
console.log(chatOut.choices[0].message);
```
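The other task helpers follow the same call shape. As one more hedged example, here is a sketch of computing sentence embeddings with `featureExtraction` (the model choice is illustrative):

```ts
// Sketch: sentence embeddings via the feature-extraction task.
// The model below is an illustrative choice, not a recommendation.
const embedding = await inference.featureExtraction({
  model: "sentence-transformers/all-MiniLM-L6-v2",
  inputs: "That is a happy person",
});
console.log(embedding); // array (possibly nested) of numbers
```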
```ts
import { createRepo, uploadFile, deleteFiles } from "@huggingface/hub";

const HF_TOKEN = "hf_...";

await createRepo({
  repo: "my-user/nlp-model", // or { type: "model", name: "my-user/nlp-test" }
  credentials: { accessToken: HF_TOKEN }
});

await uploadFile({
  repo: "my-user/nlp-model",
  credentials: { accessToken: HF_TOKEN },
  // Can work with native File in browsers
  file: {
    path: "pytorch_model.bin",
    content: new Blob(...)
  }
});

await deleteFiles({
  repo: { type: "space", name: "my-user/my-space" }, // or "spaces/my-user/my-space"
  credentials: { accessToken: HF_TOKEN },
  paths: ["README.md", ".gitattributes"]
});
```
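Reading works similarly; a sketch of listing a repo's files with `listFiles`, which returns an async generator (public repos need no credentials; the `path` and `size` fields are what the Hub's file listing exposes):

```ts
import { listFiles } from "@huggingface/hub";

// Iterate over the repo's file listing as it streams in.
for await (const file of listFiles({ repo: { type: "model", name: "my-user/nlp-model" } })) {
  console.log(file.path, file.size);
}
```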
```ts
import { HfAgent, LLMFromHub, defaultTools } from '@huggingface/agents';

const HF_TOKEN = "hf_...";

const agent = new HfAgent(HF_TOKEN, LLMFromHub(HF_TOKEN), [...defaultTools]);

// You can generate the code, inspect it, and then run it
const code = await agent.generateCode("Draw a picture of a cat wearing a top hat. Then caption the picture and read it out loud.");
console.log(code);
const messages = await agent.evaluateCode(code);
console.log(messages); // contains the data

// Or you can run the code directly. However, you can't check that the
// code is safe to execute this way, so use at your own risk.
const runMessages = await agent.run("Draw a picture of a cat wearing a top hat. Then caption the picture and read it out loud.");
console.log(runMessages);
```
There are more features, of course; check each library's README!
To set up the repo and run the checks and tests:

```sh
sudo corepack enable
pnpm install
pnpm -r format:check
pnpm -r lint:check
pnpm -r test
```
To build all the packages:

```sh
pnpm -r build
```
This will generate ESM and CJS JavaScript files in `packages/*/dist`, e.g. `packages/inference/dist/index.mjs`.
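The dual output lets the same package be consumed from both module systems; a minimal sketch (assuming the package.json `exports` map routes `import` and `require` to the right build):

```ts
// ESM consumers resolve to the .mjs build:
import { HfInference } from "@huggingface/inference";

// CommonJS consumers resolve to the CJS build instead:
// const { HfInference } = require("@huggingface/inference");
```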