- Notifications
You must be signed in to change notification settings - Fork 0
Utilities to use the Hugging Face hub API
License
classicvalues/huggingface.js
Folders and files
Name | Name | Last commit message | Last commit date | |
---|---|---|---|---|
Repository files navigation
```js
await inference.translation({
  model: 't5-base',
  inputs: 'My name is Wolfgang and I live in Berlin'
})

await hf.translation({
  model: "facebook/nllb-200-distilled-600M",
  inputs: "how is the weather like in Gaborone",
  parameters: {
    src_lang: "eng_Latn",
    tgt_lang: "sot_Latn"
  }
})

await inference.textToImage({
  model: 'stabilityai/stable-diffusion-2',
  inputs: 'award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]',
  parameters: {
    negative_prompt: 'blurry',
  }
})
```
This is a collection of JS libraries to interact with the Hugging Face API, with TS types included.
- @huggingface/inference: Use the Inference API to make calls to 100,000+ Machine Learning models, or your own inference endpoints!
- @huggingface/agents: Interact with HF models through a natural language interface
- @huggingface/hub: Interact with huggingface.co to create or delete repos and commit / download files
With more to come, like @huggingface/endpoints to manage your HF Endpoints!
We use modern features to avoid polyfills and dependencies, so the libraries will only work on modern browsers / Node.js >= 18 / Bun / Deno.
The libraries are still very young, please help us by opening issues!
To install via NPM, you can download the libraries as needed:
```
npm install @huggingface/inference
npm install @huggingface/hub
npm install @huggingface/agents
```
Then import the libraries in your code:
```ts
import { HfInference } from "@huggingface/inference";
import { HfAgent } from "@huggingface/agents";
import { createRepo, commit, deleteRepo, listFiles } from "@huggingface/hub";
import type { RepoId, Credentials } from "@huggingface/hub";
```
You can run our packages with vanilla JS, without any bundler, by using a CDN or static hosting. Using ES modules, i.e. `<script type="module">`, you can import the libraries in your code:
```html
<script type="module">
  import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@2.6.4/+esm';
  import { createRepo, commit, deleteRepo, listFiles } from "https://cdn.jsdelivr.net/npm/@huggingface/hub@0.12.3/+esm";
</script>
```
```js
// esm.sh
import { HfInference } from "https://esm.sh/@huggingface/inference"
import { HfAgent } from "https://esm.sh/@huggingface/agents";
import { createRepo, commit, deleteRepo, listFiles } from "https://esm.sh/@huggingface/hub"

// or npm:
import { HfInference } from "npm:@huggingface/inference"
import { HfAgent } from "npm:@huggingface/agents";
import { createRepo, commit, deleteRepo, listFiles } from "npm:@huggingface/hub"
```
Get your HF access token in your account settings.
```js
import { HfInference } from "@huggingface/inference";

const HF_TOKEN = "hf_...";

const inference = new HfInference(HF_TOKEN);

// You can also omit "model" to use the recommended model for the task
await inference.translation({
  model: 't5-base',
  inputs: 'My name is Wolfgang and I live in Amsterdam'
})

await inference.textToImage({
  model: 'stabilityai/stable-diffusion-2',
  inputs: 'award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]',
  parameters: {
    negative_prompt: 'blurry',
  }
})

await inference.imageToText({
  data: await (await fetch('https://picsum.photos/300/300')).blob(),
  model: 'nlpconnect/vit-gpt2-image-captioning',
})

// Using your own inference endpoint: https://hf.co/docs/inference-endpoints/
const gpt2 = inference.endpoint('https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2');
const { generated_text } = await gpt2.textGeneration({ inputs: 'The answer to the universe is' });
```
```js
import { HfAgent, LLMFromHub, defaultTools } from '@huggingface/agents';

const HF_TOKEN = "hf_...";

const agent = new HfAgent(HF_TOKEN, LLMFromHub(HF_TOKEN), [...defaultTools]);

// you can generate the code, inspect it and then run it
const code = await agent.generateCode("Draw a picture of a cat wearing a top hat. Then caption the picture and read it out loud.");
console.log(code);
const messages = await agent.evaluateCode(code)
console.log(messages); // contains the data

// or you can run the code directly, however you can't check that the code is safe to execute this way, use at your own risk.
const messages = await agent.run("Draw a picture of a cat wearing a top hat. Then caption the picture and read it out loud.")
console.log(messages);
```
```js
import { createRepo, uploadFile, deleteFiles } from "@huggingface/hub";

const HF_TOKEN = "hf_...";

await createRepo({
  repo: "my-user/nlp-model", // or {type: "model", name: "my-user/nlp-test"},
  credentials: { accessToken: HF_TOKEN }
});

await uploadFile({
  repo: "my-user/nlp-model",
  credentials: { accessToken: HF_TOKEN },
  // Can work with native File in browsers
  file: {
    path: "pytorch_model.bin",
    content: new Blob(...)
  }
});

await deleteFiles({
  repo: { type: "space", name: "my-user/my-space" }, // or "spaces/my-user/my-space"
  credentials: { accessToken: HF_TOKEN },
  paths: ["README.md", ".gitattributes"]
});
```
There are more features of course, check each library's README!
```
sudo corepack enable
pnpm install
pnpm -r format:check
pnpm -r lint:check
pnpm -r test
```
pnpm -r build
This will generate ESM and CJS javascript files in `packages/*/dist`, e.g. `packages/inference/dist/index.mjs`.
About
Utilities to use the Hugging Face hub API
Resources
License
Uh oh!
There was an error while loading. Please reload this page.
Stars
Watchers
Forks
Releases
Packages 0
Languages
- TypeScript71.9%
- Svelte21.9%
- JavaScript5.5%
- Other0.7%