# This file was autogenerated by uv via the following command:
# uv pip compile pyproject.toml -o requirements.txt
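#
# For reference, a minimal sketch of the pyproject.toml [project] table this
# lockfile could have been compiled from, reconstructed from the entries
# annotated "via eiffel-demo (pyproject.toml)" below; the exact version
# constraints used in pyproject.toml are not recorded in this file and are
# assumed here to be unpinned:
#
#   [project]
#   name = "eiffel-demo"
#   dependencies = [
#       "accelerate",
#       "gradio",
#       "hf-transfer",
#       "nnsight",
#       "pyyaml",
#       "sae-lens",
#       "torch",
#       "transformers",
#   ]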
accelerate==1.11.0
# via
# eiffel-demo (pyproject.toml)
# nnsight
# transformer-lens
aiofiles==24.1.0
# via gradio
aiohappyeyeballs==2.6.1
# via aiohttp
aiohttp==3.13.2
# via fsspec
aiosignal==1.4.0
# via aiohttp
annotated-doc==0.0.3
# via fastapi
annotated-types==0.7.0
# via pydantic
anyio==4.11.0
# via
# gradio
# httpx
# starlette
astor==0.8.1
# via nnsight
asttokens==3.0.0
# via stack-data
attrs==25.4.0
# via aiohttp
babe==0.0.7
# via sae-lens
beartype==0.14.1
# via transformer-lens
better-abc==0.0.3
# via transformer-lens
bidict==0.23.1
# via python-socketio
brotli==1.1.0
# via gradio
certifi==2025.10.5
# via
# httpcore
# httpx
# requests
# sentry-sdk
charset-normalizer==3.4.4
# via requests
click==8.3.0
# via
# nltk
# typer
# uvicorn
# wandb
cloudpickle==3.1.2
# via nnsight
config2py==0.1.42
# via py2store
datasets==4.4.0
# via
# sae-lens
# transformer-lens
decorator==5.2.1
# via ipython
dill==0.4.0
# via
# datasets
# multiprocess
docstring-parser==0.17.0
# via simple-parsing
dol==0.3.31
# via
# config2py
# graze
# py2store
einops==0.8.1
# via transformer-lens
executing==2.2.1
# via stack-data
fancy-einsum==0.0.3
# via transformer-lens
fastapi==0.121.0
# via gradio
ffmpy==0.6.4
# via gradio
filelock==3.20.0
# via
# datasets
# huggingface-hub
# torch
# transformers
frozenlist==1.8.0
# via
# aiohttp
# aiosignal
fsspec==2025.10.0
# via
# datasets
# gradio-client
# huggingface-hub
# torch
gitdb==4.0.12
# via gitpython
gitpython==3.1.45
# via wandb
gradio==5.49.1
# via eiffel-demo (pyproject.toml)
gradio-client==1.13.3
# via gradio
graze==0.1.39
# via babe
groovy==0.1.2
# via gradio
h11==0.16.0
# via
# httpcore
# uvicorn
# wsproto
hf-transfer==0.1.9
# via eiffel-demo (pyproject.toml)
hf-xet==1.2.0
# via huggingface-hub
httpcore==1.0.9
# via httpx
httpx==0.28.1
# via
# datasets
# gradio
# gradio-client
# safehttpx
huggingface-hub==0.36.0
# via
# accelerate
# datasets
# gradio
# gradio-client
# tokenizers
# transformers
i2==0.1.58
# via config2py
idna==3.11
# via
# anyio
# httpx
# requests
# yarl
importlib-resources==6.5.2
# via py2store
ipython==9.6.0
# via nnsight
ipython-pygments-lexers==1.1.1
# via ipython
jaxtyping==0.3.3
# via transformer-lens
jedi==0.19.2
# via ipython
jinja2==3.1.6
# via
# gradio
# torch
joblib==1.5.2
# via nltk
markdown-it-py==4.0.0
# via rich
markupsafe==3.0.3
# via
# gradio
# jinja2
matplotlib-inline==0.2.1
# via ipython
mdurl==0.1.2
# via markdown-it-py
mpmath==1.3.0
# via sympy
multidict==6.7.0
# via
# aiohttp
# yarl
multiprocess==0.70.18
# via datasets
narwhals==2.10.1
# via plotly
networkx==3.5
# via torch
nltk==3.9.2
# via sae-lens
nnsight==0.5.10
# via eiffel-demo (pyproject.toml)
numpy==1.26.4
# via
# accelerate
# datasets
# gradio
# pandas
# patsy
# plotly-express
# scipy
# statsmodels
# transformer-lens
# transformers
nvidia-cublas-cu12==12.8.4.1
# via
# nvidia-cudnn-cu12
# nvidia-cusolver-cu12
# torch
nvidia-cuda-cupti-cu12==12.8.90
# via torch
nvidia-cuda-nvrtc-cu12==12.8.93
# via torch
nvidia-cuda-runtime-cu12==12.8.90
# via torch
nvidia-cudnn-cu12==9.10.2.21
# via torch
nvidia-cufft-cu12==11.3.3.83
# via torch
nvidia-cufile-cu12==1.13.1.3
# via torch
nvidia-curand-cu12==10.3.9.90
# via torch
nvidia-cusolver-cu12==11.7.3.90
# via torch
nvidia-cusparse-cu12==12.5.8.93
# via
# nvidia-cusolver-cu12
# torch
nvidia-cusparselt-cu12==0.7.1
# via torch
nvidia-nccl-cu12==2.27.5
# via torch
nvidia-nvjitlink-cu12==12.8.93
# via
# nvidia-cufft-cu12
# nvidia-cusolver-cu12
# nvidia-cusparse-cu12
# torch
nvidia-nvshmem-cu12==3.3.20
# via torch
nvidia-nvtx-cu12==12.8.90
# via torch
orjson==3.11.4
# via gradio
packaging==25.0
# via
# accelerate
# datasets
# gradio
# gradio-client
# huggingface-hub
# plotly
# statsmodels
# transformers
# wandb
pandas==2.3.3
# via
# babe
# datasets
# gradio
# plotly-express
# statsmodels
# transformer-lens
parso==0.8.5
# via jedi
patsy==1.0.2
# via
# plotly-express
# statsmodels
pexpect==4.9.0
# via ipython
pillow==11.3.0
# via gradio
platformdirs==4.5.0
# via wandb
plotly==6.3.1
# via
# plotly-express
# sae-lens
plotly-express==0.4.1
# via sae-lens
prompt-toolkit==3.0.52
# via ipython
propcache==0.4.1
# via
# aiohttp
# yarl
protobuf==6.33.0
# via wandb
psutil==7.1.3
# via accelerate
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
py2store==0.1.22
# via babe
pyarrow==22.0.0
# via datasets
pydantic==2.11.10
# via
# fastapi
# gradio
# nnsight
# wandb
pydantic-core==2.33.2
# via pydantic
pydub==0.25.1
# via gradio
pygments==2.19.2
# via
# ipython
# ipython-pygments-lexers
# rich
python-dateutil==2.9.0.post0
# via pandas
python-dotenv==1.2.1
# via sae-lens
python-engineio==4.12.3
# via python-socketio
python-multipart==0.0.20
# via gradio
python-socketio==5.14.3
# via nnsight
pytz==2025.2
# via pandas
pyyaml==6.0.3
# via
# eiffel-demo (pyproject.toml)
# accelerate
# datasets
# gradio
# huggingface-hub
# sae-lens
# transformers
# wandb
regex==2025.11.3
# via
# nltk
# transformers
requests==2.32.5
# via
# datasets
# graze
# huggingface-hub
# python-socketio
# transformers
# wandb
rich==14.2.0
# via
# nnsight
# transformer-lens
# typer
ruff==0.14.3
# via gradio
sae-lens==6.21.0
# via eiffel-demo (pyproject.toml)
safehttpx==0.1.7
# via gradio
safetensors==0.6.2
# via
# accelerate
# sae-lens
# transformers
scipy==1.16.3
# via
# plotly-express
# statsmodels
semantic-version==2.10.0
# via gradio
sentencepiece==0.2.1
# via transformer-lens
sentry-sdk==2.43.0
# via wandb
shellingham==1.5.4
# via typer
simple-parsing==0.1.7
# via sae-lens
simple-websocket==1.1.0
# via python-engineio
six==1.17.0
# via python-dateutil
smmap==5.0.2
# via gitdb
sniffio==1.3.1
# via anyio
stack-data==0.6.3
# via ipython
starlette==0.49.3
# via
# fastapi
# gradio
statsmodels==0.14.5
# via plotly-express
sympy==1.14.0
# via torch
tenacity==9.1.2
# via sae-lens
tokenizers==0.22.1
# via transformers
toml==0.10.2
# via nnsight
tomlkit==0.13.3
# via gradio
torch==2.9.0
# via
# eiffel-demo (pyproject.toml)
# accelerate
# nnsight
# transformer-lens
tqdm==4.67.1
# via
# datasets
# huggingface-hub
# nltk
# transformer-lens
# transformers
traitlets==5.14.3
# via
# ipython
# matplotlib-inline
transformer-lens==2.16.1
# via sae-lens
transformers==4.57.1
# via
# eiffel-demo (pyproject.toml)
# nnsight
# sae-lens
# transformer-lens
# transformers-stream-generator
transformers-stream-generator==0.0.5
# via transformer-lens
triton==3.5.0
# via torch
typeguard==4.4.4
# via transformer-lens
typer==0.20.0
# via gradio
typing-extensions==4.15.0
# via
# aiosignal
# anyio
# fastapi
# gradio
# gradio-client
# huggingface-hub
# ipython
# pydantic
# pydantic-core
# sae-lens
# simple-parsing
# starlette
# torch
# transformer-lens
# typeguard
# typer
# typing-inspection
# wandb
typing-inspection==0.4.2
# via pydantic
tzdata==2025.2
# via pandas
urllib3==2.5.0
# via
# requests
# sentry-sdk
uvicorn==0.38.0
# via gradio
wadler-lindig==0.1.7
# via jaxtyping
wandb==0.22.3
# via transformer-lens
wcwidth==0.2.14
# via prompt-toolkit
websocket-client==1.9.0
# via python-socketio
websockets==15.0.1
# via gradio-client
wsproto==1.2.0
# via simple-websocket
xxhash==3.6.0
# via datasets
yarl==1.22.0
# via aiohttp
# Hugging Face Spaces ZeroGPU support
spaces==0.28.3
    # via eiffel-demo (for ZeroGPU deployment)