metastable-void committed on
revert changes

Files changed:
- .python-version: +1 -1
- README.md: +3 -1
- pyproject.toml: +4 -4
- requirements.txt: +31 -28
.python-version CHANGED
@@ -1 +1 @@
-3.
+3.10
README.md CHANGED
@@ -3,7 +3,9 @@ title: chat-1
 emoji: ⚡
 colorFrom: red
 colorTo: purple
-python_version: 3.
+python_version: 3.10
+models:
+- vericava/llm-jp-3-1.8b-instruct-lora-vericava7
 sdk: gradio
 sdk_version: 5.9.1
 app_file: app.py
pyproject.toml CHANGED
@@ -1,12 +1,12 @@
 [project]
 dependencies = [
-    "accelerate>=1.
-    "bitsandbytes>=0.45.
+    "accelerate>=1.2.1",
+    "bitsandbytes>=0.45.0",
     "gradio>=5.9.1",
     "hf-transfer>=0.1.8",
     "spaces>=0.31.1",
-    "torch==2.
-    "transformers>=4.
+    "torch==2.4.0",
+    "transformers>=4.47.1",
 ]
 description = ""
 name = "chat-1"
requirements.txt CHANGED
@@ -1,7 +1,7 @@
 # This file was autogenerated by uv via the following command:
 #    uv pip compile pyproject.toml -o requirements.txt
-accelerate==1.
-    # via chat
+accelerate==1.2.1
+    # via calm2-7b-chat (pyproject.toml)
 aiofiles==23.2.1
     # via gradio
 annotated-types==0.7.0
@@ -11,8 +11,8 @@ anyio==4.7.0
     #   gradio
     #   httpx
     #   starlette
-bitsandbytes==0.45.
-    # via chat
+bitsandbytes==0.45.0
+    # via calm2-7b-chat (pyproject.toml)
 certifi==2024.12.14
     # via
     #   httpcore
@@ -24,6 +24,8 @@ click==8.1.8
     # via
     #   typer
     #   uvicorn
+exceptiongroup==1.2.2
+    # via anyio
 fastapi==0.115.6
     # via gradio
 ffmpy==0.5.0
@@ -33,6 +35,7 @@ filelock==3.16.1
     #   huggingface-hub
     #   torch
     #   transformers
+    #   triton
 fsspec==2024.12.0
     # via
     #   gradio-client
@@ -40,7 +43,7 @@ fsspec==2024.12.0
     #   torch
 gradio==5.9.1
     # via
-    #   chat
+    #   calm2-7b-chat (pyproject.toml)
     #   spaces
 gradio-client==1.5.2
     # via gradio
@@ -49,7 +52,7 @@ h11==0.14.0
     #   httpcore
     #   uvicorn
 hf-transfer==0.1.8
-    # via chat
+    # via calm2-7b-chat (pyproject.toml)
 httpcore==1.0.7
     # via httpx
 httpx==0.28.1
@@ -93,39 +96,36 @@ numpy==2.2.1
     #   gradio
     #   pandas
     #   transformers
-nvidia-cublas-cu12==12.
+nvidia-cublas-cu12==12.1.3.1
     # via
     #   nvidia-cudnn-cu12
     #   nvidia-cusolver-cu12
     #   torch
-nvidia-cuda-cupti-cu12==12.
+nvidia-cuda-cupti-cu12==12.1.105
     # via torch
-nvidia-cuda-nvrtc-cu12==12.
+nvidia-cuda-nvrtc-cu12==12.1.105
     # via torch
-nvidia-cuda-runtime-cu12==12.
+nvidia-cuda-runtime-cu12==12.1.105
     # via torch
 nvidia-cudnn-cu12==9.1.0.70
     # via torch
-nvidia-cufft-cu12==11.2.
+nvidia-cufft-cu12==11.0.2.54
     # via torch
-nvidia-curand-cu12==10.3.
+nvidia-curand-cu12==10.3.2.106
     # via torch
-nvidia-cusolver-cu12==11.
+nvidia-cusolver-cu12==11.4.5.107
     # via torch
-nvidia-cusparse-cu12==12.
+nvidia-cusparse-cu12==12.1.0.106
     # via
     #   nvidia-cusolver-cu12
     #   torch
-nvidia-
-    # via torch
-nvidia-nccl-cu12==2.21.5
+nvidia-nccl-cu12==2.20.5
     # via torch
-nvidia-nvjitlink-cu12==12.
+nvidia-nvjitlink-cu12==12.6.85
     # via
     #   nvidia-cusolver-cu12
     #   nvidia-cusparse-cu12
-
-nvidia-nvtx-cu12==12.4.127
+nvidia-nvtx-cu12==12.1.105
     # via torch
 orjson==3.10.13
     # via gradio
@@ -194,44 +194,47 @@ six==1.17.0
 sniffio==1.3.1
     # via anyio
 spaces==0.31.1
-    # via chat
+    # via calm2-7b-chat (pyproject.toml)
 starlette==0.41.3
     # via
     #   fastapi
     #   gradio
-sympy==1.13.
+sympy==1.13.3
     # via torch
 tokenizers==0.21.0
     # via transformers
 tomlkit==0.13.2
     # via gradio
-torch==2.
+torch==2.4.0
     # via
-    #   chat
+    #   calm2-7b-chat (pyproject.toml)
     #   accelerate
     #   bitsandbytes
 tqdm==4.67.1
     # via
     #   huggingface-hub
     #   transformers
-transformers==4.
-    # via chat
-triton==3.
+transformers==4.47.1
+    # via calm2-7b-chat (pyproject.toml)
+triton==3.0.0
     # via torch
 typer==0.15.1
     # via gradio
 typing-extensions==4.12.2
     # via
     #   anyio
+    #   bitsandbytes
     #   fastapi
     #   gradio
     #   gradio-client
     #   huggingface-hub
     #   pydantic
     #   pydantic-core
+    #   rich
     #   spaces
     #   torch
     #   typer
+    #   uvicorn
 tzdata==2024.2
     # via pandas
 urllib3==2.3.0
@@ -239,4 +242,4 @@ urllib3==2.3.0
 uvicorn==0.34.0
     # via gradio
 websockets==14.1
-    # via gradio-client
+    # via gradio-client
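For context, a minimal sketch of how the reverted pins (torch 2.4.0, transformers 4.47.1, bitsandbytes 0.45.0, accelerate 1.2.1) and the model newly listed in README.md are typically wired together. This is an illustration under assumptions, not the Space's actual app.py: the model id is taken from the README's models: field, and if that repo is an adapter-only LoRA it would additionally need peft, which is not in these requirements.

# Hypothetical example, not the Space's app.py: load the README-listed model
# with 4-bit bitsandbytes quantization under the reverted dependency pins.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7"  # from the README models: field

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_compute_dtype=torch.bfloat16,
    ),
    device_map="auto",  # device placement handled by accelerate
)

# Quick smoke test of generation.
inputs = tokenizer("こんにちは", return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))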