Update
- app.py +2 -2
- pyproject.toml +1 -4
- requirements.txt +1 -1
- uv.lock +7 -3
app.py CHANGED
@@ -12,8 +12,8 @@ from transformers import KyutaiSpeechToTextForConditionalGeneration, KyutaiSpeec
 DESCRIPTION = "# Kyutai STT 2.6B EN"
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
-model_id = "kyutai/stt-2.6b-en"
-model = KyutaiSpeechToTextForConditionalGeneration.from_pretrained(model_id, device_map=device)
+model_id = "kyutai/stt-2.6b-en-trfs"
+model = KyutaiSpeechToTextForConditionalGeneration.from_pretrained(model_id, device_map=device, torch_dtype="auto")
 processor = KyutaiSpeechToTextProcessor.from_pretrained(model_id)
 
 SAMPLE_RATE = 24000
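For context, a minimal sketch of how the reloaded model and processor are typically used downstream in the app, assuming the processor accepts a raw float waveform resampled to SAMPLE_RATE and the standard generate/batch_decode flow applies; "sample.wav" is a hypothetical input file, not part of this commit:

# Sketch only: assumes processor(waveform) builds model inputs and the usual
# generate/batch_decode API applies. "sample.wav" is a hypothetical file.
import librosa

waveform, _ = librosa.load("sample.wav", sr=SAMPLE_RATE)  # resample to 24 kHz
inputs = processor(waveform)                              # build model inputs
inputs = inputs.to(model.device)
output_tokens = model.generate(**inputs)                  # autoregressive decode
print(processor.batch_decode(output_tokens, skip_special_tokens=True)[0])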
pyproject.toml CHANGED
@@ -11,7 +11,7 @@ dependencies = [
     "librosa>=0.11.0",
     "spaces>=0.37.1",
     "torch==2.5.1",
-    "transformers",
+    "transformers>=4.53.0",
 ]
 
 [tool.ruff]
@@ -54,6 +54,3 @@ convention = "google"
 
 [tool.ruff.format]
 docstring-code-format = true
-
-[tool.uv.sources]
-transformers = { git = "https://github.com/huggingface/transformers", rev = "v4.52.4-Kyutai-STT-preview" }
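With the [tool.uv.sources] git pin removed, the Space relies on the Kyutai STT support shipped in the PyPI release. A quick sanity check (a sketch, not part of the Space code) that the installed release is new enough:

# Sketch: verify the PyPI release provides the classes the git preview supplied.
import transformers

print(transformers.__version__)  # expected to be >= 4.53.0
from transformers import (       # raises ImportError on older releases
    KyutaiSpeechToTextForConditionalGeneration,
    KyutaiSpeechToTextProcessor,
)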
requirements.txt CHANGED
@@ -280,7 +280,7 @@ tqdm==4.67.1
     # via
     #   huggingface-hub
     #   transformers
-transformers
+transformers==4.53.0
     # via kyutai-stt-2-6b-en (pyproject.toml)
 triton==3.1.0
     # via torch
uv.lock CHANGED
@@ -554,7 +554,7 @@ requires-dist = [
     { name = "librosa", specifier = ">=0.11.0" },
     { name = "spaces", specifier = ">=0.37.1" },
     { name = "torch", specifier = "==2.5.1" },
-    { name = "transformers",
+    { name = "transformers", specifier = ">=4.53.0" },
 ]
 
 [[package]]
@@ -2058,8 +2058,8 @@ wheels = [
 
 [[package]]
 name = "transformers"
-version = "4.53.0
-source = {
+version = "4.53.0"
+source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "filelock" },
     { name = "huggingface-hub" },
@@ -2072,6 +2072,10 @@ dependencies = [
     { name = "tokenizers" },
     { name = "tqdm" },
 ]
+sdist = { url = "https://files.pythonhosted.org/packages/e8/40/f2d2c3bcf5c6135027cab0fd7db52f6149a1c23acc4e45f914c43d362386/transformers-4.53.0.tar.gz", hash = "sha256:f89520011b4a73066fdc7aabfa158317c3934a22e3cd652d7ffbc512c4063841", size = 9177265, upload-time = "2025-06-26T16:10:54.729Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5e/0c/68d03a38f6ab2ba2b2829eb11b334610dd236e7926787f7656001b68e1f2/transformers-4.53.0-py3-none-any.whl", hash = "sha256:7d8039ff032c01a2d7f8a8fe0066620367003275f023815a966e62203f9f5dd7", size = 10821970, upload-time = "2025-06-26T16:10:51.505Z" },
+]
 
 [[package]]
 name = "triton"
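The lock entry now records the PyPI sdist and wheel with their sha256 digests. A small sketch of how the wheel digest pinned above could be checked against a locally downloaded file (the filename is taken from the lock entry; the download step itself is assumed):

# Sketch: recompute the sha256 of the downloaded wheel and compare it with the
# digest recorded in uv.lock above.
import hashlib

wheel_path = "transformers-4.53.0-py3-none-any.whl"  # assumed to be present locally
expected = "7d8039ff032c01a2d7f8a8fe0066620367003275f023815a966e62203f9f5dd7"

with open(wheel_path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
print(digest == expected)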