metastable-void committed
Commit 6371c0b · 1 parent: e10b173

update params

Files changed (2):
  1. app.py +2 -2
  2. pyproject.toml +1 -0
app.py CHANGED
@@ -15,8 +15,8 @@ DESCRIPTION = "# 真空ジェネレータ\n<p>Imitate 真空 (@vericava)'s posts
 if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"
 
-MAX_MAX_NEW_TOKENS = 2048
-DEFAULT_MAX_NEW_TOKENS = 1024
+MAX_MAX_NEW_TOKENS = 768
+DEFAULT_MAX_NEW_TOKENS = 512
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))
 
 
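The change halves the generation ceiling (MAX_MAX_NEW_TOKENS 2048 → 768) and the default (DEFAULT_MAX_NEW_TOKENS 1024 → 512). Below is a minimal sketch of how constants like these are typically wired into a Gradio text-generation demo; the slider, model id, and generate() call are assumptions for illustration, not code from this repository:

import os

import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MAX_MAX_NEW_TOKENS = 768
DEFAULT_MAX_NEW_TOKENS = 512
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))

model_id = "your-org/your-model"  # placeholder; the actual model id is not part of this diff
device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16).to(device)

def generate(prompt: str, max_new_tokens: int = DEFAULT_MAX_NEW_TOKENS) -> str:
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    # Trim the prompt so it never exceeds MAX_INPUT_TOKEN_LENGTH tokens.
    input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:].to(device)
    output = model.generate(input_ids, max_new_tokens=max_new_tokens, do_sample=True)
    return tokenizer.decode(output[0], skip_special_tokens=True)

demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(label="Prompt"),
        # Slider bounded by the new MAX_MAX_NEW_TOKENS and defaulting to DEFAULT_MAX_NEW_TOKENS.
        gr.Slider(label="Max new tokens", minimum=1, maximum=MAX_MAX_NEW_TOKENS, step=1, value=DEFAULT_MAX_NEW_TOKENS),
    ],
    outputs=gr.Textbox(label="Generated text"),
)

if __name__ == "__main__":
    demo.launch()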
pyproject.toml CHANGED
@@ -8,6 +8,7 @@ dependencies = [
     "torch==2.4.0",
     "transformers>=4.48.3",
     "peft==0.14.0",
+    "sentencepiece==0.2.0",
 ]
 description = ""
 name = "chat-1"
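The new "sentencepiece==0.2.0" pin is most likely there because the model's tokenizer uses a SentencePiece vocabulary, which transformers loads through the sentencepiece package. A quick sanity check that the pinned backend is importable in the environment (nothing below comes from the repository itself):

import sentencepiece as spm
import transformers

# Both imports succeeding means the tokenizer backend added in this commit is available.
print("sentencepiece", spm.__version__)    # expected 0.2.0 with the new pin
print("transformers", transformers.__version__)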