coeuslearning committed
Commit bae1d90 · 1 Parent(s): 566b50e

Update app.py

Files changed (1): app.py +1 -1
app.py CHANGED
@@ -14,7 +14,7 @@ DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 DESCRIPTION = """\
-Llama. Protected.
+# Llama. Protected. With Protecto.
 """
 
 if not torch.cuda.is_available():
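
For context, a DESCRIPTION string in a Gradio demo like this is typically rendered with gr.Markdown, which is why the leading "#" added in this commit matters: it promotes the text to a top-level heading. Below is a minimal sketch of that rendering pattern; the gr.Blocks layout shown here is an assumption for illustration, not the repository's actual app.py.

import gradio as gr

DESCRIPTION = """\
# Llama. Protected. With Protecto.
"""

# Hypothetical layout: the real app.py builds a full chat interface around this.
with gr.Blocks() as demo:
    # gr.Markdown interprets the string as Markdown, so the "#" prefix
    # introduced in this commit renders the description as a heading.
    gr.Markdown(DESCRIPTION)

if __name__ == "__main__":
    demo.queue().launch()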