sagawa committed
Commit 5d320ee · 1 Parent(s): f83527e

Update app.py

Files changed (1)
1. app.py (+2, -3)
app.py CHANGED

@@ -35,14 +35,13 @@ def seed_everything(seed=42):
 seed_everything(seed=CFG.seed)
 
 
-st.json(CFG.input_data)
 tokenizer = AutoTokenizer.from_pretrained(CFG.model_name_or_path, return_tensors='pt')
 
 if CFG.model == 't5':
     model = AutoModelForSeq2SeqLM.from_pretrained(CFG.model_name_or_path).to(device)
 elif CFG.model == 'deberta':
     model = EncoderDecoderModel.from_pretrained(CFG.model_name_or_path).to(device)
-print('ok')
+
 input_compound = CFG.input_data
 min_length = min(input_compound.find('CATALYST') - input_compound.find(':') - 10, 0)
 inp = tokenizer(input_compound, return_tensors='pt').to(device)
@@ -61,4 +60,4 @@ if type(mol) == None:
 output += scores
 output = [input_compound] + output
 output_df = pd.DataFrame(np.array(output).reshape(1, -1), columns=['input'] + [f'{i}th' for i in range(CFG.num_beams)] + ['valid compound'] + [f'{i}th score' for i in range(CFG.num_beams)] + ['valid compound score'])
-st.json(output)
+st.table(output_df)
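
Note: this commit drops the st.json(CFG.input_data) and print('ok') debug output and switches the result display from st.json(output) to st.table(output_df), so the beam-search results render as a one-row table instead of a raw JSON list. Below is a minimal, self-contained sketch of that display path; num_beams, the placeholder predictions, scores, and the input string are assumptions standing in for CFG and the real generation results in app.py, not part of this commit.

import numpy as np
import pandas as pd
import streamlit as st

num_beams = 5                                                  # assumed stand-in for CFG.num_beams
input_compound = 'REACTANT:placeholder'                        # assumed stand-in for CFG.input_data
preds = [f'pred_{i}' for i in range(num_beams)]                # placeholder beam predictions
scores = [round(0.9 - 0.1 * i, 2) for i in range(num_beams)]   # placeholder beam scores

# Same one-row layout app.py builds: input, N beam predictions, a valid-compound
# column, N beam scores, and the valid compound's score.
output = [input_compound] + preds + [preds[0]] + scores + [scores[0]]
columns = (['input']
           + [f'{i}th' for i in range(num_beams)]
           + ['valid compound']
           + [f'{i}th score' for i in range(num_beams)]
           + ['valid compound score'])
output_df = pd.DataFrame(np.array(output).reshape(1, -1), columns=columns)

# st.table renders the one-row DataFrame as a static table,
# which reads more easily than the raw list st.json displayed before.
st.table(output_df)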