Simon Salmon committed on
Commit 8cf31ff · 1 Parent(s): 272a330

Update app.py
Files changed (1): app.py (+1 -17)
app.py CHANGED
@@ -10,8 +10,6 @@ from transformers.activations import get_activation
 from transformers import AutoTokenizer
 
 
-st.title('KoGPT2 Demo')
-
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 from transformers import AutoTokenizer, AutoModelForMaskedLM
@@ -38,18 +36,4 @@ with st.form(key='my_form'):
     mask_hidden_state = last_hidden_state[mask_index]
     idx = torch.topk(mask_hidden_state, k=100, dim=0)[1]
     words = [tokenizer.decode(i.item()).strip() for i in idx]
-    a_list.append(words)
-    length = len(a_list)
-    middle_index = length//2
-    first_half = a_list[:middle_index]
-    second_half = a_list[middle_index:]
-    #st.write(words)
-    #col1= st.columns(1)
-    #with col1:
-    #st.write(words)
-    st.write(first_half)
-    st.write(second_half)
-    st.write(pd.DataFrame({
-        'first column': first_half,
-        'second column': second_half,
-    }))
+    st.write(words)
 
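For reference, a minimal, self-contained sketch of the prediction flow app.py ends up with after this commit. The checkpoint name, the Streamlit form contents, and the mask_index computation are not visible in these hunks, so the placeholders below are assumptions rather than the actual code:

import torch
import streamlit as st
from transformers import AutoTokenizer, AutoModelForMaskedLM

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Hypothetical checkpoint; the real model name is set elsewhere in app.py.
checkpoint = "bert-base-multilingual-cased"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForMaskedLM.from_pretrained(checkpoint).to(device)

text = "Seoul is the capital of [MASK]."  # placeholder for the form input
inputs = tokenizer(text, return_tensors="pt").to(device)

with torch.no_grad():
    # The diff calls this last_hidden_state; for a masked-LM head the per-token
    # vocabulary scores live in .logits, shape (seq_len, vocab_size).
    last_hidden_state = model(**inputs).logits[0]

# Position of the [MASK] token (assumed; computed outside the shown hunk).
mask_index = (inputs["input_ids"][0] == tokenizer.mask_token_id).nonzero(as_tuple=True)[0].item()

# Lines from the second hunk: top-100 candidate ids for the masked position,
# decoded to strings and, after this commit, written straight to the page.
mask_hidden_state = last_hidden_state[mask_index]
idx = torch.topk(mask_hidden_state, k=100, dim=0)[1]
words = [tokenizer.decode(i.item()).strip() for i in idx]
st.write(words)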