Commit 3003e77
Parent(s): 072e8cc
Update app.py

app.py CHANGED
@@ -25,7 +25,7 @@ def get_first_subword(word):
     except:
         return tokenizer(word, add_special_tokens=False)['input_ids'][0]
 
-def search(token_to_lookup, num_neighbors=500):
+def search(token_to_lookup, num_neighbors=250):
     i = get_first_subword(token_to_lookup)
     _ , I_IP = index_IP.search(normalized_input_embeddings[i:i+1], num_neighbors)
     hits_IP = lookup_table.take(I_IP[0])
@@ -43,9 +43,8 @@ def search(token_to_lookup, num_neighbors=500):
 iface = gr.Interface(
     fn=search,
 
-    inputs=[gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."),
-            gr.Slider(minimum=0, maximum=1000, value=250, step=10,label="number of neighbors")],
-    #inputs=gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."),
+    #inputs=[gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."), gr.Slider(minimum=0, maximum=1000, value=250, step=10,label="number of neighbors")],
+    inputs=gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."),
     outputs=[gr.Textbox(label="IP-Nearest tokens"), gr.Textbox(label="IP-Nearest subwords"), gr.Textbox(label="L2-Nearest tokens"), gr.Textbox(label="L2-Nearest subwords")],
     examples=[
         ["##logy"],
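
Note on the first hunk: it only lowers the default num_neighbors from 500 to 250. None of the supporting objects (tokenizer, index_IP, normalized_input_embeddings, lookup_table) are defined inside these hunks, so the following is a minimal sketch of the context they appear to assume: an exact FAISS inner-product index over L2-normalized input embeddings, plus a pandas Series mapping embedding rows back to vocabulary tokens. The checkpoint name and the try-branch of get_first_subword are illustrative guesses, not taken from the diff.

# Minimal sketch of the context these hunks assume. Only search and the tail of
# get_first_subword appear in the diff; the checkpoint, index construction, and
# lookup table below are assumptions for illustration.
import faiss
import numpy as np
import pandas as pd
from transformers import AutoModel, AutoTokenizer

MODEL_NAME = "bert-base-uncased"  # assumed; the actual checkpoint is not shown in the diff
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModel.from_pretrained(MODEL_NAME)

# Input (wordpiece) embedding matrix, L2-normalized so inner product equals cosine similarity.
input_embeddings = model.get_input_embeddings().weight.detach().numpy().astype("float32")
normalized_input_embeddings = (
    input_embeddings / np.linalg.norm(input_embeddings, axis=1, keepdims=True)
)

# Exact inner-product index over the normalized embeddings.
index_IP = faiss.IndexFlatIP(normalized_input_embeddings.shape[1])
index_IP.add(normalized_input_embeddings)

# Maps a row index in the embedding matrix back to its vocabulary token string.
lookup_table = pd.Series(tokenizer.convert_ids_to_tokens(list(range(len(tokenizer)))))

def get_first_subword(word):
    try:
        return tokenizer.vocab[word]  # assumed try-branch: exact vocabulary hit
    except KeyError:
        # Fall back to the first subword id of the tokenized input, as in the diff.
        return tokenizer(word, add_special_tokens=False)["input_ids"][0]

def search(token_to_lookup, num_neighbors=250):  # new default introduced by this commit
    i = get_first_subword(token_to_lookup)
    _, I_IP = index_IP.search(normalized_input_embeddings[i:i + 1], num_neighbors)
    hits_IP = lookup_table.take(I_IP[0])
    return hits_IP  # the real app also returns L2-based neighbors; that part is outside these hunks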
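
Note on the second hunk: it flips which inputs= line is active. The Textbox-plus-Slider list is now commented out and a single Textbox is the only input, so the neighbor count is no longer adjustable in the UI and always falls back to the new default of 250. After the commit the interface wires up roughly as below; the part of search that produces the four output strings lies outside these hunks, so the stub here simply assumes one string per output Textbox.

import gradio as gr

def search(token_to_lookup, num_neighbors=250):
    # Stub with the post-commit signature: one input string in, four strings out,
    # matching the four output Textboxes. The real ranking logic lives in app.py.
    return (
        f"IP tokens near {token_to_lookup!r} (top {num_neighbors})",
        "IP subwords ...",
        "L2 tokens ...",
        "L2 subwords ...",
    )

iface = gr.Interface(
    fn=search,
    # The Textbox+Slider input list is commented out by this commit, so num_neighbors
    # is no longer exposed in the UI.
    inputs=gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."),
    outputs=[
        gr.Textbox(label="IP-Nearest tokens"),
        gr.Textbox(label="IP-Nearest subwords"),
        gr.Textbox(label="L2-Nearest tokens"),
        gr.Textbox(label="L2-Nearest subwords"),
    ],
    examples=[["##logy"]],
)

if __name__ == "__main__":
    iface.launch()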