Oliver Li committed
Commit f88671d · Parent(s): d1a1e86
fix table
app.py CHANGED
@@ -28,7 +28,7 @@ category = {'LABEL_0': 'toxic', 'LABEL_1': 'severe_toxic', 'LABEL_2': 'obscene',
 # Model selection
 model_options = {
     "Olivernyu/finetuned_bert_base_uncased": {
-        "description": "This model detects different types of toxicity like threats, obscenity, insults, and identity-based hate in text.",
+        "description": "This model detects different types of toxicity like threats, obscenity, insults, and identity-based hate in text. The table is prepopulated with some data, the table will be displayed once you hit analyze.",
     },
     "distilbert-base-uncased-finetuned-sst-2-english": {
         "labels": ["NEGATIVE", "POSITIVE"],
@@ -102,7 +102,6 @@ initial_table_data = [{'Text (portion)': ["who's speaking? \n you goddamn cocksu
 for d in initial_table_data:
     table_df = pd.concat([table_df, pd.DataFrame(d)], ignore_index=True)
 # Load the model and perform toxicity analysis
-st.table(table_df)
 if st.button("Analyze"):
     if not text:
         st.write("Please enter a text.")
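The removed st.table(table_df) call rendered the prepopulated table on every script run, before the user ever pressed Analyze; after this commit the table can only appear once the Analyze handler runs, which is what the updated description promises. A minimal sketch of that flow follows (the column names, sample row, and text_area label are illustrative assumptions, not copied from app.py):

import pandas as pd
import streamlit as st

# Build the prepopulated table up front (columns are hypothetical placeholders).
table_df = pd.DataFrame(columns=["Text (portion)", "Toxicity class", "Probability"])
initial_table_data = [
    {"Text (portion)": ["sample comment"], "Toxicity class": ["toxic"], "Probability": [0.97]},
]
for d in initial_table_data:
    table_df = pd.concat([table_df, pd.DataFrame(d)], ignore_index=True)

text = st.text_area("Enter text to analyze")
if st.button("Analyze"):
    if not text:
        st.write("Please enter a text.")
    else:
        # The table is drawn only after the button press, not on initial load.
        st.table(table_df)

Because Streamlit re-executes the whole script on every interaction, any top-level st.table call draws immediately on page load; moving the call inside the st.button branch is the idiomatic way to defer rendering until the user acts.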