Upload 7 files

- config.json: +15 -14
- model.safetensors: +2 -2
- model_performance_metrics.txt: +31 -61
- tokenizer_config.json: +2 -2
config.json
CHANGED
@@ -1,13 +1,14 @@
 {
-  "_name_or_path": "
-  "activation": "gelu",
+  "_name_or_path": "google-bert/bert-base-uncased",
   "architectures": [
-    "
+    "BertForSequenceClassification"
   ],
-  "
-  "
-  "
-  "
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
   "id2label": {
     "0": "Borderline",
     "1": "Anxiety",
@@ -31,17 +32,17 @@
     "Asperger": 7,
     "PTSD": 8
   },
+  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "
-  "
-  "
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
   "pad_token_id": 0,
+  "position_embedding_type": "absolute",
   "problem_type": "multi_label_classification",
-  "qa_dropout": 0.1,
-  "seq_classif_dropout": 0.2,
-  "sinusoidal_pos_embds": false,
-  "tie_weights_": true,
   "torch_dtype": "float32",
   "transformers_version": "4.40.1",
+  "type_vocab_size": 2,
+  "use_cache": true,
   "vocab_size": 30522
 }
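The new config swaps the previous DistilBERT-style settings for BertForSequenceClassification with "problem_type": "multi_label_classification", so each of the nine labels in id2label is scored independently. A minimal inference sketch; the repo id and the 0.5 decision threshold below are assumptions for illustration, not values taken from this commit:

# Minimal inference sketch. "your-username/mental-health-bert" is a hypothetical
# placeholder for the repository this commit belongs to (or a local checkout path).
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "your-username/mental-health-bert"  # placeholder, swap in the real repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

inputs = tokenizer("example text to classify", truncation=True, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 9): one logit per id2label entry

# multi_label_classification means an independent sigmoid per label, not a softmax over classes.
probs = torch.sigmoid(logits)[0].tolist()
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p > 0.5]  # assumed cutoff
print(predicted)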
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d44c739302c5a68060135646927c15c34e25e6ba979ad755e801ff062b3327da
+size 437980180
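The model.safetensors entry is a Git LFS pointer: the actual weights are stored out of band and identified by the SHA-256 oid and byte size above. A small verification sketch (an assumed workflow, not part of this repo) for checking a downloaded copy against the pointer:

# Verify a local model.safetensors against the LFS pointer's oid and size.
import hashlib
import os

path = "model.safetensors"
expected_oid = "d44c739302c5a68060135646927c15c34e25e6ba979ad755e801ff062b3327da"
expected_size = 437980180

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha256.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")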
model_performance_metrics.txt
CHANGED
@@ -1,73 +1,43 @@
-Training set class counts after balancing:
-_Borderline       10398
-_Anxiety          10393
-_Depression       10400
-_Bipolar          10359
-_OCD              10413
-_ADHD             10412
-_Schizophrenia    10447
-_Asperger         10470
-_PTSD             10489
-dtype: object
-Validation set class counts after balancing:
-_Borderline       1180
-_Anxiety          1185
-_Depression       1178
-_Bipolar          1219
-_OCD              1165
-_ADHD             1166
-_Schizophrenia    1131
-_Asperger         1108
-_PTSD             1089
-dtype: object
-Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert/distilbert-base-uncased and are newly initialized: ['classifier.bias', 'classifier.weight', 'pre_classifier.bias', 'pre_classifier.weight']
-You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
-Epoch 1, Train Loss: 0.2660, Val Loss: 0.2032
-Epoch 2, Train Loss: 0.1891, Val Loss: 0.1873
-F1 Score on Validation Set: 0.6356
-AUC Score on Validation Set: 0.7643
-
-
 Classification Report, AUC Score, F1 Score, and Losses
 
-Train Losses: [0.
-Validation Losses: [0.
-F1 Score: [0.
-AUC Score: [0.
+Train Losses: [0.20890531058768613, 0.15252333594386344]
+Validation Losses: [0.1771729415499725, 0.17168622303959782]
+F1 Score: [0.6797109149921196]
+AUC Score: [0.7942126157213086]
 Classification Report:
 _Borderline:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.6682600382409177
+  Recall: 0.5923728813559322
+  F1-score: 0.628032345013477
 _Anxiety:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.6620808254514188
+  Recall: 0.6497890295358649
+  F1-score: 0.65587734241908
 _Depression:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.7261363636363637
+  Recall: 0.5424448217317487
+  F1-score: 0.6209912536443148
 _Bipolar:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.8055555555555556
+  Recall: 0.5233798195242002
+  F1-score: 0.6345101939333666
 _OCD:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.8200431034482759
+  Recall: 0.6532188841201717
+  F1-score: 0.7271858576206404
 _ADHD:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.8740068104426788
+  Recall: 0.660377358490566
+  F1-score: 0.7523204689789936
 _Schizophrenia:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.8017524644030668
+  Recall: 0.6472148541114059
+  F1-score: 0.716242661448141
 _Asperger:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.7368421052631579
+  Recall: 0.6570397111913358
+  F1-score: 0.6946564885496183
 _PTSD:
-  Precision: 0.
-  Recall: 0.
-  F1-score: 0.
+  Precision: 0.8612244897959184
+  Recall: 0.581267217630854
+  F1-score: 0.6940789473684211
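The updated report lists per-class precision, recall, and F1 for the nine labels, plus overall F1 and AUC on the validation set. A hedged sketch of how such numbers are commonly computed for a multi-label model with scikit-learn; the 0.5 decision threshold and the random stand-in arrays are assumptions, not values from this repo:

# Per-class precision/recall/F1 plus macro F1 and macro AUC for multi-label predictions.
import numpy as np
from sklearn.metrics import precision_recall_fscore_support, f1_score, roc_auc_score

labels = ["Borderline", "Anxiety", "Depression", "Bipolar", "OCD",
          "ADHD", "Schizophrenia", "Asperger", "PTSD"]

# y_true: (n_samples, 9) binary label matrix; y_score: (n_samples, 9) sigmoid probabilities.
y_true = np.random.randint(0, 2, size=(100, 9))   # stand-in data
y_score = np.random.rand(100, 9)                  # stand-in data
y_pred = (y_score >= 0.5).astype(int)             # assumed threshold

precision, recall, f1, _ = precision_recall_fscore_support(
    y_true, y_pred, average=None, zero_division=0)
for name, p, r, f in zip(labels, precision, recall, f1):
    print(f"_{name}: Precision: {p:.4f}  Recall: {r:.4f}  F1-score: {f:.4f}")

print("Macro F1:", f1_score(y_true, y_pred, average="macro", zero_division=0))
print("Macro AUC:", roc_auc_score(y_true, y_score, average="macro"))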
tokenizer_config.json
CHANGED
@@ -45,11 +45,11 @@
   "cls_token": "[CLS]",
   "do_lower_case": true,
   "mask_token": "[MASK]",
-  "model_max_length":
+  "model_max_length": 512,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
   "strip_accents": null,
   "tokenize_chinese_chars": true,
-  "tokenizer_class": "
+  "tokenizer_class": "BertTokenizer",
   "unk_token": "[UNK]"
 }
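With "tokenizer_class" set to BertTokenizer and "model_max_length" set to 512, batches are padded to the longest member and anything beyond 512 tokens is truncated. A short usage sketch; the local path is a hypothetical placeholder for a directory containing these tokenizer files:

# Tokenize a batch with the uploaded tokenizer configuration.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./tokenizer_dir")  # placeholder path

batch = tokenizer(
    ["a short post", "a much longer post " * 200],
    padding=True,      # pad the batch to its longest sequence
    truncation=True,   # cut anything beyond model_max_length (512)
    return_tensors="pt",
)
print(batch["input_ids"].shape)  # (2, <=512)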