{ "test_accuracy": 0.6431758294406515, "test_animal_abuse/accuracy": 0.993771709186729, "test_animal_abuse/f1": 0.7538461538461538, "test_animal_abuse/fpr": 0.0026247117368612656, "test_animal_abuse/precision": 0.7864197530864198, "test_animal_abuse/recall": 0.7238636363636364, "test_animal_abuse/threshold": 0.4399879574775696, "test_child_abuse/accuracy": 0.9965415019762845, "test_child_abuse/f1": 0.6451612903225806, "test_child_abuse/fpr": 0.0013395544852498474, "test_child_abuse/precision": 0.7023411371237458, "test_child_abuse/recall": 0.5965909090909091, "test_child_abuse/threshold": 0.2568320035934448, "test_controversial_topics,politics/accuracy": 0.9654000479099293, "test_controversial_topics,politics/f1": 0.48655854254610087, "test_controversial_topics,politics/fpr": 0.02227363874749341, "test_controversial_topics,politics/precision": 0.4312721543914927, "test_controversial_topics,politics/recall": 0.5581039755351682, "test_controversial_topics,politics/threshold": 0.2689414322376251, "test_discrimination,stereotype,injustice/accuracy": 0.951835549167565, "test_discrimination,stereotype,injustice/f1": 0.7198955158902917, "test_discrimination,stereotype,injustice/fpr": 0.02950300417972827, "test_discrimination,stereotype,injustice/precision": 0.6958424507658644, "test_discrimination,stereotype,injustice/recall": 0.7456709956709957, "test_discrimination,stereotype,injustice/threshold": 0.23370636999607086, "test_drug_abuse,weapons,banned_substance/accuracy": 0.9737842855431789, "test_drug_abuse,weapons,banned_substance/f1": 0.7649348905893408, "test_drug_abuse,weapons,banned_substance/fpr": 0.014171131471324835, "test_drug_abuse,weapons,banned_substance/precision": 0.761154154421587, "test_drug_abuse,weapons,banned_substance/recall": 0.7687533729087965, "test_drug_abuse,weapons,banned_substance/threshold": 0.42298123240470886, "test_financial_crime,property_crime,theft/accuracy": 0.9567912324829321, "test_financial_crime,property_crime,theft/f1": 0.7965599887212745, "test_financial_crime,property_crime,theft/fpr": 0.029260814503765865, "test_financial_crime,property_crime,theft/precision": 0.7628949500405077, "test_financial_crime,property_crime,theft/recall": 0.8333333333333334, "test_financial_crime,property_crime,theft/threshold": 0.3900110721588135, "test_flagged/accuracy": 0.8408042879386753, "test_flagged/aucpr": 0.8939487346032216, "test_flagged/f1": 0.8620363041870484, "test_flagged/fpr": 0.22009927245529265, "test_flagged/precision": 0.8368981936361575, "test_flagged/recall": 0.8887313392904918, "test_hate_speech,offensive_language/accuracy": 0.9472841058809438, "test_hate_speech,offensive_language/f1": 0.6893143915997529, "test_hate_speech,offensive_language/fpr": 0.024340137396049, "test_hate_speech,offensive_language/precision": 0.7250788936328197, "test_hate_speech,offensive_language/recall": 0.656912209889001, "test_hate_speech,offensive_language/threshold": 0.3302551507949829, "test_loss": 0.08685731887817383, "test_macro_f1": 0.6515819573383921, "test_macro_precision": 0.6508084700838987, "test_macro_recall": 0.6632521427544177, "test_micro_f1": 0.7391893125589605, "test_micro_precision": 0.7152469008568653, "test_micro_recall": 0.7647901514435943, "test_misinformation_regarding_ethics,laws_and_safety/accuracy": 0.9763444723919033, "test_misinformation_regarding_ethics,laws_and_safety/f1": 0.18049792531120332, "test_misinformation_regarding_ethics,laws_and_safety/fpr": 0.014217291136170707, "test_misinformation_regarding_ethics,laws_and_safety/precision": 
0.1564748201438849, "test_misinformation_regarding_ethics,laws_and_safety/recall": 0.21323529411764705, "test_misinformation_regarding_ethics,laws_and_safety/threshold": 0.15002882480621338, "test_non_violent_unethical_behavior/accuracy": 0.8675140735417415, "test_non_violent_unethical_behavior/f1": 0.6805761108905173, "test_non_violent_unethical_behavior/fpr": 0.09033637079536488, "test_non_violent_unethical_behavior/precision": 0.6617760617760617, "test_non_violent_unethical_behavior/recall": 0.7004755535740823, "test_non_violent_unethical_behavior/threshold": 0.30239108204841614, "test_privacy_violation/accuracy": 0.9797730267097856, "test_privacy_violation/f1": 0.8087756546355273, "test_privacy_violation/fpr": 0.0114049221242852, "test_privacy_violation/precision": 0.7982676725342275, "test_privacy_violation/recall": 0.8195639701663798, "test_privacy_violation/threshold": 0.33982762694358826, "test_runtime": 91.5424, "test_samples_per_second": 729.629, "test_self_harm/accuracy": 0.9967211642112828, "test_self_harm/f1": 0.7144719687092569, "test_self_harm/fpr": 0.0004372210831021583, "test_self_harm/precision": 0.9042904290429042, "test_self_harm/recall": 0.5905172413793104, "test_self_harm/threshold": 0.6800292134284973, "test_sexually_explicit,adult_content/accuracy": 0.9830518624985028, "test_sexually_explicit,adult_content/f1": 0.6571774682010902, "test_sexually_explicit,adult_content/fpr": 0.011254019292604485, "test_sexually_explicit,adult_content/precision": 0.5961538461538461, "test_sexually_explicit,adult_content/recall": 0.7321187584345479, "test_sexually_explicit,adult_content/threshold": 0.31153157353401184, "test_steps_per_second": 45.607, "test_terrorism,organized_crime/accuracy": 0.9875883339322075, "test_terrorism,organized_crime/f1": 0.38180462341536164, "test_terrorism,organized_crime/fpr": 0.00798659339332084, "test_terrorism,organized_crime/precision": 0.32611464968152865, "test_terrorism,organized_crime/recall": 0.460431654676259, "test_terrorism,organized_crime/threshold": 0.19072403013706207, "test_violence,aiding_and_abetting,incitement/accuracy": 0.91033357288298, "test_violence,aiding_and_abetting,incitement/f1": 0.8425728780590385, "test_violence,aiding_and_abetting,incitement/fpr": 0.08061270584370235, "test_violence,aiding_and_abetting,incitement/precision": 0.8032376083796923, "test_violence,aiding_and_abetting,incitement/recall": 0.88595909342178, "test_violence,aiding_and_abetting,incitement/threshold": 0.3965354263782501 }
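For reference, the `test_macro_*` values above are the unweighted means of the corresponding per-category scores over the 14 harm categories; the aggregate `flagged` label is excluded from the average. A minimal sketch that recomputes them from the JSON (the `test_results.json` filename is an assumption, not something reported above):

```python
import json

# Hypothetical filename: assumes the JSON object above was saved to disk.
with open("test_results.json") as f:
    metrics = json.load(f)

# Per-category keys have the form "test_<category>/<metric>". The "flagged"
# label is the aggregate any-harm indicator, so it is left out of the macro
# average; averaging the remaining 14 categories reproduces test_macro_f1.
categories = sorted(
    k.split("/")[0].removeprefix("test_")
    for k in metrics
    if k.endswith("/f1") and not k.startswith("test_flagged")
)

for name in ("f1", "precision", "recall"):
    macro = sum(metrics[f"test_{c}/{name}"] for c in categories) / len(categories)
    print(f"macro_{name}: {macro:.10f} (reported: {metrics[f'test_macro_{name}']:.10f})")
```

The recomputed and reported values agree; e.g. the mean of the 14 per-category F1 scores comes out to 0.6515819573, matching `test_macro_f1`.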
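The per-category `threshold` entries indicate that each label is scored against its own tuned cutoff rather than a uniform 0.5. Below is a hedged sketch of how such cutoffs could be applied at inference time with a multi-label `transformers` classifier; the checkpoint name and the assumption that `model.config.id2label` uses the category names above are illustrative, not confirmed by the output:

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Hypothetical checkpoint name; substitute the actual fine-tuned model.
MODEL = "your-org/harm-category-classifier"

# Tuned per-category cutoffs, rounded from the "threshold" values above.
# Note that the aggregate "flagged" label reports no threshold.
THRESHOLDS = {
    "animal_abuse": 0.4400,
    "child_abuse": 0.2568,
    "controversial_topics,politics": 0.2689,
    "discrimination,stereotype,injustice": 0.2337,
    "drug_abuse,weapons,banned_substance": 0.4230,
    "financial_crime,property_crime,theft": 0.3900,
    "hate_speech,offensive_language": 0.3303,
    "misinformation_regarding_ethics,laws_and_safety": 0.1500,
    "non_violent_unethical_behavior": 0.3024,
    "privacy_violation": 0.3398,
    "self_harm": 0.6800,
    "sexually_explicit,adult_content": 0.3115,
    "terrorism,organized_crime": 0.1907,
    "violence,aiding_and_abetting,incitement": 0.3965,
}

tokenizer = AutoTokenizer.from_pretrained(MODEL)
model = AutoModelForSequenceClassification.from_pretrained(MODEL)
model.eval()

def classify(text: str) -> dict[str, bool]:
    """Return a per-category decision using each label's tuned cutoff."""
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        logits = model(**inputs).logits.squeeze(0)
    # Multi-label head: each logit is passed through an independent sigmoid.
    probs = torch.sigmoid(logits)
    # Assumes id2label maps class indices to the category names above.
    return {
        label: probs[i].item() >= THRESHOLDS[label]
        for i, label in model.config.id2label.items()
        if label in THRESHOLDS
    }
```

The cutoffs trade precision against recall per category: `self_harm` uses a high cutoff (0.68), consistent with its high precision (0.904) and lower recall (0.591) above, while `misinformation_regarding_ethics,laws_and_safety`, the weakest category (F1 0.180), uses a low one (0.15).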