aayush97 / semeval2023-afrisenti

A low-resource sentiment analysis project for African languages.
MIT License

Use the huggingface pipeline to compute sentiment analysis on translations and compute a second baseline. #2

Closed. aayush97 closed this issue 2 years ago.

aayush97 commented 2 years ago


| model | language | num_examples | precision (NEG / POS / NEU) | recall (NEG / POS / NEU) | f1_score_macro | accuracy |
| --- | --- | --- | --- | --- | --- | --- |
| finiteautomata/bertweet-base-sentiment-analysis | am | 5984 | 0.4678 / 0.6204 / 0.6281 | 0.6156 / 0.4895 / 0.5860 | 0.5617 | 0.5722 |
| finiteautomata/bertweet-base-sentiment-analysis | dz | 1651 | 0.7591 / 0.5984 / 0.2694 | 0.4204 / 0.3501 / 0.7193 | 0.4583 | 0.4646 |
| finiteautomata/bertweet-base-sentiment-analysis | ha | 14172 | 0.6618 / 0.8424 / 0.5258 | 0.6261 / 0.5622 / 0.7191 | 0.6417 | 0.6372 |
| finiteautomata/bertweet-base-sentiment-analysis | ig | 10192 | 0.5729 / 0.6696 / 0.6188 | 0.4835 / 0.6498 / 0.6870 | 0.6117 | 0.6238 |
| finiteautomata/bertweet-base-sentiment-analysis | ma | 5583 | 0.6581 / 0.8600 / 0.5102 | 0.4141 / 0.4613 / 0.8482 | 0.5820 | 0.5970 |
| finiteautomata/bertweet-base-sentiment-analysis | pcm | 5121 | 0.8369 / 0.7670 / 0.0130 | 0.4924 / 0.2876 / 0.4583 | 0.3546 | 0.4196 |
| finiteautomata/bertweet-base-sentiment-analysis | pt | 3063 | 0.5596 / 0.6464 / 0.7369 | 0.7980 / 0.6549 / 0.5794 | 0.6524 | 0.6520 |
| finiteautomata/bertweet-base-sentiment-analysis | sw | 1810 | 0.3942 / 0.5981 / 0.6848 | 0.6440 / 0.4516 / 0.6931 | 0.5642 | 0.6149 |
| finiteautomata/bertweet-base-sentiment-analysis | yo | 8522 | 0.5947 / 0.8486 / 0.5314 | 0.5283 / 0.4811 / 0.8295 | 0.6071 | 0.6185 |
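For reference, a minimal sketch of how this translation-based baseline can be reproduced with the Hugging Face `pipeline` and scikit-learn. The file layout, the column names (`translated_text`, `label`), and the NEG/NEU/POS to NEGATIVE/NEUTRAL/POSITIVE label mapping are assumptions for illustration, not taken from this repo; adjust them to wherever the translated splits actually live.

```python
# Sketch of the second baseline: run an English sentiment model over
# machine-translated tweets and score the predictions per language.
# Assumed (hypothetical) data format: a TSV with "translated_text" and "label"
# columns, where labels are NEGATIVE / NEUTRAL / POSITIVE.
import pandas as pd
from transformers import pipeline
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score

# The checkpoint reports short labels (NEG / NEU / POS); map them to the
# dataset's label names. Adjust if your checkpoint emits different strings.
LABEL_MAP = {"NEG": "NEGATIVE", "NEU": "NEUTRAL", "POS": "POSITIVE"}
LABELS = ["NEGATIVE", "POSITIVE", "NEUTRAL"]


def evaluate_language(tsv_path: str) -> dict:
    df = pd.read_csv(tsv_path, sep="\t")
    clf = pipeline(
        "sentiment-analysis",
        model="finiteautomata/bertweet-base-sentiment-analysis",
    )
    # Truncate long tweets to the model's max length and batch for speed.
    preds = clf(df["translated_text"].tolist(), batch_size=32, truncation=True)
    y_pred = [LABEL_MAP.get(p["label"], p["label"]) for p in preds]
    y_true = df["label"].tolist()
    return {
        "num_examples": len(df),
        "precision": dict(zip(LABELS, precision_score(y_true, y_pred, labels=LABELS, average=None))),
        "recall": dict(zip(LABELS, recall_score(y_true, y_pred, labels=LABELS, average=None))),
        "f1_score_macro": f1_score(y_true, y_pred, average="macro"),
        "accuracy": accuracy_score(y_true, y_pred),
    }


if __name__ == "__main__":
    # Hypothetical path; one call per language produces one row of the table above.
    print(evaluate_language("data/translated/ha_dev.tsv"))
```

Running this once per language yields one row of the table above; keeping the per-class precision/recall breakdown makes failure modes easy to spot, such as the near-zero NEUTRAL precision on pcm.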