Open Lithuanian LLM Leaderboard

  • Total Models: 11
| Model | Precision | #Params (B) | Average Accuracy | MMLU | GSM8K | ARC Easy | Winogrande | TruthfulQA | Hellaswag | Belebele | Hub License | Model sha |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| [meta-llama/Llama-3.3-70B-Instruct](https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct) | bfloat16 | 70 | 66.27 | 71.46 | 80.97 | 70.66 | 59.83 | 45.61 | 46.05 | 89.33 | llama3.3 | main |
| [google/gemma-2-27b](https://huggingface.co/google/gemma-2-27b) | bfloat16 | 27.2 | 65.68 | 64.82 | 68.69 | 77.40 | 66.77 | 42.06 | 50.82 | 89.22 | gemma | main |
| [meta-llama/Llama-3.1-70B](https://huggingface.co/meta-llama/Llama-3.1-70B) | bfloat16 | 70 | 64.98 | 67.50 | 72.40 | 70.92 | 64.01 | 43.59 | 46.39 | 90.02 | llama3.1 | main |
| [google/gemma-2-9b](https://huggingface.co/google/gemma-2-9b) | bfloat16 | 9.24 | 55.87 | 60.09 | 25.78 | 68.31 | 65.15 | 39.69 | 45.32 | 86.78 | gemma | main |
| [utter-project/EuroLLM-9B](https://huggingface.co/utter-project/EuroLLM-9B) | bfloat16 | 9.15 | 53.82 | 51.95 | 31.16 | 71.55 | 64.17 | 42.13 | 46.32 | 69.44 | eurollm | main |
| [meta-llama/Llama-3.1-8B](https://huggingface.co/meta-llama/Llama-3.1-8B) | bfloat16 | 8.03 | 45.47 | 44.86 | 30.17 | 48.65 | 54.22 | 37.61 | 35.19 | 67.56 | llama3.1 | main |
| [google/gemma-2-2b](https://huggingface.co/google/gemma-2-2b) | bfloat16 | 2.61 | 39.83 | 35.84 | 3.64 | 45.45 | 51.85 | 54.78 | 34.80 | 52.44 | gemma | main |
| [meta-llama/Llama-3.2-3B](https://huggingface.co/meta-llama/Llama-3.2-3B) | bfloat16 | 3.21 | 36.76 | 36.41 | 13.04 | 39.39 | 51.85 | 38.87 | 31.51 | 46.22 | llama3.2 | main |
| [neurotechnology/Lt-Llama-2-13b-hf](https://huggingface.co/neurotechnology/Lt-Llama-2-13b-hf) | bfloat16 | 12.7 | 35.23 | 26.44 | 0.45 | 54.50 | 61.72 | 35.23 | 40.61 | 27.67 | llama2 | main |
| [neurotechnology/Lt-Llama-2-7b-hf](https://huggingface.co/neurotechnology/Lt-Llama-2-7b-hf) | bfloat16 | 6.9 | 32.09 | 26.01 | 0.00 | 43.18 | 53.67 | 41.38 | 33.17 | 27.23 | llama2 | main |
| [meta-llama/Llama-3.2-1B](https://huggingface.co/meta-llama/Llama-3.2-1B) | bfloat16 | 1.24 | 28.74 | 25.85 | 1.82 | 29.08 | 49.80 | 38.75 | 28.53 | 27.33 | llama3.2 | main |
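
The leaderboard exposes the table underneath as a JSON payload with a `headers` list (column names) and a `data` list (one array per model row), where the Model cells carry HTML anchors to the Hugging Face hub. The sketch below shows one way to load such a payload into a pandas DataFrame, strip the anchor markup, and re-rank the models; the file name `leaderboard.json` and the exact field layout are assumptions based on the raw dump this table was reconstructed from, not a documented API.

```python
import json

import pandas as pd

# Assumed local copy of the leaderboard payload: {"headers": [...], "data": [[...], ...]}.
with open("leaderboard.json") as f:
    payload = json.load(f)

df = pd.DataFrame(payload["data"], columns=payload["headers"])

# The "Model" cells are HTML anchors pointing at the hub; keep only the visible name.
df["Model"] = df["Model"].str.replace(r"<[^>]+>", "", regex=True).str.strip()

# "Average Accuracy" is numerically the unweighted mean of the seven benchmark
# columns, which can be re-derived from the per-task scores as a sanity check.
benchmarks = ["MMLU", "GSM8K", "ARC Easy", "Winogrande", "TruthfulQA", "Hellaswag", "Belebele"]
df["Recomputed Average"] = df[benchmarks].mean(axis=1)

# Rank models by average accuracy, highest first.
ranking = df.sort_values("Average Accuracy", ascending=False)
print(ranking[["Model", "#Params (B)", "Average Accuracy", "Recomputed Average"]]
      .to_string(index=False))
```

For example, for meta-llama/Llama-3.3-70B-Instruct the recomputed value is (71.46 + 80.97 + 70.66 + 59.83 + 45.61 + 46.05 + 89.33) / 7 ≈ 66.27, matching the Average Accuracy column.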