"""Test suite for loading shlokn/autogkb dataset from Hugging Face."""

import pytest
from datasets import load_dataset


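# Optional sketch (an assumption, not part of the original tests): a
# session-scoped fixture could cache the dataset so it is downloaded and
# loaded only once per pytest run. The tests below still call load_dataset
# directly; a test would opt in by taking `autogkb_dataset` as a parameter.
@pytest.fixture(scope="session")
def autogkb_dataset():
    """Load the shlokn/autogkb dataset once per test session."""
    return load_dataset("shlokn/autogkb")

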
def test_dataset_loads_successfully():
    """Test that the dataset loads without errors."""
    try:
        dataset = load_dataset("shlokn/autogkb")
        assert dataset is not None
        print(f"✓ Dataset loaded successfully")
        print(f"✓ Splits: {list(dataset.keys())}")
        print(f"✓ Total examples: {sum(len(split) for split in dataset.values())}")
    except Exception as e:
        pytest.fail(f"Failed to load dataset from Hugging Face: {e}")


def test_dataset_splits_exist():
    """Test that expected splits are present."""
    dataset = load_dataset("shlokn/autogkb")
    expected_splits = {'train', 'validation', 'test'}
    actual_splits = set(dataset.keys())
    assert expected_splits.issubset(actual_splits), f"Missing expected splits. Got: {actual_splits}"


def test_dataset_not_empty():
    """Test that each split contains data."""
    dataset = load_dataset("shlokn/autogkb")
    for split_name, split_data in dataset.items():
        assert len(split_data) > 0, f"Split '{split_name}' is empty"


if __name__ == "__main__":
    # Run quick test if script is executed directly
    try:
        dataset = load_dataset("shlokn/autogkb")
        print(f"✓ Dataset loaded successfully")
        print(f"✓ Splits: {list(dataset.keys())}")
        print(f"✓ Total examples: {sum(len(split) for split in dataset.values())}")
        print("\n✓ Basic dataset loading test passed!")
        print("Run 'pytest test_hf_dataset.py -v' for comprehensive tests.")
    except Exception as e:
        print(f"✗ Failed to load dataset: {e}")
        raise SystemExit(1)