Testing Guide
Learn how to run the test suite, write new tests with pytest markers and fixtures, check coverage, and integrate tests into CI/CD pipelines.

Running Tests

All Tests

pytest tests/

With Coverage

pytest tests/ --cov=myspellchecker --cov-report=html

# View report
open htmlcov/index.html

Quick Smoke Test

pytest tests/ -x -q --tb=short

Verbose Output

pytest tests/ -v

Test Selection

By File

pytest tests/test_spellchecker.py

By Pattern

# Run tests containing "context"
pytest tests/ -k "context"

# Run tests NOT containing "slow"
pytest tests/ -k "not slow"

By Marker

# Only unit tests
pytest tests/ -m unit

# Only integration tests
pytest tests/ -m integration

# Skip slow tests
pytest tests/ -m "not slow"

Single Test

pytest tests/test_spellchecker.py::TestSpellChecker::test_check_valid_text

Test Markers

Registered markers:
| Marker | Description |
|---|---|
| `@pytest.mark.unit` | Fast, isolated unit tests |
| `@pytest.mark.integration` | Tests with external dependencies |
| `@pytest.mark.slow` | Long-running tests |
Note: Only these three markers are registered in pyproject.toml. Using unregistered markers will cause warnings with --strict-markers.
import pytest

@pytest.mark.unit
def test_normalize():
    from myspellchecker.text.normalize import normalize
    assert normalize("test") == "test"

@pytest.mark.slow
@pytest.mark.integration
def test_large_corpus():
    # Long-running test
    pass

Test Structure

Directory Layout

tests/
├── conftest.py           # Shared fixtures
├── test_spellchecker.py  # SpellChecker tests
├── test_validators.py    # Validator tests
├── test_providers.py     # Provider tests
├── test_symspell.py      # SymSpell algorithm tests
├── test_ngram_*.py       # N-gram tests
├── test_viterbi.py       # Viterbi tests
├── integration/          # Integration tests
│   ├── test_config_loading.py
│   └── test_cli_stdin_stdout.py
├── e2e/                  # End-to-end tests
│   ├── test_cli_e2e.py
│   └── test_robustness.py
└── fixtures/             # Test data
    ├── sample_corpus.txt
    └── benchmarks/

Naming Conventions

# Test file: test_<module>.py
# Test class: Test<ClassName>
# Test function: test_<behavior>

class TestSpellChecker:
    def test_check_returns_response(self):
        pass

    def test_check_detects_invalid_syllable(self):
        pass

    def test_check_with_empty_input_returns_no_errors(self):
        pass

Fixtures

Common Fixtures (conftest.py)

import pytest
from myspellchecker import SpellChecker
from myspellchecker.providers import MemoryProvider

@pytest.fixture
def checker():
    """Create a spell checker for testing."""
    return SpellChecker()

@pytest.fixture
def memory_provider():
    """Create an in-memory provider."""
    provider = MemoryProvider()
    provider.add_word("မြန်မာ", frequency=100)
    provider.add_word("နိုင်ငံ", frequency=100)
    return provider

@pytest.fixture
def checker_with_memory(memory_provider):
    """Spell checker with memory provider."""
    return SpellChecker(provider=memory_provider)

Fixture Scope

@pytest.fixture(scope="module")
def expensive_resource():
    """Created once per module."""
    return create_expensive_resource()

@pytest.fixture(scope="session")
def database():
    """Created once per test session."""
    return create_test_database()

Writing Tests

Basic Test

def test_check_valid_text(checker):
    result = checker.check("မြန်မာ")
    assert not result.has_errors

def test_check_invalid_text(checker):
    result = checker.check("invalid_text_xyz")
    assert result.has_errors

Parameterized Tests

import pytest

@pytest.mark.parametrize("text,expected_errors", [
    ("မြန်မာ", 0),
    ("မြန်မာနိုင်ငံ", 0),
    ("xyz", 1),
])
def test_check_various_inputs(checker, text, expected_errors):
    result = checker.check(text)
    assert len(result.errors) == expected_errors

Exception Testing

import pytest
from myspellchecker import SpellChecker
from myspellchecker.providers import SQLiteProvider
from myspellchecker.core.exceptions import MissingDatabaseError

def test_missing_database_raises():
    with pytest.raises(MissingDatabaseError):
        provider = SQLiteProvider(database_path="/nonexistent/path.db")
        SpellChecker(provider=provider)

Async Tests

import pytest

@pytest.mark.asyncio
async def test_async_check(checker):
    result = await checker.check_async("မြန်မာ")
    assert not result.has_errors

Mock Testing

from unittest.mock import Mock, patch

def test_with_mocked_provider():
    mock_provider = Mock()
    mock_provider.contains_word.return_value = True

    checker = SpellChecker(provider=mock_provider)
    result = checker.check("test")

    mock_provider.contains_word.assert_called()

Test Data

Loading Test Data

import json
from pathlib import Path

@pytest.fixture
def test_words():
    path = Path(__file__).parent / "fixtures" / "test_words.json"
    return json.loads(path.read_text())

def test_all_words_valid(checker, test_words):
    for word in test_words["valid"]:
        result = checker.check(word)
        assert not result.has_errors, f"Expected valid: {word}"

Sample Corpus

@pytest.fixture
def sample_corpus():
    path = Path(__file__).parent / "fixtures" / "sample_corpus.txt"
    return path.read_text().splitlines()

Coverage Requirements

The project requires ≥75% code coverage:
# Check coverage
pytest tests/ --cov=myspellchecker --cov-fail-under=75

Excluding Code from Coverage

# pragma: no cover - exclude line
if TYPE_CHECKING:  # pragma: no cover
    from typing import Optional

# Or in pyproject.toml
[tool.coverage.run]
omit = [
    "*/tests/*",
    "*/examples/*",
    "*/__pycache__/*",
]

Performance Testing

Benchmarks

import pytest

@pytest.mark.slow
def test_performance_single_check(checker, benchmark):
    result = benchmark(checker.check, "မြန်မာနိုင်ငံ")
    assert not result.has_errors

@pytest.mark.slow
def test_performance_batch(checker, benchmark):
    texts = ["မြန်မာ"] * 100
    results = benchmark(checker.check_batch, texts)
    assert len(results) == 100

Running Benchmarks

pytest tests/ -m slow --benchmark-only

CI/CD Integration

GitHub Actions Example

# .github/workflows/test.yml
name: Tests

on: [push, pull_request]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install dependencies
        run: |
          pip install -e ".[dev]"
          python setup.py build_ext --inplace

      - name: Run tests
        run: pytest tests/ --cov=myspellchecker --cov-fail-under=75

      - name: Upload coverage
        uses: codecov/codecov-action@v3

Troubleshooting

Tests Not Found

# Ensure proper naming
# Files: test_*.py or *_test.py
# Functions: test_*
# Classes: Test*

# Check collection
pytest tests/ --collect-only

Fixture Not Found

# Check conftest.py location
# Fixtures must be in conftest.py or imported

# List available fixtures
pytest tests/ --fixtures

Slow Tests

# Profile test duration
pytest tests/ --durations=10

# Skip slow tests during development
pytest tests/ -m "not slow"

See Also