Learn how to run the test suite, write new tests with pytest markers and fixtures, check coverage, and integrate tests into CI/CD pipelines.
Running Tests
All Tests
pytest tests/
With Coverage
pytest tests/ --cov=myspellchecker --cov-report=html
# View report
open htmlcov/index.html
Quick Smoke Test
pytest tests/ -x -q --tb=short
Verbose Output
pytest tests/ -v
Test Selection
By File
pytest tests/test_spellchecker.py
By Pattern
# Run tests containing "context"
pytest tests/ -k "context"
# Run tests NOT containing "slow"
pytest tests/ -k "not slow"
By Marker
# Only unit tests
pytest tests/ -m unit
# Only integration tests
pytest tests/ -m integration
# Skip slow tests
pytest tests/ -m "not slow"
Single Test
pytest tests/test_spellchecker.py::TestSpellChecker::test_check_valid_text
Test Markers
Registered markers:
Marker | Description
@pytest.mark.unit — Fast, isolated unit tests
@pytest.mark.integration — Tests with external dependencies
@pytest.mark.slow — Long-running tests (skipped by default)
@pytest.mark.e2e — End-to-end tests
@pytest.mark.benchmark — Benchmark and performance tests
Note: These five markers are registered in pyproject.toml. Using unregistered markers will cause warnings with --strict-markers. Slow tests are skipped by default; run them explicitly with pytest -m slow.
import pytest

@pytest.mark.unit
def test_normalize():
    from myspellchecker.text.normalize import normalize
    assert normalize("test") == "test"

@pytest.mark.slow
@pytest.mark.integration
def test_large_corpus():
    # Long-running test
    pass
Test Structure
Directory Layout
test_validators_edge_cases.py
Naming Conventions
# Test file: test_<module>.py
# Test class: Test<ClassName>
# Test function: test_<behavior>

class TestSpellChecker:
    def test_check_returns_response(self):
        pass

    def test_check_detects_invalid_syllable(self):
        pass

    def test_check_with_empty_input_returns_no_errors(self):
        pass
Fixtures
Common Fixtures (conftest.py)
import pytest
from myspellchecker import SpellChecker
from myspellchecker.providers import MemoryProvider

@pytest.fixture
def checker():
    """Create a spell checker for testing."""
    return SpellChecker()

@pytest.fixture
def memory_provider():
    """Create an in-memory provider."""
    provider = MemoryProvider()
    provider.add_word("မြန်မာ", frequency=100)
    provider.add_word("နိုင်ငံ", frequency=100)
    return provider

@pytest.fixture
def checker_with_memory(memory_provider):
    """Spell checker with memory provider."""
    return SpellChecker(provider=memory_provider)
Fixture Scope
@pytest.fixture(scope="module")
def expensive_resource():
    """Created once per module."""
    return create_expensive_resource()

@pytest.fixture(scope="session")
def database():
    """Created once per test session."""
    return create_test_database()
Writing Tests
Basic Test
def test_check_valid_text(checker):
    result = checker.check("မြန်မာ")
    assert not result.has_errors

def test_check_invalid_text(checker):
    result = checker.check("invalid_text_xyz")
    assert result.has_errors
Parameterized Tests
import pytest

@pytest.mark.parametrize("text,expected_errors", [
    ("မြန်မာ", 0),
    ("မြန်မာနိုင်ငံ", 0),
    ("xyz", 1),
])
def test_check_various_inputs(checker, text, expected_errors):
    result = checker.check(text)
    assert len(result.errors) == expected_errors
Exception Testing
import pytest
from myspellchecker import SpellChecker
from myspellchecker.providers import SQLiteProvider
from myspellchecker.core.exceptions import MissingDatabaseError

def test_missing_database_raises():
    with pytest.raises(MissingDatabaseError):
        provider = SQLiteProvider(database_path="/nonexistent/path.db")
        SpellChecker(provider=provider)
Async Tests
import pytest

@pytest.mark.asyncio
async def test_async_check(checker):
    result = await checker.check_async("မြန်မာ")
    assert not result.has_errors
pytest-asyncio must be installed separately (pip install pytest-asyncio) as it is not included in the dev dependencies.
Mock Testing
from unittest.mock import Mock, patch

def test_with_mocked_provider():
    mock_provider = Mock()
    mock_provider.contains_word.return_value = True
    checker = SpellChecker(provider=mock_provider)
    result = checker.check("test")
    mock_provider.contains_word.assert_called()
Test Data
Loading Test Data
import json
from pathlib import Path

@pytest.fixture
def test_samples():
    from tests.fixtures.myanmar_test_samples import VALID_WORDS
    return VALID_WORDS

def test_all_words_valid(checker, test_samples):
    for word in test_samples:
        result = checker.check(word)
        assert not result.has_errors, f"Expected valid: {word}"
Myanmar Test Samples
@pytest.fixture
def myanmar_samples():
    from tests.fixtures.myanmar_test_samples import SAMPLE_SENTENCES
    return SAMPLE_SENTENCES
Coverage Requirements
The project requires ≥75% code coverage:
# Check coverage
pytest tests/ --cov=myspellchecker --cov-fail-under=75
Excluding Code from Coverage
# pragma: no cover - exclude line
if TYPE_CHECKING:  # pragma: no cover
    from typing import Optional

# Or in pyproject.toml
[tool.coverage.run]
omit = [
    "*/tests/*",
    "*/examples/*",
    "*/__pycache__/*",
]
Benchmarks
import pytest

@pytest.mark.slow
@pytest.mark.benchmark
def test_performance_single_check(checker, benchmark):
    result = benchmark(checker.check, "မြန်မာနိုင်ငံ")
    assert not result.has_errors

@pytest.mark.slow
@pytest.mark.benchmark
def test_performance_batch(checker, benchmark):
    texts = ["မြန်မာ"] * 100
    results = benchmark(checker.check_batch, texts)
    assert len(results) == 100
Running Benchmarks
pytest tests/ -m slow --benchmark-only
CI/CD Integration
GitHub Actions Example
# .github/workflows/test.yml
name: Tests
on: [push, pull_request]
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install dependencies
        run: |
          pip install -e ".[dev]"
          python setup.py build_ext --inplace
      - name: Run tests
        run: pytest tests/ --cov=myspellchecker --cov-fail-under=75
      - name: Upload coverage
        uses: codecov/codecov-action@v3
Troubleshooting
Tests Not Found
# Ensure proper naming
# Files: test_*.py
# Functions: test_*
# Classes: Test*
# Check collection
pytest tests/ --collect-only
Fixture Not Found
# Check conftest.py location
# Fixtures must be in conftest.py or imported
# List available fixtures
pytest tests/ --fixtures
Slow Tests
# Profile test duration
pytest tests/ --durations=10
# Skip slow tests during development
pytest tests/ -m "not slow"
See Also