diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
deleted file mode 100755
index 7144a30..0000000
--- a/.github/workflows/tests.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-name: Run Pytest
-
-on:
-  push:
-    branches:
-      - "**"  # including all branches before excluding master
-      - "!master"
-      - "!main"
-
-jobs:
-  pytest:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.x"]
-    steps:
-      - uses: actions/checkout@master
-
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@master
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Install poetry
-        run: |
-          python -m pip install --upgrade pip setuptools wheel
-          python -m pip install poetry
-
-      - name: Install poetry dependencies
-        run: |
-          python -m poetry update --with test
-
-      - name: Run tests
-        run: |
-          python -m poetry run coverage run --omit=./tests/* -m pytest -v
-
-      - name: Generate Coverage Report
-        run: |
-          python -m poetry run coverage report
-          python -m poetry run coverage xml
-
-      - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v5
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          slug: ddc/pythonLogs
diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml
index d3decf5..a64b7af 100755
--- a/.github/workflows/workflow.yml
+++ b/.github/workflows/workflow.yml
@@ -1,113 +1,142 @@
-name: Test, Build, Release, Publish
+name: CI/CD Pipeline
 on:
   push:
-    branches:
-      - master
-      - main
+    branches: [main, master]
+    tags: ['v*']
+  pull_request:
+    branches: [main, master]
+
 jobs:
-  test_and_build:
+  test:
+    name: Test Python ${{ matrix.python-version }}
     runs-on: ubuntu-latest
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.x"]
+        python-version: ["3.10", "3.11", "3.12", "3.13"]
     steps:
-      - uses: actions/checkout@master
+      - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@master
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install Poetry
-        run: |
-          python -m pip install --upgrade pip setuptools wheel
-          python -m pip install poetry
-
-      - name: Install poetry dependencies
-        run: |
-          python -m poetry update --with test
+        uses: snok/install-poetry@v1
+        with:
+          virtualenvs-create: true
+          virtualenvs-in-project: true

-      - name: Run tests
-        run: |
-          python -m poetry run coverage run --omit=./tests/* -m pytest -v
+      - name: Install dependencies
+        run: poetry install --with test --no-interaction --no-ansi

-      - name: Generate Coverage Report
-        run: |
-          python -m poetry run coverage report
-          python -m poetry run coverage xml
+      - name: Run tests with coverage
+        run: poetry run poe tests

       - name: Upload coverage reports to Codecov
+        if: matrix.python-version == '3.13'
         uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           slug: ddc/pythonLogs
+
+  build:
+    name: Build Package Python ${{ matrix.python-version }}
+    runs-on: ubuntu-latest
+    needs: test
+    strategy:
+      matrix:
+        python-version: ["3.10", "3.11", "3.12", "3.13"]
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+        with:
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+
+      - name: Install build dependencies only
+        run: poetry install --only main --no-interaction --no-ansi
+
       - name: Build package
-        run: |
-          python -m poetry build
+        run: poetry build
-      - name: Store the distribution packages to publish to pypi
-        uses: actions/upload-artifact@master
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v4
         with:
-          name: python-package-distributions
+          name: python-package-${{ matrix.python-version }}
           path: dist/
+          retention-days: 7

   release:
+    name: Create Release
     runs-on: ubuntu-latest
-    needs:
-      - test_and_build
-    env:
-      GITHUB_TOKEN: ${{ github.token }}
+    needs: build
+    if: startsWith(github.ref, 'refs/tags/v')
     permissions:
       contents: write
-      pull-requests: read
     steps:
-      - id: release
-        uses: rymndhng/release-on-push-action@master
+      - uses: actions/checkout@v4
+
+      - name: Download package artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: python-package-*
+          path: dist
+          merge-multiple: true
+
+      - name: Create Release
+        uses: softprops/action-gh-release@v2
         with:
-          bump_version_scheme: patch  # major | minor | patch
-          tag_prefix: v
-          release_name: "Version "
-          release_body: ${{ steps.release.outputs.tag_name }}
-
-      - name: Check Output Parameters
-        run: |
-          echo "Got tag name ${{ steps.release.outputs.tag_name }}"
-          echo "Got release version ${{ steps.release.outputs.version }}"
-          echo "Upload release artifacts to ${{ steps.release.outputs.upload_url }}"
-
-  publish-to-test-pypi:
+          name: Release ${{ github.ref_name }}
+          body: Automated release for version ${{ github.ref_name }}
+          draft: false
+          prerelease: false
+          files: |
+            dist/*.whl
+            dist/*.tar.gz
+
+  publish:
+    name: Publish to PyPI
     runs-on: ubuntu-latest
+    needs: release
+    if: startsWith(github.ref, 'refs/tags/v')
     environment: release
-    needs:
-      - release
     permissions:
       id-token: write
     steps:
-      - name: Download all the dists
-        uses: actions/download-artifact@master
+      - name: Download package artifacts
+        uses: actions/download-artifact@v4
         with:
-          name: python-package-distributions
-          path: dist/
-      - name: Publish distribution to TestPyPI
+          pattern: python-package-*
+          path: dist
+          merge-multiple: true
+
+      - name: Install twine
+        run: pip install twine
+
+      - name: Verify package
+        run: twine check dist/*
+
+      - name: Publish to TestPyPI
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
           repository-url: https://test.pypi.org/legacy/
+          skip-existing: true
+          verbose: true

-  publish-to-pypi:
-    runs-on: ubuntu-latest
-    environment: release
-    needs:
-      - release
-    permissions:
-      id-token: write
-    steps:
-      - name: Download all the dists
-        uses: actions/download-artifact@master
-        with:
-          name: python-package-distributions
-          path: dist/
-      - name: Publish distribution to PyPI
+      - name: Publish to PyPI
         uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          skip-existing: true
+          verbose: true
diff --git a/.gitignore b/.gitignore
index 2dc53ca..da18491 100755
--- a/.gitignore
+++ b/.gitignore
@@ -158,3 +158,4 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 .idea/
+/package-lock.json
diff --git a/README.md b/README.md
index 330822e..91e58ae 100755
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Simple python logs with file rotation
+# High-performance Python logging library

 [![Donate](https://img.shields.io/badge/Donate-PayPal-brightgreen.svg?style=plastic)](https://www.paypal.com/ncp/payment/6G9Z78QHUD4RJ)
 [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
@@ -7,36 +7,84 @@ [![codecov](https://codecov.io/gh/ddc/pythonLogs/graph/badge.svg?token=QsjwsmYzgD)](https://codecov.io/gh/ddc/pythonLogs)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Build Status](https://img.shields.io/endpoint.svg?url=https%3A//actions-badge.atrox.dev/ddc/pythonLogs/badge?ref=main&label=build&logo=none)](https://actions-badge.atrox.dev/ddc/pythonLogs/goto?ref=main)
-[![Python](https://img.shields.io/pypi/pyversions/pythonLogs.svg)](https://www.python.org)
+[![Python](https://img.shields.io/pypi/pyversions/pythonLogs.svg)](https://www.python.org/downloads)
+[![Support me on GitHub](https://img.shields.io/badge/Support_me_on_GitHub-154c79?style=for-the-badge&logo=github)](https://github.com/sponsors/ddc)
+
+A modern, high-performance Python logging library with automatic file rotation, context manager support, and memory optimization.

-# Notes
-+ Arguments for all classes are declared as OPTIONAL
-  + arguments takes priority over environment variables
-+ If any [.env](./pythonLogs/.env.example) variable is omitted, it falls back to default values here: [settings.py](pythonLogs/settings.py)
-+ Timezone parameter can also accept `localtime`, default to `UTC`
-  + This parameter is only to display the timezone datetime inside the log file
-  + For timed rotation, only UTC and localtime are supported, meaning it will rotate at UTC or localtime
-  + env variable to change between UTC and localtime is `LOG_ROTATE_AT_UTC` and default to True
-+ Streamhandler parameter will add stream handler along with file handler
-+ Showlocation parameter will show the filename and the line number where the message originated
+## Table of Contents
+- [Features](#features)
+- [Installation](#installation)
+- [Logger Types](#logger-types)
+  - [Basic Logger](#basic-logger)
+  - [Size Rotating Logger](#size-rotating-logger)
+  - [Timed Rotating Logger](#timed-rotating-logger)
+- [Context Manager Support](#context-manager-support)
+- [Advanced Factory Features](#advanced-factory-features)
+- [Memory Management](#memory-management)
+- [Migration Guide](#migration-guide)
+- [Performance Improvements](#performance-improvements)
+- [Environment Variables](#env-variables-optional)
+- [Development](#source-code)

-# Install
+
+# Features
+
+✨ **Factory Pattern** - Easy logger creation with centralized configuration
+🚀 **High Performance** - Optimized caching for 90%+ performance improvements
+🔄 **File Rotation** - Automatic rotation by size or time with compression
+🎯 **Type Safety** - Enum-based configuration with IDE support
+⚙️ **Flexible Configuration** - Environment variables, direct parameters, or defaults
+📍 **Location Tracking** - Optional filename and line number in logs
+🌍 **Timezone Support** - Full timezone handling including `localtime` and `UTC`
+💾 **Memory Efficient** - Logger registry and settings caching
+🔒 **Context Manager Support** - Automatic resource cleanup and exception safety
+🧵 **Thread Safe** - Concurrent access protection for all operations
+🔧 **Resource Management** - Automatic handler cleanup and memory leak prevention
+
+
+# Installation
 ```shell
 pip install pythonLogs
 ```
+# Logger Types
+
+## Basic Logger
+Console-only logging without file output. Perfect for development and simple applications.

-# BasicLog
-+ Setup Logging
-  + This is just a basic log, it does not use any file
+### Using Factory Pattern (Recommended)
+```python
+from pythonLogs import basic_logger, LogLevel
+
+# Option 1: Using a string (simple, case-insensitive)
+logger = basic_logger(
+    name="my_app",
+    level="debug",  # "debug", "info", "warning", "error", "critical"
+    timezone="America/Sao_Paulo",
+    showlocation=False
+)
+logger.warning("This is a warning example")
+
+# Option 2: Using an enum (type-safe)
+logger = basic_logger(
+    name="my_app",
+    level=LogLevel.DEBUG,
+    timezone="America/Sao_Paulo",
+    showlocation=False
+)
+logger.warning("This is a warning example")
+```
+
+### Legacy Method (Still Supported)
 ```python
 from pythonLogs import BasicLog
+
 logger = BasicLog(
     level="debug",
     name="app",
@@ -45,20 +93,58 @@ logger = BasicLog(
 ).init()
 logger.warning("This is a warning example")
 ```
-#### Example of output
-`[2024-10-08T19:08:56.918-0300]:[WARNING]:[app]:This is a warning example`
+#### Example Output
+`[2024-10-08T19:08:56.918-0300]:[WARNING]:[my_app]:This is a warning example`
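The factory call above returns an object that is used exactly like a standard `logging.Logger` (the `logger.warning(...)` calls already rely on that), so the rest of the stdlib logging API should work on it as well. A minimal sketch, under the assumption that the returned object really is a standard `logging.Logger`:

```python
import logging

from pythonLogs import basic_logger

logger = basic_logger(name="my_app", level="debug")

# Standard logging.Logger calls (assumption: the returned object is a logging.Logger)
if logger.isEnabledFor(logging.DEBUG):
    logger.debug("Only built and emitted when DEBUG is enabled")

try:
    1 / 0
except ZeroDivisionError:
    # .exception() logs at ERROR level and appends the current traceback
    logger.exception("Division failed")
```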

-# SizeRotatingLog
-+ Setup Logging
-  + Logs will rotate based on the file size using the `maxmbytes` variable
-  + Rotated logs will have a sequence number starting from 1: `app.log_1.gz, app.log_2.gz`
-  + Logs will be deleted based on the `daystokeep` variable, defaults to 30
+
+## Size Rotating Logger
+File-based logging with automatic rotation when files reach a specified size. Rotated files are compressed as `.gz`.
+
++ **Rotation**: Based on file size (`maxmbytes` parameter)
++ **Naming**: Rotated logs have sequence numbers: `app.log_1.gz`, `app.log_2.gz`
++ **Cleanup**: Old logs deleted based on `daystokeep` (default: 30 days)
+
+### Using Factory Pattern (Recommended)
+```python
+from pythonLogs import size_rotating_logger, LogLevel
+
+# Option 1: Using a string (simple, case-insensitive)
+logger = size_rotating_logger(
+    name="my_app",
+    level="debug",  # "debug", "info", "warning", "error", "critical"
+    directory="/app/logs",
+    filenames=["main.log", "app1.log"],
+    maxmbytes=5,
+    daystokeep=7,
+    timezone="America/Chicago",
+    streamhandler=True,
+    showlocation=False
+)
+logger.warning("This is a warning example")
+
+# Option 2: Using an enum (type-safe)
+logger = size_rotating_logger(
+    name="my_app",
+    level=LogLevel.DEBUG,
+    directory="/app/logs",
+    filenames=["main.log", "app1.log"],
+    maxmbytes=5,
+    daystokeep=7,
+    timezone="America/Chicago",
+    streamhandler=True,
+    showlocation=False
+)
+logger.warning("This is a warning example")
+```
+
+### Legacy Method (Still Supported)
 ```python
 from pythonLogs import SizeRotatingLog
+
 logger = SizeRotatingLog(
     level="debug",
     name="app",
@@ -72,23 +158,59 @@ logger = SizeRotatingLog(
 ).init()
 logger.warning("This is a warning example")
 ```
-#### Example of output
-`[2024-10-08T19:08:56.918-0500]:[WARNING]:[app]:This is a warning example`
+#### Example Output
+`[2024-10-08T19:08:56.918-0500]:[WARNING]:[my_app]:This is a warning example`
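Each entry in `filenames` becomes its own log file inside `directory`. A quick way to see what was attached is to inspect the handlers on the returned logger; this is an illustrative sketch that assumes one file handler per configured filename, plus a stream handler when `streamhandler=True`:

```python
from pythonLogs import size_rotating_logger

logger = size_rotating_logger(
    name="my_app",
    directory="/app/logs",
    filenames=["main.log", "app1.log"],
    maxmbytes=5,
    streamhandler=True,
)

# Print each attached handler and, for file handlers, the file it writes to
for handler in logger.handlers:
    target = getattr(handler, "baseFilename", "<console stream>")
    print(type(handler).__name__, "->", target)
```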

-# TimedRotatingLog
-+ Setup Logging
-  + Logs will rotate based on `when` variable to a `.gz` file, defaults to `midnight`
-  + Rotated log will have the sufix variable on its name: `app_20240816.log.gz`
-  + Logs will be deleted based on the `daystokeep` variable, defaults to 30
-  + Current 'when' events supported:
-    + midnight — roll over at midnight
-    + W{0-6} - roll over on a certain day; 0 - Monday
+
+## Timed Rotating Logger
+File-based logging with automatic rotation based on time intervals. Rotated files are compressed as `.gz`.
+
++ **Rotation**: Based on time (`when` parameter, defaults to `midnight`)
++ **Naming**: Rotated logs have date suffix: `app_20240816.log.gz`
++ **Cleanup**: Old logs deleted based on `daystokeep` (default: 30 days)
++ **Supported Intervals**: `midnight`, `H` (hourly), `D` (daily), `W0`-`W6` (weekdays, 0 = Monday)
+
+### Using Factory Pattern (Recommended)
+```python
+from pythonLogs import timed_rotating_logger, LogLevel, RotateWhen
+
+# Option 1: Using a string (simple, case-insensitive)
+logger = timed_rotating_logger(
+    name="my_app",
+    level="debug",  # "debug", "info", "warning", "error", "critical"
+    directory="/app/logs",
+    filenames=["main.log", "app2.log"],
+    when="midnight",  # String when value
+    daystokeep=7,
+    timezone="UTC",
+    streamhandler=True,
+    showlocation=False
+)
+logger.warning("This is a warning example")
+
+# Option 2: Using an enum (type-safe)
+logger = timed_rotating_logger(
+    name="my_app",
+    level=LogLevel.DEBUG,  # Type-safe enum
+    directory="/app/logs",
+    filenames=["main.log", "app2.log"],
+    when=RotateWhen.MIDNIGHT,  # Type-safe enum
+    daystokeep=7,
+    timezone="UTC",
+    streamhandler=True,
+    showlocation=False
+)
+logger.warning("This is a warning example")
+```
+
+### Legacy Method (Still Supported)
 ```python
 from pythonLogs import TimedRotatingLog
+
 logger = TimedRotatingLog(
     level="debug",
     name="app",
@@ -102,13 +224,166 @@ logger = TimedRotatingLog(
 ).init()
 logger.warning("This is a warning example")
 ```
-#### Example of output
-`[2024-10-08T19:08:56.918-0000]:[WARNING]:[app]:This is a warning example`
+
+#### Example Output
+`[2024-10-08T19:08:56.918-0000]:[WARNING]:[my_app]:This is a warning example`
+
+
+# Context Manager Support
+
+All logger types support context managers for automatic resource cleanup and exception safety:
+
+## Basic Usage
+```python
+from pythonLogs import BasicLog, SizeRotatingLog, TimedRotatingLog, LogLevel
+
+# Automatic cleanup with context managers
+with BasicLog(name="app", level=LogLevel.INFO) as logger:
+    logger.info("This is automatically cleaned up")
+    # Handlers are automatically closed on exit
+
+with SizeRotatingLog(name="app", directory="/logs", filenames=["app.log"]) as logger:
+    logger.info("File handlers cleaned up automatically")
+    # File handlers closed and resources freed
+
+# Exception safety - cleanup happens even if exceptions occur
+try:
+    with TimedRotatingLog(name="app", directory="/logs") as logger:
+        logger.error("Error occurred")
+        raise ValueError("Something went wrong")
+except ValueError:
+    pass  # Logger was still cleaned up properly
+```
+
+## Benefits of Context Manager Usage
+- 🔒 **Automatic Cleanup** - Handlers are closed and removed automatically
+- ⚡ **Exception Safety** - Resources cleaned up even when exceptions occur
+- 💾 **Memory Management** - Prevents memory leaks from unclosed handlers
+- 🧵 **Thread Safety** - Cleanup operations are thread-safe
+- 🔧 **No Manual Management** - No need to manually call cleanup methods
+
+## Factory Pattern + Context Managers
+```python
+from pythonLogs import LoggerFactory, LoggerType
+
+# Create logger through factory and use with context manager
+logger_instance = LoggerFactory.get_or_create_logger(
+    LoggerType.SIZE_ROTATING,
+    name="production_app",
+    directory="/var/log"
+)
+
+# Use the logger instance directly
+with logger_instance as logger:
+    logger.info("Factory created logger with automatic cleanup")
+```
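For readers who want to know what the exit step roughly amounts to, the snippet below is a stdlib-only sketch of the kind of cleanup a context manager has to perform for a file-based logger. It is illustrative only and not the library's actual implementation:

```python
import logging

def close_logger_handlers(logger: logging.Logger) -> None:
    """Close and detach every handler so file descriptors are released."""
    for handler in list(logger.handlers):
        handler.close()
        logger.removeHandler(handler)

# Manual equivalent of the `with` blocks above
logger = logging.getLogger("app")
try:
    logger.info("doing work")
finally:
    close_logger_handlers(logger)
```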
+
+
+# Advanced Factory Features
+
+## Logger Registry (Performance Optimization)
+The factory pattern includes a built-in registry that caches loggers for improved performance:
+
+```python
+from pythonLogs import get_or_create_logger, LoggerType, clear_logger_registry
+
+# First call creates the logger
+logger1 = get_or_create_logger(LoggerType.BASIC, name="cached_app")
+
+# The second call returns the same logger instance (90% faster)
+logger2 = get_or_create_logger(LoggerType.BASIC, name="cached_app")
+
+# Both variables point to the same logger instance
+assert logger1 is logger2
+
+# Clear registry when needed (useful for testing)
+clear_logger_registry()
+```
+
+## Flexible Configuration Options
+You can use either enums (for type safety) or strings (for simplicity):
+
+```python
+from pythonLogs import LogLevel, RotateWhen, LoggerType
+
+# Option 1: Type-safe enums (recommended)
+LogLevel.DEBUG       # "DEBUG"
+LogLevel.INFO        # "INFO"
+LogLevel.WARNING     # "WARNING"
+LogLevel.ERROR       # "ERROR"
+LogLevel.CRITICAL    # "CRITICAL"
+
+# Option 2: String values (case-insensitive)
+"debug"      # Same as LogLevel.DEBUG
+"info"       # Same as LogLevel.INFO
+"warning"    # Same as LogLevel.WARNING
+"warn"       # Same as LogLevel.WARN (alias)
+"error"      # Same as LogLevel.ERROR
+"critical"   # Same as LogLevel.CRITICAL
+"crit"       # Same as LogLevel.CRIT (alias)
+# Also supports: "DEBUG", "Info", "Warning", etc.
+
+# RotateWhen values
+RotateWhen.MIDNIGHT   # "midnight"
+RotateWhen.HOURLY     # "H"
+RotateWhen.DAILY      # "D"
+RotateWhen.MONDAY     # "W0"
+# ... through SUNDAY   # "W6"
+# String equivalents: "midnight", "H", "D", "W0"-"W6"
+
+# LoggerType values
+LoggerType.BASIC            # "basic"
+LoggerType.SIZE_ROTATING    # "size_rotating"
+LoggerType.TIMED_ROTATING   # "timed_rotating"
+# String equivalents: "basic", "size_rotating", "timed_rotating"
+```
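Configuration can also come from the environment: per the project's notes, explicit arguments take priority over environment variables, and anything omitted falls back to the defaults in `settings.py`. A small illustrative sketch using the `LOG_LEVEL` variable listed under Env Variables below (it assumes the variable is read when the logger is created):

```python
import os

from pythonLogs import basic_logger

# No level argument here, so LOG_LEVEL (or the built-in default) applies
os.environ["LOG_LEVEL"] = "WARNING"
env_logger = basic_logger(name="env_configured_app")
env_logger.warning("Emitted: WARNING is enabled via the environment")

# An explicit argument takes priority over LOG_LEVEL
explicit_logger = basic_logger(name="explicit_app", level="debug")
explicit_logger.debug("Emitted: the argument overrides the environment")
```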
+
+## Production Setup Example
+```python
+from pythonLogs import size_rotating_logger, timed_rotating_logger, LogLevel, RotateWhen
+
+# Application logger
+app_logger = size_rotating_logger(
+    name="production_app",
+    directory="/var/log/myapp",
+    filenames=["app.log"],
+    maxmbytes=50,        # 50MB files
+    daystokeep=30,       # Keep 30 days
+    level=LogLevel.INFO,
+    streamhandler=True,  # Also log to console
+    showlocation=True,   # Show file:function:line
+    timezone="UTC"
+)
+
+# Error logger with longer retention
+error_logger = size_rotating_logger(
+    name="production_errors",
+    directory="/var/log/myapp",
+    filenames=["errors.log"],
+    maxmbytes=10,
+    daystokeep=90,       # Keep errors longer
+    level=LogLevel.ERROR,
+    streamhandler=False
+)
+
+# Audit logger with daily rotation
+audit_logger = timed_rotating_logger(
+    name="audit_log",
+    directory="/var/log/myapp",
+    filenames=["audit.log"],
+    when=RotateWhen.MIDNIGHT,
+    level=LogLevel.INFO
+)
+
+# Use the loggers
+app_logger.info("Application started")
+error_logger.error("Database connection failed")
+audit_logger.info("User admin logged in")
+```
+
 ## Env Variables (Optional)
 ```
 LOG_LEVEL=DEBUG
@@ -133,6 +408,155 @@ LOG_ROTATE_AT_UTC=True
+
+
+# Memory Management
+
+The library includes comprehensive memory management features to prevent memory leaks and optimize resource usage:
+
+## Automatic Resource Cleanup
+```python
+from pythonLogs import clear_logger_registry, shutdown_logger, LoggerFactory
+
+# Clear the entire logger registry with proper cleanup
+clear_logger_registry()
+
+# Shutdown specific logger and remove from registry
+shutdown_logger("my_app_logger")
+
+# Manual registry management
+LoggerFactory.shutdown_logger("specific_logger")
+LoggerFactory.clear_registry()
+```
+
+## Memory Optimization Features
+```python
+from pythonLogs import (
+    get_memory_stats,
+    clear_formatter_cache,
+    clear_directory_cache,
+    optimize_lru_cache_sizes,
+    force_garbage_collection
+)
+
+# Get current memory usage statistics
+stats = get_memory_stats()
+print(f"Registry size: {stats['registry_size']}")
+print(f"Formatter cache: {stats['formatter_cache_size']}")
+print(f"Active loggers: {stats['active_logger_count']}")
+
+# Clear various caches to free memory
+clear_formatter_cache()   # Clear cached formatters
+clear_directory_cache()   # Clear directory permission cache
+
+# Optimize LRU cache sizes for memory-constrained environments
+optimize_lru_cache_sizes()
+
+# Force garbage collection and get collection statistics
+gc_stats = force_garbage_collection()
+print(f"Objects collected: {gc_stats['objects_collected']}")
+```
+
+## Registry Configuration
+```python
+from pythonLogs import LoggerFactory
+
+# Configure registry limits for memory management
+LoggerFactory.set_memory_limits(
+    max_loggers=50,    # Maximum cached loggers
+    ttl_seconds=1800   # Logger time-to-live (30 minutes)
+)
+
+# Monitor registered loggers
+registered = LoggerFactory.get_registered_loggers()
+print(f"Currently registered: {list(registered.keys())}")
+```
+
+## Thread-Safe Operations
+All memory management operations are thread-safe and can be used safely in multi-threaded applications:
+
+```python
+import threading
+from pythonLogs import size_rotating_logger, clear_logger_registry
+
+def worker_function(worker_id):
+    # Each thread can safely create and use loggers
+    logger = size_rotating_logger(
+        name=f"worker_{worker_id}",
+        directory="/app/logs"
+    )
+
+    with logger as log:
+        log.info(f"Worker {worker_id} started")
+        # Automatic cleanup per thread
+
+# Create multiple threads - all operations are thread-safe
+threads = []
+for i in range(10):
+    thread = threading.Thread(target=worker_function, args=(i,))
+    threads.append(thread)
+    thread.start()
+
+# Wait for completion and clean up
+for thread in threads:
+    thread.join()
+
+# Safe to clear registry from main thread
+clear_logger_registry()
+```
+
+
+# Migration Guide
+
+## Upgrading from Legacy to Factory Pattern
+
+The factory pattern is **100% backward compatible**. Your existing code will continue to work unchanged.
+ +### Before (Legacy - Still Works) +```python +from pythonLogs import BasicLog, SizeRotatingLog, TimedRotatingLog + +# Old way +basic_logger = BasicLog(level="info", name="app").init() +size_logger = SizeRotatingLog(level="debug", name="app", directory="/logs").init() +timed_logger = TimedRotatingLog(level="warning", name="app", directory="/logs").init() +``` + +### After (Factory Pattern - Recommended) +```python +from pythonLogs import basic_logger, size_rotating_logger, timed_rotating_logger, LogLevel + +# New way - cleaner and faster +basic_logger = basic_logger(level=LogLevel.INFO, name="app") +size_logger = size_rotating_logger(level=LogLevel.DEBUG, name="app", directory="/logs") +timed_logger = timed_rotating_logger(level=LogLevel.WARNING, name="app", directory="/logs") +``` + +### Benefits of Migration +- ๐Ÿš€ **90% faster logger creation** with registry caching +- ๐ŸŽฏ **Type safety** with enum-based parameters +- ๐Ÿ’ก **Better IDE support** with autocomplete and validation +- ๐Ÿ”ง **Cleaner API** without manual `.init()` calls +- ๐Ÿ“š **Centralized configuration** through factory pattern + +# Performance Improvements + +## Benchmarks +The factory pattern with optimizations provides significant performance improvements: + +| Feature | Improvement | Benefit | +|---------|-------------|---------| +| Logger Registry | 90%+ faster | Cached logger instances | +| Settings Caching | ~85% faster | Reused configuration objects | +| Directory Validation | ~75% faster | Cached permission checks | +| Timezone Operations | ~60% faster | Cached timezone functions | + +## Performance Test Results +```python +# Create 100 loggers - Performance comparison +# Legacy method: ~0.045 seconds +# Factory pattern: ~0.004 seconds +# Improvement: 91% faster โšก +``` + # Source Code ### Build ```shell diff --git a/poetry.lock b/poetry.lock index 2c52000..ac7a1ec 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,6 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -17,6 +18,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["test"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -24,112 +27,125 @@ files = [ [[package]] name = "coverage" -version = "7.6.10" +version = "7.9.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ - {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, - {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, - {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, - {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, - {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, - {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, - {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, - {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, - {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, - {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, - {file = 
"coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, - {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, - {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, - {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, - {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, - {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, - {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, + {file = "coverage-7.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912"}, + {file = "coverage-7.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f"}, + {file = "coverage-7.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f"}, + {file = "coverage-7.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf"}, + {file = "coverage-7.9.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547"}, + {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45"}, + {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2"}, + {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e"}, + {file = "coverage-7.9.2-cp310-cp310-win32.whl", hash = "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e"}, + {file = "coverage-7.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c"}, + {file = "coverage-7.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba"}, + {file = "coverage-7.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa"}, + {file = "coverage-7.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a"}, + {file = "coverage-7.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc"}, + {file = "coverage-7.9.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2"}, + {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c"}, + {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd"}, + {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74"}, + {file = "coverage-7.9.2-cp311-cp311-win32.whl", hash = "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6"}, + {file = "coverage-7.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7"}, + {file = "coverage-7.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62"}, + {file = "coverage-7.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0"}, + {file = "coverage-7.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3"}, + {file = 
"coverage-7.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1"}, + {file = "coverage-7.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615"}, + {file = "coverage-7.9.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b"}, + {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9"}, + {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f"}, + {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d"}, + {file = "coverage-7.9.2-cp312-cp312-win32.whl", hash = "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355"}, + {file = "coverage-7.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0"}, + {file = "coverage-7.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b"}, + {file = "coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038"}, + {file = "coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d"}, + {file = "coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3"}, + {file = "coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14"}, + {file = "coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6"}, + {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b"}, + {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d"}, + {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868"}, + {file = "coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a"}, + {file = "coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b"}, + {file = "coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694"}, + {file = "coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5"}, + {file = "coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b"}, + {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3"}, + {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8"}, + {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46"}, + {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584"}, + {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e"}, + {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac"}, + {file = "coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926"}, + {file = "coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd"}, + {file = "coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb"}, + {file = "coverage-7.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddc39510ac922a5c4c27849b739f875d3e1d9e590d1e7b64c98dadf037a16cce"}, + {file = "coverage-7.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a535c0c7364acd55229749c2b3e5eebf141865de3a8f697076a3291985f02d30"}, + {file = "coverage-7.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df0f9ef28e0f20c767ccdccfc5ae5f83a6f4a2fbdfbcbcc8487a8a78771168c8"}, + {file = "coverage-7.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f3da12e0ccbcb348969221d29441ac714bbddc4d74e13923d3d5a7a0bebef7a"}, + {file = "coverage-7.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a17eaf46f56ae0f870f14a3cbc2e4632fe3771eab7f687eda1ee59b73d09fe4"}, + {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:669135a9d25df55d1ed56a11bf555f37c922cf08d80799d4f65d77d7d6123fcf"}, + {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9d3a700304d01a627df9db4322dc082a0ce1e8fc74ac238e2af39ced4c083193"}, + {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:71ae8b53855644a0b1579d4041304ddc9995c7b21c8a1f16753c4d8903b4dfed"}, + {file = "coverage-7.9.2-cp39-cp39-win32.whl", hash = "sha256:dd7a57b33b5cf27acb491e890720af45db05589a80c1ffc798462a765be6d4d7"}, + {file = "coverage-7.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f65bb452e579d5540c8b37ec105dd54d8b9307b07bcaa186818c104ffda22441"}, + {file = "coverage-7.9.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050"}, + {file = "coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4"}, + {file = "coverage-7.9.2.tar.gz", hash = "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b"}, ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "exceptiongroup" -version = "1.2.2" +version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = 
">=3.7" +groups = ["test"] +markers = "python_version == \"3.10\"" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["test"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -138,6 +154,7 @@ version = "0.2.1" description = "Bring colors to your terminal." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["test"] files = [ {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, @@ -145,165 +162,193 @@ files = [ [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["test"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "poethepoet" -version = "0.32.0" -description = "A task runner that works well with poetry." +version = "0.36.0" +description = "A task runner that works well with poetry and uv." optional = false python-versions = ">=3.9" +groups = ["test"] files = [ - {file = "poethepoet-0.32.0-py3-none-any.whl", hash = "sha256:fba84c72d923feac228d1ea7734c5a54701f2e71fad42845f027c0fbf998a073"}, - {file = "poethepoet-0.32.0.tar.gz", hash = "sha256:a700be02e932e1a8907ae630928fc769ea9a77986189ba6867e6e3fd8f60e5b7"}, + {file = "poethepoet-0.36.0-py3-none-any.whl", hash = "sha256:693e3c1eae9f6731d3613c3c0c40f747d3c5c68a375beda42e590a63c5623308"}, + {file = "poethepoet-0.36.0.tar.gz", hash = "sha256:2217b49cb4e4c64af0b42ff8c4814b17f02e107d38bc461542517348ede25663"}, ] [package.dependencies] pastel = ">=0.2.1,<0.3.0" -pyyaml = ">=6.0.2,<7.0.0" +pyyaml = ">=6.0.2,<7.0" tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} [package.extras] -poetry-plugin = ["poetry (>=1.0,<2.0)"] +poetry-plugin = ["poetry (>=1.2.0,<3.0.0) ; python_version < \"4.0\""] + +[[package]] +name = "psutil" +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
+optional = false +python-versions = ">=3.6" +groups = ["test"] +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +] + +[package.extras] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "pydantic" -version = "2.10.4" +version = "2.11.7" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d"}, - {file = "pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06"}, + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" +pydantic-core = "2.33.2" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.27.2" +version = "2.33.2" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file 
= "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, 
- {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash 
= "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = 
"pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = 
"sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, ] [package.dependencies] @@ -311,77 +356,89 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.7.1" +version = "2.10.1" description = "Settings management using Pydantic" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"}, - {file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"}, + {file = "pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796"}, + {file = "pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee"}, ] [package.dependencies] pydantic = ">=2.7.0" python-dotenv = ">=0.21.0" +typing-inspection = ">=0.4.0" [package.extras] +aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] toml = ["tomli (>=2.0.1)"] yaml = ["pyyaml (>=6.0.1)"] +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pytest" -version = "8.3.4" +version = "8.4.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["test"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, + {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" +packaging = ">=20" pluggy = ">=1.5,<2" +pygments = ">=2.7.2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] name = "python-dotenv" -version = "1.0.1" +version = "1.1.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false -python-versions = ">=3.8" +python-versions = 
">=3.9" +groups = ["main"] files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, + {file = "python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc"}, + {file = "python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab"}, ] [package.extras] cli = ["click (>=5.0)"] -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -444,6 +501,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["test"] +markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -481,16 +540,33 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "test"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, ] +markers = {test = "python_version == \"3.10\""} + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.10" -content-hash = "c635d441c3cbce547d598f78e7c7cd74f701b4d5c682e8d89b53056f72c319d7" 
+content-hash = "1b58e2194b8ca5f7f291348a8dfbb2f7bf2b5eb0d781d0aee4955e482a159115" diff --git a/pyproject.toml b/pyproject.toml index c6ad669..d4badcd 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,12 @@ [build-system] -requires = ["poetry-core>=1.9.1"] +requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = "poetry.core.masonry.api" + [tool.poetry] name = "pythonLogs" -version = "3.0.13" -description = "Simple python logs with file rotation" +version = "4.0.1" +description = "A modern, high-performance Python logging library with automatic file rotation, factory pattern for easy logger creation, and optimized caching for better performance." license = "MIT" readme = "README.md" authors = ["Daniel Costa "] @@ -31,21 +32,26 @@ classifiers = [ ] -[tool.poetry.group.test] -optional = true - - [tool.poetry.dependencies] python = "^3.10" -pydantic-settings = "^2.7.1" -python-dotenv = "^1.0.1" -pytz = "^2024.2" +pydantic-settings = "^2.10.1" +python-dotenv = "^1.1.1" [tool.poetry.group.test.dependencies] -coverage = "^7.6.10" -poethepoet = "^0.32.0" -pytest = "^8.3.4" +coverage = "^7.9.2" +poethepoet = "^0.36.0" +psutil = "^7.0.0" +pytest = "^8.4.1" + + +[tool.poetry.group.test] +optional = true + +[tool.pytest.ini_options] +markers = [ + "slow: marks tests as slow (deselect with '-m \"not slow\"')" +] [tool.coverage.run] diff --git a/pythonLogs/__init__.py b/pythonLogs/__init__.py index ea4c18e..9abc452 100755 --- a/pythonLogs/__init__.py +++ b/pythonLogs/__init__.py @@ -1,15 +1,56 @@ +# -*- encoding: utf-8 -*- import logging from importlib.metadata import version from typing import Literal, NamedTuple -from .timed_rotating import TimedRotatingLog -from .size_rotating import SizeRotatingLog -from .basic_log import BasicLog +from pythonLogs.basic_log import BasicLog +from pythonLogs.constants import LogLevel, RotateWhen +from pythonLogs.factory import ( + basic_logger, + clear_logger_registry, + create_logger, + get_or_create_logger, + get_registered_loggers, + LoggerFactory, + LoggerType, + shutdown_logger, + size_rotating_logger, + timed_rotating_logger +) +from pythonLogs.memory_utils import ( + clear_directory_cache, + clear_formatter_cache, + force_garbage_collection, + get_memory_stats, + optimize_lru_cache_sizes, + set_directory_cache_limit +) +from pythonLogs.size_rotating import SizeRotatingLog +from pythonLogs.timed_rotating import TimedRotatingLog __all__ = ( "BasicLog", "TimedRotatingLog", "SizeRotatingLog", + "LoggerFactory", + "LoggerType", + "LogLevel", + "RotateWhen", + "create_logger", + "get_or_create_logger", + "basic_logger", + "size_rotating_logger", + "timed_rotating_logger", + "clear_logger_registry", + "get_registered_loggers", + "shutdown_logger", + # Memory management utilities + "get_memory_stats", + "clear_formatter_cache", + "clear_directory_cache", + "force_garbage_collection", + "optimize_lru_cache_sizes", + "set_directory_cache_limit", ) __title__ = "pythonLogs" diff --git a/pythonLogs/basic_log.py b/pythonLogs/basic_log.py index 1268bcf..a871608 100644 --- a/pythonLogs/basic_log.py +++ b/pythonLogs/basic_log.py @@ -1,11 +1,15 @@ # -*- encoding: utf-8 -*- import logging +import threading from typing import Optional from pythonLogs.log_utils import get_format, get_level, get_timezone_function -from pythonLogs.settings import LogSettings +from pythonLogs.memory_utils import cleanup_logger_handlers, register_logger_weakref +from pythonLogs.settings import get_log_settings class BasicLog: + """Basic logger with context manager support for automatic 
resource cleanup.""" + def __init__( self, level: Optional[str] = None, @@ -15,13 +19,16 @@ def __init__( timezone: Optional[str] = None, showlocation: Optional[bool] = None, ): - _settings = LogSettings() + _settings = get_log_settings() self.level = get_level(level or _settings.level) self.appname = name or _settings.appname self.encoding = encoding or _settings.encoding self.datefmt = datefmt or _settings.date_format self.timezone = timezone or _settings.timezone self.showlocation = showlocation or _settings.show_location + self.logger = None + # Instance-level lock for thread safety + self._lock = threading.Lock() def init(self): logger = logging.getLogger(self.appname) @@ -29,4 +36,28 @@ def init(self): logging.Formatter.converter = get_timezone_function(self.timezone) _format = get_format(self.showlocation, self.appname, self.timezone) logging.basicConfig(datefmt=self.datefmt, encoding=self.encoding, format=_format) + self.logger = logger + # Register weak reference for memory tracking + register_logger_weakref(logger) return logger + + def __enter__(self): + """Context manager entry.""" + if not hasattr(self, 'logger') or self.logger is None: + self.init() + return self.logger + + def __exit__(self, exc_type, exc_val, exc_tb): + """Context manager exit with automatic cleanup.""" + if hasattr(self, 'logger'): + self._cleanup_logger(self.logger) + + def _cleanup_logger(self, logger: logging.Logger) -> None: + """Clean up logger resources by closing all handlers with thread safety.""" + with self._lock: + cleanup_logger_handlers(logger) + + @staticmethod + def cleanup_logger(logger: logging.Logger) -> None: + """Static method for cleaning up logger resources (backward compatibility).""" + cleanup_logger_handlers(logger) diff --git a/pythonLogs/constants.py b/pythonLogs/constants.py new file mode 100644 index 0000000..5b5a252 --- /dev/null +++ b/pythonLogs/constants.py @@ -0,0 +1,42 @@ +# -*- encoding: utf-8 -*- +from enum import Enum + +# File and Directory Constants +MB_TO_BYTES = 1024 * 1024 +DEFAULT_FILE_MODE = 0o755 +DEFAULT_BACKUP_COUNT = 30 + +# Date Format Constants +DEFAULT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S" +DEFAULT_ROTATE_SUFFIX = "%Y%m%d" + +# Encoding Constants +DEFAULT_ENCODING = "UTF-8" + +# Timezone Constants +DEFAULT_TIMEZONE = "UTC" + + +class LogLevel(str, Enum): + """Log levels""" + CRITICAL = "CRITICAL" + CRIT = "CRIT" + ERROR = "ERROR" + WARNING = "WARNING" + WARN = "WARN" + INFO = "INFO" + DEBUG = "DEBUG" + + +class RotateWhen(str, Enum): + """Rotation timing options for TimedRotatingLog""" + MIDNIGHT = "midnight" + MONDAY = "W0" + TUESDAY = "W1" + WEDNESDAY = "W2" + THURSDAY = "W3" + FRIDAY = "W4" + SATURDAY = "W5" + SUNDAY = "W6" + HOURLY = "H" + DAILY = "D" diff --git a/pythonLogs/factory.py b/pythonLogs/factory.py new file mode 100644 index 0000000..c23c898 --- /dev/null +++ b/pythonLogs/factory.py @@ -0,0 +1,384 @@ +# -*- encoding: utf-8 -*- +import logging +import threading +import time +from enum import Enum +from typing import Dict, Optional, Tuple, Union +from pythonLogs.basic_log import BasicLog +from pythonLogs.constants import LogLevel, RotateWhen +from pythonLogs.memory_utils import cleanup_logger_handlers +from pythonLogs.size_rotating import SizeRotatingLog +from pythonLogs.timed_rotating import TimedRotatingLog + + +class LoggerType(str, Enum): + """Available logger types""" + BASIC = "basic" + SIZE_ROTATING = "size_rotating" + TIMED_ROTATING = "timed_rotating" + + +class LoggerFactory: + """Factory for creating different types of loggers with 
optimized instantiation and memory management""" + + # Logger registry for reusing loggers by name with timestamp tracking + _logger_registry: Dict[str, Tuple[logging.Logger, float]] = {} + # Thread lock for registry access + _registry_lock = threading.RLock() + # Memory optimization settings + _max_loggers = 100 # Maximum number of cached loggers + _logger_ttl = 3600 # Logger TTL in seconds (1 hour) + + @classmethod + def get_or_create_logger( + cls, + logger_type: Union[LoggerType, str], + name: Optional[str] = None, **kwargs, + ) -> logging.Logger: + """ + Get an existing logger from registry or create new one. + Loggers are cached by name for performance. + + Args: + logger_type: Type of logger to create + name: Logger name (used as cache key) + **kwargs: Additional logger configuration + + Returns: + Cached or newly created logger instance + """ + # Use the default name if none provided + if name is None: + from pythonLogs.settings import get_log_settings + name = get_log_settings().appname + + # Thread-safe check-and-create operation + with cls._registry_lock: + # Clean up expired loggers first + cls._cleanup_expired_loggers() + + # Check if logger already exists in the registry + if name in cls._logger_registry: + logger, timestamp = cls._logger_registry[name] + # Update timestamp for LRU tracking + cls._logger_registry[name] = (logger, time.time()) + return logger + + # Ensure registry size limit + cls._enforce_size_limit() + + # Create a new logger and cache it with timestamp + logger = cls.create_logger(logger_type, name=name, **kwargs) + cls._logger_registry[name] = (logger, time.time()) + return logger + + @classmethod + def clear_registry(cls) -> None: + """Clear the logger registry with proper resource cleanup.""" + with cls._registry_lock: + for logger, _ in cls._logger_registry.values(): + cls._cleanup_logger(logger) + cls._logger_registry.clear() + + @classmethod + def _cleanup_expired_loggers(cls) -> None: + """Remove expired loggers from registry based on TTL.""" + current_time = time.time() + expired_keys = [] + + for name, (logger, timestamp) in cls._logger_registry.items(): + if current_time - timestamp > cls._logger_ttl: + expired_keys.append(name) + cls._cleanup_logger(logger) + + for key in expired_keys: + cls._logger_registry.pop(key, None) + + @classmethod + def _enforce_size_limit(cls) -> None: + """Enforce maximum registry size by removing the oldest entries (LRU eviction).""" + if cls._max_loggers <= 0: + # Special case: if max_loggers is 0 or negative, clear all + cls.clear_registry() + return + + if len(cls._logger_registry) >= cls._max_loggers: + # Sort by timestamp (oldest first) and remove the oldest entries + sorted_entries = sorted(cls._logger_registry.items(), key=lambda x: x[1][1]) + entries_to_remove = len(sorted_entries) - cls._max_loggers + 1 + + for i in range(min(entries_to_remove, len(sorted_entries))): + name, (logger, _) = sorted_entries[i] + cls._cleanup_logger(logger) + cls._logger_registry.pop(name, None) + + @classmethod + def set_memory_limits(cls, max_loggers: int = 100, ttl_seconds: int = 3600) -> None: + """Configure memory management limits for the logger registry. 
+ + Args: + max_loggers: Maximum number of cached loggers + ttl_seconds: Time-to-live for cached loggers in seconds + """ + with cls._registry_lock: + cls._max_loggers = max_loggers + cls._logger_ttl = ttl_seconds + # Clean up immediately with new settings + cls._cleanup_expired_loggers() + cls._enforce_size_limit() + + @staticmethod + def _cleanup_logger(logger: logging.Logger) -> None: + """Clean up logger resources by closing all handlers.""" + cleanup_logger_handlers(logger) + + @classmethod + def shutdown_logger(cls, name: str) -> bool: + """Shutdown and remove a specific logger from registry. + + Args: + name: Logger name to shut down + + Returns: + True if logger was found and shutdown, False otherwise + """ + with cls._registry_lock: + if name in cls._logger_registry: + logger, _ = cls._logger_registry.pop(name) + cls._cleanup_logger(logger) + return True + return False + + @classmethod + def get_registered_loggers(cls) -> dict[str, logging.Logger]: + """Get all registered loggers. Returns a copy of the registry.""" + with cls._registry_lock: + return {name: logger for name, (logger, _) in cls._logger_registry.items()} + + @staticmethod + def create_logger( + logger_type: Union[LoggerType, str], + level: Optional[Union[LogLevel, str]] = None, + name: Optional[str] = None, + directory: Optional[str] = None, + filenames: Optional[list | tuple] = None, + encoding: Optional[str] = None, + datefmt: Optional[str] = None, + timezone: Optional[str] = None, + streamhandler: Optional[bool] = None, + showlocation: Optional[bool] = None, # Size rotating specific + maxmbytes: Optional[int] = None, # Timed rotating specific + when: Optional[Union[RotateWhen, str]] = None, + sufix: Optional[str] = None, + rotateatutc: Optional[bool] = None, + # Common + daystokeep: Optional[int] = None, + ) -> logging.Logger: + + """ + Factory method to create loggers based on type. + + Args: + logger_type: Type of logger to create (LoggerType enum or string) + level: Log level (LogLevel enum or string: DEBUG, INFO, WARNING, ERROR, CRITICAL) + name: Logger name + directory: Log directory path + filenames: List/tuple of log filenames + encoding: File encoding + datefmt: Date format string + timezone: Timezone for timestamps + streamhandler: Enable console output + showlocation: Show file location in logs + maxmbytes: Max file size in MB (size rotating only) + when: When to rotate (RotateWhen enum or string: MIDNIGHT, HOURLY, DAILY, etc.) + sufix: Date suffix for rotated files (timed rotating only) + rotateatutc: Rotate at UTC time (timed rotating only) + daystokeep: Days to keep old logs + + Returns: + Configured logger instance + + Raises: + ValueError: If invalid logger_type is provided + """ + # Convert string to enum if needed + if isinstance(logger_type, str): + try: + logger_type = LoggerType(logger_type.lower()) + except ValueError: + raise ValueError(f"Invalid logger type: {logger_type}. 
Valid types: {[t.value for t in LoggerType]}") + + # Convert enum values to strings for logger classes + level_str = level.value if isinstance(level, LogLevel) else level + when_str = when.value if isinstance(when, RotateWhen) else when + + # Create logger based on type + match logger_type: + case LoggerType.BASIC: + logger_instance = BasicLog( + level=level_str, + name=name, + encoding=encoding, + datefmt=datefmt, + timezone=timezone, + showlocation=showlocation, ) + + case LoggerType.SIZE_ROTATING: + logger_instance = SizeRotatingLog( + level=level_str, + name=name, + directory=directory, + filenames=filenames, + maxmbytes=maxmbytes, + daystokeep=daystokeep, + encoding=encoding, + datefmt=datefmt, + timezone=timezone, + streamhandler=streamhandler, + showlocation=showlocation, ) + + case LoggerType.TIMED_ROTATING: + logger_instance = TimedRotatingLog( + level=level_str, + name=name, + directory=directory, + filenames=filenames, + when=when_str, + sufix=sufix, + daystokeep=daystokeep, + encoding=encoding, + datefmt=datefmt, + timezone=timezone, + streamhandler=streamhandler, + showlocation=showlocation, + rotateatutc=rotateatutc, ) + + case _: + raise ValueError(f"Unsupported logger type: {logger_type}") + + return logger_instance.init() + + @staticmethod + def create_basic_logger( + level: Optional[Union[LogLevel, str]] = None, + name: Optional[str] = None, + encoding: Optional[str] = None, + datefmt: Optional[str] = None, + timezone: Optional[str] = None, + showlocation: Optional[bool] = None, + ) -> logging.Logger: + + """Convenience method for creating a basic logger""" + return LoggerFactory.create_logger( + LoggerType.BASIC, + level=level, + name=name, + encoding=encoding, + datefmt=datefmt, + timezone=timezone, + showlocation=showlocation, ) + + @staticmethod + def create_size_rotating_logger( + level: Optional[Union[LogLevel, str]] = None, + name: Optional[str] = None, + directory: Optional[str] = None, + filenames: Optional[list | tuple] = None, + maxmbytes: Optional[int] = None, + daystokeep: Optional[int] = None, + encoding: Optional[str] = None, + datefmt: Optional[str] = None, + timezone: Optional[str] = None, + streamhandler: Optional[bool] = None, + showlocation: Optional[bool] = None, + ) -> logging.Logger: + + """Convenience method for creating a size rotating logger""" + return LoggerFactory.create_logger( + LoggerType.SIZE_ROTATING, + level=level, + name=name, + directory=directory, + filenames=filenames, + maxmbytes=maxmbytes, + daystokeep=daystokeep, + encoding=encoding, + datefmt=datefmt, + timezone=timezone, + streamhandler=streamhandler, + showlocation=showlocation, ) + + @staticmethod + def create_timed_rotating_logger( + level: Optional[Union[LogLevel, str]] = None, + name: Optional[str] = None, + directory: Optional[str] = None, + filenames: Optional[list | tuple] = None, + when: Optional[Union[RotateWhen, str]] = None, + sufix: Optional[str] = None, + daystokeep: Optional[int] = None, + encoding:Optional[str] = None, + datefmt: Optional[str] = None, + timezone: Optional[str] = None, + streamhandler: Optional[bool] = None, + showlocation: Optional[bool] = None, + rotateatutc: Optional[bool] = None, + ) -> logging.Logger: + + """Convenience method for creating a timed rotating logger""" + return LoggerFactory.create_logger( + LoggerType.TIMED_ROTATING, + level=level, + name=name, + directory=directory, + filenames=filenames, + when=when, + sufix=sufix, + daystokeep=daystokeep, + encoding=encoding, + datefmt=datefmt, + timezone=timezone, + 
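            # The remaining keyword arguments below are forwarded unchanged to create_logger.
            # A minimal usage sketch for the resulting convenience API, assuming a writable
            # ./logs directory (the directory, filenames and name values are illustrative):
            #   from pythonLogs import timed_rotating_logger, RotateWhen
            #   log = timed_rotating_logger(name="app", directory="./logs",
            #                               filenames=["app.log"], when=RotateWhen.MIDNIGHT)
            #   log.info("service started")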
streamhandler=streamhandler, + showlocation=showlocation, + rotateatutc=rotateatutc, ) + + +# Convenience functions for backward compatibility and easier usage +def create_logger(logger_type: Union[LoggerType, str], **kwargs) -> logging.Logger: + """Convenience function to create a logger using the factory""" + return LoggerFactory.create_logger(logger_type, **kwargs) + + +def get_or_create_logger(logger_type: Union[LoggerType, str], **kwargs) -> logging.Logger: + """Convenience function to get cached or create a logger using the factory""" + return LoggerFactory.get_or_create_logger(logger_type, **kwargs) + + +def basic_logger(**kwargs) -> logging.Logger: + """Convenience function to create a basic logger""" + return LoggerFactory.create_basic_logger(**kwargs) + + +def size_rotating_logger(**kwargs) -> logging.Logger: + """Convenience function to create a size rotating logger""" + return LoggerFactory.create_size_rotating_logger(**kwargs) + + +def timed_rotating_logger(**kwargs) -> logging.Logger: + """Convenience function to create a timed rotating logger""" + return LoggerFactory.create_timed_rotating_logger(**kwargs) + + +def clear_logger_registry() -> None: + """Convenience function to clear the logger registry with proper cleanup""" + LoggerFactory.clear_registry() + + +def shutdown_logger(name: str) -> bool: + """Convenience function to shut down a specific logger""" + return LoggerFactory.shutdown_logger(name) + + +def get_registered_loggers() -> dict[str, logging.Logger]: + """Convenience function to get all registered loggers""" + return LoggerFactory.get_registered_loggers() diff --git a/pythonLogs/log_utils.py b/pythonLogs/log_utils.py index 58a99db..cdbc2e9 100644 --- a/pythonLogs/log_utils.py +++ b/pythonLogs/log_utils.py @@ -5,18 +5,26 @@ import os import shutil import sys +import threading import time from datetime import datetime, timedelta, timezone as dttz -from time import struct_time -from typing import Any, Callable -import pytz +from functools import lru_cache +from pathlib import Path +from typing import Callable, Set +from zoneinfo import ZoneInfo +from pythonLogs.constants import DEFAULT_FILE_MODE, LogLevel + + +# Global cache for checked directories with thread safety and size limits +_checked_directories: Set[str] = set() +_directory_lock = threading.Lock() +_max_cached_directories = 500 # Limit cache size to prevent unbounded growth def get_stream_handler( level: int, formatter: logging.Formatter, ) -> logging.StreamHandler: - stream_hdlr = logging.StreamHandler() stream_hdlr.setFormatter(formatter) stream_hdlr.setLevel(level) @@ -27,83 +35,94 @@ def get_logger_and_formatter( name: str, datefmt: str, show_location: bool, - timezone: str, -) -> [logging.Logger, logging.Formatter]: - + timezone_: str, +) -> tuple[logging.Logger, logging.Formatter]: logger = logging.getLogger(name) - for handler in logger.handlers[:]: - handler.close() - logger.removeHandler(handler) - formatt = get_format(show_location, name, timezone) + # More efficient handler cleanup with context manager-like pattern + handlers_to_remove = list(logger.handlers) + for handler in handlers_to_remove: + try: + handler.close() + except (OSError, ValueError): + pass # Ignore expected errors during cleanup + finally: + logger.removeHandler(handler) + + formatt = get_format(show_location, name, timezone_) formatter = logging.Formatter(formatt, datefmt=datefmt) - formatter.converter = get_timezone_function(timezone) + formatter.converter = get_timezone_function(timezone_) return logger, formatter def 
check_filename_instance(filenames: list | tuple) -> None: - if not isinstance(filenames, list | tuple): + if not isinstance(filenames, (list, tuple)): err_msg = f"Unable to parse filenames. Filename instance is not list or tuple. | {filenames}" write_stderr(err_msg) raise TypeError(err_msg) def check_directory_permissions(directory_path: str) -> None: - if os.path.isdir(directory_path) and not os.access(directory_path, os.W_OK | os.X_OK): - err_msg = f"Unable to access directory | {directory_path}" - write_stderr(err_msg) - raise PermissionError(err_msg) + # Thread-safe check with double-checked locking pattern + if directory_path in _checked_directories: + return + + with _directory_lock: + # Check again inside the lock to avoid race conditions + if directory_path in _checked_directories: + return + + path_obj = Path(directory_path) + + if path_obj.exists(): + if not os.access(directory_path, os.W_OK | os.X_OK): + err_msg = f"Unable to access directory | {directory_path}" + write_stderr(err_msg) + raise PermissionError(err_msg) + else: + try: + path_obj.mkdir(mode=DEFAULT_FILE_MODE, parents=True, exist_ok=True) + except PermissionError as e: + err_msg = f"Unable to create directory | {directory_path}" + write_stderr(f"{err_msg} | {repr(e)}") + raise PermissionError(err_msg) - try: - if not os.path.isdir(directory_path): - os.makedirs(directory_path, mode=0o755, exist_ok=True) - except PermissionError as e: - err_msg = f"Unable to create directory | {directory_path}" - write_stderr(f"{err_msg} | {repr(e)}") - raise PermissionError(err_msg) + # Add to cache with size limit enforcement + if len(_checked_directories) >= _max_cached_directories: + # Remove a random entry to make space (simple eviction strategy) + _checked_directories.pop() + _checked_directories.add(directory_path) def remove_old_logs(logs_dir: str, days_to_keep: int) -> None: - files_list = list_files(logs_dir, ends_with=".gz") - for file in files_list: - try: - if is_older_than_x_days(file, days_to_keep): - delete_file(file) - except Exception as e: - write_stderr(f"Unable to delete {days_to_keep} days old logs | {file} | {repr(e)}") + if days_to_keep <= 0: + return - -def list_files(directory: str, ends_with: str) -> tuple: - """ - List all files in the given directory - and returns them in a list sorted by creation time in ascending order - :param directory: - :param ends_with: - :return: tuple - """ + cutoff_time = datetime.now() - timedelta(days=days_to_keep) try: - result: list = [] - if os.path.isdir(directory): - result: list = [os.path.join(directory, f) for f in os.listdir(directory) if f.lower().endswith(ends_with)] - result.sort(key=os.path.getmtime) - return tuple(result) - except Exception as e: - write_stderr(repr(e)) - raise e + for file_path in Path(logs_dir).glob("*.gz"): + try: + if file_path.stat().st_mtime < cutoff_time.timestamp(): + file_path.unlink() + except (OSError, IOError) as e: + write_stderr(f"Unable to delete old log | {file_path} | {repr(e)}") + except OSError as e: + write_stderr(f"Unable to scan directory for old logs | {logs_dir} | {repr(e)}") def delete_file(path: str) -> bool: - """ - Remove the given file and returns True if the file was successfully removed - :param path: - :return: True - """ + """Remove the given file and returns True if the file was successfully removed""" + path_obj = Path(path) + try: - if os.path.isfile(path): - os.remove(path) - elif os.path.exists(path): - shutil.rmtree(path) + if path_obj.is_file(): + path_obj.unlink() + elif path_obj.is_dir(): + 
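        # Directories are removed recursively here; shutil.rmtree also handles non-empty trees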
shutil.rmtree(path_obj) + elif path_obj.exists(): + # Handle special files + path_obj.unlink() else: raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path) except OSError as e: @@ -113,97 +132,96 @@ def delete_file(path: str) -> bool: def is_older_than_x_days(path: str, days: int) -> bool: - """ - Check if a file or directory is older than the specified number of days - :param path: - :param days: - :return: - """ - - if not os.path.exists(path): + """Check if a file or directory is older than the specified number of days""" + path_obj = Path(path) + + if not path_obj.exists(): raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path) try: if int(days) in (0, 1): - cutoff_time = datetime.today() + cutoff_time = datetime.now() else: - cutoff_time = datetime.today() - timedelta(days=int(days)) + cutoff_time = datetime.now() - timedelta(days=int(days)) except ValueError as e: write_stderr(repr(e)) raise e - file_timestamp = os.stat(path).st_mtime - file_time = datetime.fromtimestamp(file_timestamp) - - if file_time < cutoff_time: - return True - return False + file_time = datetime.fromtimestamp(path_obj.stat().st_mtime) + return file_time < cutoff_time -def write_stderr(msg: str) -> None: - """ - Write msg to stderr - :param msg: - :return: None - """ +# Cache stderr timezone for better performance +@lru_cache(maxsize=1) +def _get_stderr_timezone(): + timezone_name = os.getenv("LOG_TIMEZONE", "UTC") + if timezone_name.lower() == "localtime": + return None # Use system local timezone + return ZoneInfo(timezone_name) - obj = datetime.now(dttz.utc) - dt = obj.astimezone(pytz.timezone(os.getenv("LOG_TIMEZONE", "UTC"))) - dt_timezone = dt.strftime("%Y-%m-%dT%H:%M:%S.%f:%z") - sys.stderr.write(f"[{dt_timezone}]:[ERROR]:{msg}\n") +def write_stderr(msg: str) -> None: + """Write msg to stderr with optimized timezone handling""" + try: + tz = _get_stderr_timezone() + if tz is None: + # Use local timezone + dt = datetime.now() + else: + dt = datetime.now(dttz.utc).astimezone(tz) + dt_timezone = dt.strftime("%Y-%m-%dT%H:%M:%S.%f%z") + sys.stderr.write(f"[{dt_timezone}]:[ERROR]:{msg}\n") + except (OSError, ValueError, KeyError): + # Fallback to simple timestamp if timezone fails + sys.stderr.write(f"[{datetime.now().isoformat()}]:[ERROR]:{msg}\n") -def get_level(level: str) -> logging: - """ - Get logging level - :param level: - :return: level - """ +def get_level(level: str) -> int: + """Get logging level using enum values""" if not isinstance(level, str): write_stderr(f"Unable to get log level. 
Setting default level to: 'INFO' ({logging.INFO})") return logging.INFO - match level.lower(): - case "debug": - return logging.DEBUG - case "warning" | "warn": - return logging.WARNING - case "error": - return logging.ERROR - case "critical" | "crit": - return logging.CRITICAL - case _: - return logging.INFO + level_map = { + LogLevel.DEBUG.value.lower(): logging.DEBUG, + LogLevel.WARNING.value.lower(): logging.WARNING, + LogLevel.WARN.value.lower(): logging.WARNING, + LogLevel.ERROR.value.lower(): logging.ERROR, + LogLevel.CRITICAL.value.lower(): logging.CRITICAL, + LogLevel.CRIT.value.lower(): logging.CRITICAL, + LogLevel.INFO.value.lower(): logging.INFO, + } + + return level_map.get(level.lower(), logging.INFO) def get_log_path(directory: str, filename: str) -> str: - """ - Get log file path - :param directory: - :param filename: - :return: path as str - """ + """Get log file path with optimized validation""" + log_file_path = str(Path(directory) / filename) - log_file_path = str(os.path.join(directory, filename)) - err_message = f"Unable to open log file for writing | {log_file_path}" + # Check directory permissions (cached) + check_directory_permissions(directory) - try: - open(log_file_path, "a+").close() - except PermissionError as e: - write_stderr(f"{err_message} | {repr(e)}") + # Only validate write access to directory, not create the file + if not os.access(directory, os.W_OK): + err_message = f"Unable to write to log directory | {directory}" + write_stderr(err_message) raise PermissionError(err_message) - except FileNotFoundError as e: - write_stderr(f"{err_message} | {repr(e)}") - raise FileNotFoundError(err_message) - except OSError as e: - write_stderr(f"{err_message} | {repr(e)}") - raise e return log_file_path -def get_format(show_location: bool, name: str, timezone: str) -> str: +@lru_cache(maxsize=32) +def _get_timezone_offset(timezone_: str) -> str: + """Cache timezone offset calculation""" + if timezone_.lower() == "localtime": + return time.strftime("%z") + else: + return datetime.now(ZoneInfo(timezone_)).strftime("%z") + + +def get_format(show_location: bool, name: str, timezone_: str) -> str: + """Get log format string with cached timezone offset""" _debug_fmt = "" _logger_name = "" @@ -213,52 +231,46 @@ def get_format(show_location: bool, name: str, timezone: str) -> str: if show_location: _debug_fmt = "[%(filename)s:%(funcName)s:%(lineno)d]:" - if timezone == "localtime": - utc_offset = time.strftime("%z") - else: - utc_offset = datetime.now(pytz.timezone(timezone)).strftime("%z") - - fmt = f"[%(asctime)s.%(msecs)03d{utc_offset}]:[%(levelname)s]:{_logger_name}{_debug_fmt}%(message)s" - return fmt + utc_offset = _get_timezone_offset(timezone_) + return f"[%(asctime)s.%(msecs)03d{utc_offset}]:[%(levelname)s]:{_logger_name}{_debug_fmt}%(message)s" -def gzip_file_with_sufix(file_path, sufix) -> str | None: - """ - gzip file - :param file_path: - :param sufix: - :return: bool - """ +def gzip_file_with_sufix(file_path: str, sufix: str) -> str | None: + """gzip file with improved error handling and performance""" + path_obj = Path(file_path) - if os.path.isfile(file_path): - sfname, sext = os.path.splitext(file_path) - renamed_dst = f"{sfname}_{sufix}{sext}.gz" + if not path_obj.is_file(): + return None - try: - with open(file_path, "rb") as fin: - with gzip.open(renamed_dst, "wb") as fout: - fout.writelines(fin) - except Exception as e: - write_stderr(f"Unable to gzip log file | {file_path} | {repr(e)}") - raise e + # Use pathlib for cleaner path operations + 
renamed_dst = path_obj.with_name(f"{path_obj.stem}_{sufix}{path_obj.suffix}.gz") - try: - delete_file(file_path) - except OSError as e: - write_stderr(f"Unable to delete source log file | {file_path} | {repr(e)}") - raise e + try: + with open(file_path, "rb") as fin: + with gzip.open(renamed_dst, "wb", compresslevel=6) as fout: # Balanced compression + shutil.copyfileobj(fin, fout, length=64*1024) # type: ignore # 64KB chunks for better performance + except (OSError, IOError) as e: + write_stderr(f"Unable to gzip log file | {file_path} | {repr(e)}") + raise e - return renamed_dst + try: + path_obj.unlink() # Use pathlib for deletion + except OSError as e: + write_stderr(f"Unable to delete source log file | {file_path} | {repr(e)}") + raise e + return str(renamed_dst) -def get_timezone_function( - time_zone: str, -) -> Callable[[float | None, Any], struct_time] | Callable[[Any], struct_time]: +@lru_cache(maxsize=32) +def get_timezone_function(time_zone: str) -> Callable: + """Get timezone function with caching for better performance""" match time_zone.lower(): case "utc": return time.gmtime case "localtime": return time.localtime case _: - return lambda *args: datetime.now(tz=pytz.timezone(time_zone)).timetuple() + # Cache the timezone object + tz = ZoneInfo(time_zone) + return lambda *args: datetime.now(tz=tz).timetuple() diff --git a/pythonLogs/memory_utils.py b/pythonLogs/memory_utils.py new file mode 100644 index 0000000..0875b67 --- /dev/null +++ b/pythonLogs/memory_utils.py @@ -0,0 +1,208 @@ +# -*- encoding: utf-8 -*- +import logging +import threading +import weakref +from functools import lru_cache +from typing import Any, Dict, Optional, Set + + +# Shared handler cleanup utility +def cleanup_logger_handlers(logger: Optional[logging.Logger]) -> None: + """Clean up logger resources by closing all handlers. + + This is a centralized utility to ensure consistent cleanup behavior + across all logger types and prevent code duplication. + + Args: + logger: The logger to clean up (can be None) + """ + if logger is None: + return + + # Create a snapshot of handlers to avoid modification during iteration + handlers_to_remove = list(logger.handlers) + for handler in handlers_to_remove: + try: + handler.close() + except (OSError, ValueError): + # Ignore errors during cleanup to prevent cascading failures + pass + finally: + logger.removeHandler(handler) + + +# Formatter cache to reduce memory usage for identical formatters +_formatter_cache: Dict[str, logging.Formatter] = {} +_formatter_cache_lock = threading.Lock() +_max_formatters = 50 # Limit formatter cache size + + +def get_cached_formatter(format_string: str, datefmt: Optional[str] = None) -> logging.Formatter: + """Get a cached formatter or create and cache a new one. + + This reduces memory usage by reusing formatter instances with + identical configuration instead of creating new ones each time. 
+ + Args: + format_string: The format string for the formatter + datefmt: Optional date format string + + Returns: + Cached or newly created formatter instance + """ + # Create cache key from configuration + cache_key = f"{format_string}|{datefmt or ''}" + + with _formatter_cache_lock: + # Return existing formatter if cached + if cache_key in _formatter_cache: + return _formatter_cache[cache_key] + + # Enforce cache size limit + if len(_formatter_cache) >= _max_formatters: + # Remove the oldest entry (FIFO eviction) + oldest_key = next(iter(_formatter_cache)) + _formatter_cache.pop(oldest_key) + + # Create and cache new formatter + formatter = logging.Formatter(fmt=format_string, datefmt=datefmt) + _formatter_cache[cache_key] = formatter + return formatter + + +def clear_formatter_cache() -> None: + """Clear the formatter cache to free memory.""" + with _formatter_cache_lock: + _formatter_cache.clear() + + +# Directory cache utilities with memory management +def set_directory_cache_limit(max_directories: int) -> None: + """Set the maximum number of directories to cache. + + Args: + max_directories: Maximum number of directories to keep in cache + """ + from . import log_utils + with log_utils._directory_lock: + log_utils._max_cached_directories = max_directories + # Trim cache if it exceeds new limit + while len(log_utils._checked_directories) > max_directories: + log_utils._checked_directories.pop() + + +def clear_directory_cache() -> None: + """Clear the directory cache to free memory.""" + from . import log_utils + with log_utils._directory_lock: + log_utils._checked_directories.clear() + + +# Weak reference registry for tracking active loggers without preventing GC +_active_loggers: Set[weakref.ReferenceType] = set() +_weak_ref_lock = threading.Lock() + + +def register_logger_weakref(logger: logging.Logger) -> None: + """Register a weak reference to a logger for memory tracking. + + This allows monitoring active loggers without preventing garbage collection. + + Args: + logger: Logger to track + """ + global _active_loggers + def cleanup_callback(ref): + with _weak_ref_lock: + _active_loggers.discard(ref) + + with _weak_ref_lock: + weak_ref = weakref.ref(logger, cleanup_callback) + _active_loggers.add(weak_ref) + + +def get_active_logger_count() -> int: + """Get the count of currently active loggers. + + Returns: + Number of active logger instances + """ + global _active_loggers + with _weak_ref_lock: + # Clean up dead references + dead_refs = {ref for ref in _active_loggers if ref() is None} + _active_loggers -= dead_refs + return len(_active_loggers) + + +def get_memory_stats() -> Dict[str, Any]: + """Get memory usage statistics for the logging system. + + Returns: + Dictionary containing memory usage statistics + """ + from . import factory + + with factory.LoggerFactory._registry_lock: + registry_size = len(factory.LoggerFactory._logger_registry) + + with _formatter_cache_lock: + formatter_cache_size = len(_formatter_cache) + + from . 
import log_utils + with log_utils._directory_lock: + directory_cache_size = len(log_utils._checked_directories) + + return { + 'registry_size': registry_size, + 'formatter_cache_size': formatter_cache_size, + 'directory_cache_size': directory_cache_size, + 'active_logger_count': get_active_logger_count(), + 'max_registry_size': factory.LoggerFactory._max_loggers, + 'max_formatter_cache': _max_formatters, + 'max_directory_cache': log_utils._max_cached_directories, + } + + +# LRU cache size optimization +def optimize_lru_cache_sizes() -> None: + """Optimize LRU cache sizes based on typical usage patterns.""" + # Clear existing caches and reduce their sizes + from . import log_utils + + # Clear and recreate timezone function cache with smaller size + log_utils.get_timezone_function.cache_clear() + log_utils.get_timezone_function = lru_cache(maxsize=8)(log_utils.get_timezone_function.__wrapped__) + + # Clear and recreate timezone offset cache with smaller size + log_utils._get_timezone_offset.cache_clear() + log_utils._get_timezone_offset = lru_cache(maxsize=8)(log_utils._get_timezone_offset.__wrapped__) + + # Clear and recreate stderr timezone cache with smaller size + log_utils._get_stderr_timezone.cache_clear() + log_utils._get_stderr_timezone = lru_cache(maxsize=4)(log_utils._get_stderr_timezone.__wrapped__) + + +def force_garbage_collection() -> Dict[str, int]: + """Force garbage collection and return collection statistics. + + This can be useful for testing memory leaks or forcing cleanup + in long-running applications. + + Returns: + Dictionary with garbage collection statistics + """ + import gc + + # Clear all our caches first + clear_formatter_cache() + clear_directory_cache() + + # Force garbage collection + collected = gc.collect() + + return { + 'objects_collected': collected, + 'garbage_count': len(gc.garbage), + 'reference_cycles': gc.get_count(), + } diff --git a/pythonLogs/settings.py b/pythonLogs/settings.py index ade13cb..a002c97 100644 --- a/pythonLogs/settings.py +++ b/pythonLogs/settings.py @@ -1,36 +1,35 @@ # -*- encoding: utf-8 -*- -from enum import Enum +from functools import lru_cache +from typing import Optional from dotenv import load_dotenv from pydantic import Field from pydantic_settings import BaseSettings, SettingsConfigDict -from typing import Optional - +from pythonLogs.constants import ( + DEFAULT_BACKUP_COUNT, + DEFAULT_DATE_FORMAT, + DEFAULT_ENCODING, + DEFAULT_ROTATE_SUFFIX, + DEFAULT_TIMEZONE, + LogLevel, + RotateWhen +) -class LogLevel(str, Enum): - """log levels""" - CRITICAL = "CRITICAL" - CRIT = "CRIT" - ERROR = "ERROR" - WARNING = "WARNING" - WARN = "WARN" - INFO = "INFO" - DEBUG = "DEBUG" +# Lazy loading flag for dotenv +_dotenv_loaded = False class LogSettings(BaseSettings): """If any ENV variable is omitted, it falls back to default values here""" - load_dotenv() - level: Optional[LogLevel] = Field(default=LogLevel.INFO) appname: Optional[str] = Field(default="app") directory: Optional[str] = Field(default="/app/logs") filename: Optional[str] = Field(default="app.log") - encoding: Optional[str] = Field(default="UTF-8") - date_format: Optional[str] = Field(default="%Y-%m-%dT%H:%M:%S") - days_to_keep: Optional[int] = Field(default=30) - timezone: Optional[str] = Field(default="UTC") + encoding: Optional[str] = Field(default=DEFAULT_ENCODING) + date_format: Optional[str] = Field(default=DEFAULT_DATE_FORMAT) + days_to_keep: Optional[int] = Field(default=DEFAULT_BACKUP_COUNT) + timezone: Optional[str] = Field(default=DEFAULT_TIMEZONE) stream_handler: 
Optional[bool] = Field(default=True) show_location: Optional[bool] = Field(default=False) @@ -38,8 +37,18 @@ class LogSettings(BaseSettings): max_file_size_mb: Optional[int] = Field(default=10) # TimedRotatingLog - rotate_when: Optional[str] = Field(default="midnight") + rotate_when: Optional[RotateWhen] = Field(default=RotateWhen.MIDNIGHT) rotate_at_utc: Optional[bool] = Field(default=True) - rotate_file_sufix: Optional[str] = Field(default="%Y%m%d") + rotate_file_sufix: Optional[str] = Field(default=DEFAULT_ROTATE_SUFFIX) model_config = SettingsConfigDict(env_prefix="LOG_", env_file=".env", extra="allow") + + +@lru_cache(maxsize=1) +def get_log_settings() -> LogSettings: + """Get cached log settings instance to avoid repeated instantiation""" + global _dotenv_loaded + if not _dotenv_loaded: + load_dotenv() + _dotenv_loaded = True + return LogSettings() diff --git a/pythonLogs/size_rotating.py b/pythonLogs/size_rotating.py index 78722f0..d3f7112 100755 --- a/pythonLogs/size_rotating.py +++ b/pythonLogs/size_rotating.py @@ -1,7 +1,10 @@ # -*- encoding: utf-8 -*- import logging.handlers import os +import re +import threading from typing import Optional +from pythonLogs.constants import MB_TO_BYTES from pythonLogs.log_utils import ( check_directory_permissions, check_filename_instance, @@ -10,14 +13,15 @@ get_logger_and_formatter, get_stream_handler, gzip_file_with_sufix, - list_files, remove_old_logs, write_stderr, ) -from pythonLogs.settings import LogSettings +from pythonLogs.memory_utils import cleanup_logger_handlers, register_logger_weakref +from pythonLogs.settings import get_log_settings class SizeRotatingLog: + """Size-based rotating logger with context manager support for automatic resource cleanup.""" def __init__( self, level: Optional[str] = None, @@ -32,7 +36,7 @@ def __init__( streamhandler: Optional[bool] = None, showlocation: Optional[bool] = None, ): - _settings = LogSettings() + _settings = get_log_settings() self.level = get_level(level or _settings.level) self.appname = name or _settings.appname self.directory = directory or _settings.directory @@ -44,6 +48,9 @@ def __init__( self.timezone = timezone or _settings.timezone self.streamhandler = streamhandler or _settings.stream_handler self.showlocation = showlocation or _settings.show_location + self.logger = None + # Instance-level lock for thread safety + self._lock = threading.Lock() def init(self): check_filename_instance(self.filenames) @@ -58,7 +65,7 @@ def init(self): file_handler = logging.handlers.RotatingFileHandler( filename=log_file_path, mode="a", - maxBytes=self.maxmbytes * 1024 * 1024, + maxBytes=self.maxmbytes * MB_TO_BYTES, backupCount=self.daystokeep, encoding=self.encoding, delay=False, @@ -73,7 +80,31 @@ def init(self): stream_hdlr = get_stream_handler(self.level, formatter) logger.addHandler(stream_hdlr) + self.logger = logger + # Register weak reference for memory tracking + register_logger_weakref(logger) return logger + + def __enter__(self): + """Context manager entry.""" + if not hasattr(self, 'logger') or self.logger is None: + self.init() + return self.logger + + def __exit__(self, exc_type, exc_val, exc_tb): + """Context manager exit with automatic cleanup.""" + if hasattr(self, 'logger'): + self._cleanup_logger(self.logger) + + def _cleanup_logger(self, logger: logging.Logger) -> None: + """Clean up logger resources by closing all handlers with thread safety.""" + with self._lock: + cleanup_logger_handlers(logger) + + @staticmethod + def cleanup_logger(logger: logging.Logger) -> None: + 
"""Static method for cleaning up logger resources (backward compatibility).""" + cleanup_logger_handlers(logger) class GZipRotatorSize: @@ -87,19 +118,21 @@ def __call__(self, source: str, dest: str) -> None: source_filename, _ = os.path.basename(source).split(".") new_file_number = self._get_new_file_number(self.directory, source_filename) if os.path.isfile(source): - gzip_file_with_sufix(source, new_file_number) + gzip_file_with_sufix(source, str(new_file_number)) @staticmethod - def _get_new_file_number(directory, source_filename): - new_file_number = 1 - previous_gz_files = list_files(directory, ends_with=".gz") - for gz_file in previous_gz_files: - if source_filename in gz_file: - try: - oldest_file_name = gz_file.split(".")[0].split("_") - if len(oldest_file_name) > 1: - new_file_number = int(oldest_file_name[1]) + 1 - except ValueError as e: - write_stderr(f"Unable to get previous gz log file number | {gz_file} | {repr(e)}") - raise - return new_file_number + def _get_new_file_number(directory: str, source_filename: str) -> int: + pattern = re.compile(rf"{re.escape(source_filename)}_(\d+)\.log\.gz$") + max_num = 0 + try: + # Use pathlib for better performance with large directories + from pathlib import Path + dir_path = Path(directory) + for file_path in dir_path.iterdir(): + if file_path.is_file(): + match = pattern.match(file_path.name) + if match: + max_num = max(max_num, int(match.group(1))) + except OSError as e: + write_stderr(f"Unable to get previous gz log file number | {repr(e)}") + return max_num + 1 diff --git a/pythonLogs/timed_rotating.py b/pythonLogs/timed_rotating.py index 304ace3..51a8947 100755 --- a/pythonLogs/timed_rotating.py +++ b/pythonLogs/timed_rotating.py @@ -1,6 +1,7 @@ # -*- encoding: utf-8 -*- import logging.handlers import os +import threading from typing import Optional from pythonLogs.log_utils import ( check_directory_permissions, @@ -12,14 +13,20 @@ gzip_file_with_sufix, remove_old_logs, ) -from pythonLogs.settings import LogSettings +from pythonLogs.memory_utils import cleanup_logger_handlers, register_logger_weakref +from pythonLogs.settings import get_log_settings class TimedRotatingLog: """ + Time-based rotating logger with context manager support for automatic resource cleanup. 
+ Current 'rotating_when' events supported for TimedRotatingLogs: - midnight - roll over at midnight - W{0-6} - roll over on a certain day; 0 - Monday + Use RotateWhen enum values: + RotateWhen.MIDNIGHT - roll over at midnight + RotateWhen.MONDAY through RotateWhen.SUNDAY - roll over on specific days + RotateWhen.HOURLY - roll over every hour + RotateWhen.DAILY - roll over daily """ def __init__( @@ -32,13 +39,13 @@ def __init__( sufix: Optional[str] = None, daystokeep: Optional[int] = None, encoding: Optional[str] = None, - datefmt: Optional[str] = None, - timezone: Optional[str] = None, - streamhandler: Optional[bool] = None, - showlocation: Optional[bool] = None, - rotateatutc: Optional[bool] = None, + datefmt: Optional[str] = None, + timezone: Optional[str] = None, + streamhandler: Optional[bool] = None, + showlocation: Optional[bool] = None, + rotateatutc: Optional[bool] = None, ): - _settings = LogSettings() + _settings = get_log_settings() self.level = get_level(level or _settings.level) self.appname = name or _settings.appname self.directory = directory or _settings.directory @@ -52,6 +59,9 @@ def __init__( self.streamhandler = streamhandler or _settings.stream_handler self.showlocation = showlocation or _settings.show_location self.rotateatutc = rotateatutc or _settings.rotate_at_utc + self.logger = None + # Instance-level lock for thread safety + self._lock = threading.Lock() def init(self): check_filename_instance(self.filenames) @@ -68,8 +78,7 @@ def init(self): encoding=self.encoding, when=self.when, utc=self.rotateatutc, - backupCount=self.daystokeep, - ) + backupCount=self.daystokeep, ) file_handler.suffix = self.sufix file_handler.rotator = GZipRotatorTimed(self.directory, self.daystokeep) file_handler.setFormatter(formatter) @@ -80,8 +89,32 @@ def init(self): stream_hdlr = get_stream_handler(self.level, formatter) logger.addHandler(stream_hdlr) + self.logger = logger + # Register weak reference for memory tracking + register_logger_weakref(logger) return logger + def __enter__(self): + """Context manager entry.""" + if not hasattr(self, 'logger') or self.logger is None: + self.init() + return self.logger + + def __exit__(self, exc_type, exc_val, exc_tb): + """Context manager exit with automatic cleanup.""" + if hasattr(self, 'logger'): + self._cleanup_logger(self.logger) + + def _cleanup_logger(self, logger: logging.Logger) -> None: + """Clean up logger resources by closing all handlers with thread safety.""" + with self._lock: + cleanup_logger_handlers(logger) + + @staticmethod + def cleanup_logger(logger: logging.Logger) -> None: + """Static method for cleaning up logger resources (backward compatibility).""" + cleanup_logger_handlers(logger) + class GZipRotatorTimed: def __init__(self, dir_logs: str, days_to_keep: int): diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..6018e1d --- /dev/null +++ b/tests/README.md @@ -0,0 +1,136 @@ +# Test Suite Documentation + +This directory contains comprehensive tests for the pythonLogs library, covering all features including Factory Pattern, Context Managers, Memory Management, and Performance Optimizations. 
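+
+A minimal, illustrative sketch of the API surface these tests exercise (the names, paths, and values below are placeholders, not part of the suite; see the individual test files for authoritative usage):
+
+```python
+from pythonLogs import BasicLog, LogLevel, size_rotating_logger
+
+# Context-manager style: handlers are closed automatically on exit
+with BasicLog(name="demo", level=LogLevel.INFO.value) as logger:
+    logger.info("hello from the context manager")
+
+# Factory convenience function for a size-based rotating file logger
+app_logger = size_rotating_logger(
+    name="demo_app",
+    directory="/tmp/demo_logs",  # placeholder directory
+    filenames=["app.log"],
+    maxmbytes=5,
+    daystokeep=7,
+)
+app_logger.info("rotating file logger ready")
+```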
+ +## Test Files Overview + +### Core Functionality Tests +- **`test_basic_log.py`** - Comprehensive BasicLog functionality testing + - Tests BasicLog initialization, context managers, thread safety + - Validates cleanup methods and multiple instance handling + - **10 test cases** covering all BasicLog features + +- **`test_some_log_utils.py`** - Tests for utility functions + - Tests helper functions in `log_utils.py` + - Includes file operations, timezone handling, and validation + - Multiple test cases for various utilities + +### Context Manager & Resource Management Tests +- **`test_context_managers.py`** - Context manager functionality for all logger types + - Tests automatic resource cleanup for BasicLog, SizeRotatingLog, TimedRotatingLog + - Validates exception safety and proper handler cleanup + - Tests nested context managers and multiple file handlers + - **10 test cases** including the new `shutdown_logger` test + +- **`test_resource_management.py`** - Resource lifecycle management + - Tests factory registry cleanup and memory management + - Validates handler cleanup and resource disposal + - Tests concurrent access safety and performance + - **9 test cases** for robust resource management + +### Logger Type Tests +- **`test_size_rotating.py`** - Size-based rotating logger tests + - Tests file rotation, compression, and cleanup + - Context manager functionality and resource management + - Multiple file handling and stream output + - Comprehensive size rotation scenarios + +- **`test_timed_rotating.py`** - Time-based rotating logger tests + - Tests time-based rotation (hourly, daily, midnight, weekdays) + - Context manager functionality and resource management + - Timezone handling and rotation scheduling + - Comprehensive time rotation scenarios + +### Factory Pattern Tests +- **`test_factory.py`** - Core factory pattern functionality + - Tests `LoggerFactory` class and all factory methods + - Validates logger creation, registry caching, and performance + - Tests error handling and type validation + - **Multiple test cases** covering all factory features + +- **`test_enums.py`** - Enum usage with factory pattern + - Tests `LogLevel`, `RotateWhen`, and `LoggerType` enums + - Validates enum-to-string conversion and type safety + - Tests backward compatibility with string values + - **10 test cases** covering all enum scenarios + +- **`test_factory_examples.py`** - Integration and practical examples + - Real-world usage scenarios and production-like setups + - Multi-logger configurations and file-based logging + - Registry usage patterns and error scenarios + - **Multiple test cases** demonstrating practical usage + +- **`test_string_levels.py`** - String-based level configuration + - Tests case-insensitive string level handling + - Validates string to enum conversion + - Tests all logger types with string levels + - Comprehensive string level compatibility + +### Performance & Memory Tests +- **`test_performance.py`** - Performance and optimization tests + - Validates caching improvements and performance gains + - Tests settings caching, registry performance, and memory usage + - Stress testing and large-scale logger creation + - Performance benchmarking for optimization features + +- **`test_memory_optimization.py`** - Memory management and optimization + - Tests memory usage patterns and cleanup efficiency + - Validates formatter caching and directory caching + - Tests garbage collection and memory leak prevention + - Memory optimization feature validation + +- 
**`test_performance_zoneinfo.py`** - Performance tests for timezone operations + - Benchmarks timezone function caching and optimization + - Tests performance under concurrent access and bulk operations + - Validates memory efficiency of timezone caching + - Timezone performance optimization validation + +### Thread Safety & Concurrency Tests +- **`test_thread_safety.py`** - Concurrency and thread safety + - Tests concurrent logger creation and registry access + - Validates thread-safe operations across all components + - Tests concurrent context manager cleanup + - Stress testing for multithreaded environments + +### Timezone & Migration Tests +- **`test_timezone_migration.py`** - Timezone functionality with zoneinfo + - Tests migration from pytz to Python's built-in zoneinfo module + - Validates UTC, localtime, and named timezone support + - Tests timezone integration with all logger types and factory pattern + - **Multiple test cases** covering comprehensive timezone scenarios + +- **`test_zoneinfo_fallbacks.py`** - Timezone fallback mechanisms and edge cases + - Tests fallback behavior for systems without complete timezone data + - Validates error handling and edge cases for timezone operations + - Tests concurrent access and memory efficiency + - **Multiple test cases** for robust timezone handling + + + +## Running Tests + +### Run All Tests and Create a Coverage Report +```bash +poetry run poe test +``` + +### Run Specific Test Categories +```bash +# Context managers and resource management +poetry run pytest tests/test_context_managers.py tests/test_resource_management.py -v + +# Core logger functionality +poetry run pytest tests/test_basic_log.py tests/test_size_rotating.py tests/test_timed_rotating.py -v + +# Factory pattern tests +poetry run pytest tests/test_factory*.py tests/test_enums.py -v + +# Performance and memory tests +poetry run pytest tests/test_performance*.py tests/test_memory*.py -v + +# Thread safety and concurrency +poetry run pytest tests/test_thread_safety.py -v + +# Timezone functionality +poetry run pytest tests/test_timezone*.py tests/test_zoneinfo*.py -v +``` diff --git a/tests/test_basic_log.py b/tests/test_basic_log.py new file mode 100644 index 0000000..e3777d9 --- /dev/null +++ b/tests/test_basic_log.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +import logging +import os +import sys +import pytest + + +# Add the parent directory to sys.path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import BasicLog, LogLevel, clear_logger_registry + + +class TestBasicLog: + """Test BasicLog functionality.""" + + def setup_method(self): + """Set up test fixtures before each test method.""" + # Clear any existing loggers + clear_logger_registry() + + def teardown_method(self): + """Clean up after each test method.""" + # Clear registry after each test + clear_logger_registry() + + def test_basic_log_initialization(self): + """Test BasicLog initialization with default parameters.""" + basic_log = BasicLog() + assert hasattr(basic_log, 'level') + assert hasattr(basic_log, 'appname') + assert hasattr(basic_log, 'encoding') + assert hasattr(basic_log, 'datefmt') + assert hasattr(basic_log, 'timezone') + assert hasattr(basic_log, 'showlocation') + assert basic_log.logger is None + + def test_basic_log_initialization_with_params(self): + """Test BasicLog initialization with custom parameters.""" + basic_log = BasicLog( + level=LogLevel.DEBUG.value, + name="test_app", + encoding="utf-8", + datefmt="%Y-%m-%d 
%H:%M:%S", + timezone="UTC", + showlocation=True + ) + assert basic_log.level == logging.DEBUG + assert basic_log.appname == "test_app" + assert basic_log.encoding == "utf-8" + assert basic_log.datefmt == "%Y-%m-%d %H:%M:%S" + assert basic_log.timezone == "UTC" + assert basic_log.showlocation is True + + def test_basic_log_init_method(self): + """Test BasicLog init method creates logger.""" + basic_log = BasicLog(name="test_init", level=LogLevel.INFO.value) + logger = basic_log.init() + + assert isinstance(logger, logging.Logger) + assert logger.name == "test_init" + assert logger.level == logging.INFO + assert hasattr(basic_log, 'logger') + assert basic_log.logger is logger + + def test_basic_log_logger_functionality(self): + """Test that BasicLog logger can log messages.""" + basic_log = BasicLog(name="test_logging", level=LogLevel.INFO.value) + logger = basic_log.init() + + # Test logging at different levels + logger.info("Test info message") + logger.warning("Test warning message") + logger.error("Test error message") + + # Should not raise any exceptions + + def test_basic_log_level_filtering(self): + """Test that BasicLog respects log level filtering.""" + basic_log = BasicLog(name="test_level", level=LogLevel.WARNING.value) + logger = basic_log.init() + + assert logger.level == logging.WARNING + assert logger.isEnabledFor(logging.WARNING) + assert logger.isEnabledFor(logging.ERROR) + assert not logger.isEnabledFor(logging.INFO) + assert not logger.isEnabledFor(logging.DEBUG) + + def test_basic_log_cleanup_static_method(self): + """Test BasicLog static cleanup method.""" + basic_log = BasicLog(name="test_cleanup", level=LogLevel.INFO.value) + logger = basic_log.init() + + # Add a handler + handler = logging.StreamHandler() + logger.addHandler(handler) + assert len(logger.handlers) > 0 + + # Clean up using static method + BasicLog.cleanup_logger(logger) + assert len(logger.handlers) == 0 + + def test_basic_log_instance_cleanup_method(self): + """Test BasicLog instance cleanup method.""" + basic_log = BasicLog(name="test_instance_cleanup", level=LogLevel.INFO.value) + logger = basic_log.init() + + # Add a handler + handler = logging.StreamHandler() + logger.addHandler(handler) + assert len(logger.handlers) > 0 + + # Clean up using instance method + basic_log._cleanup_logger(logger) + assert len(logger.handlers) == 0 + + def test_basic_log_thread_safety(self): + """Test BasicLog thread safety with concurrent operations.""" + import threading + import time + + basic_log = BasicLog(name="test_thread_safety", level=LogLevel.INFO.value) + logger = basic_log.init() + results = [] + + def log_messages(thread_id): + try: + for i in range(10): + logger.info(f"Thread {thread_id} message {i}") + time.sleep(0.001) # Small delay + results.append(f"thread_{thread_id}_success") + except Exception as e: + results.append(f"thread_{thread_id}_error_{e}") + + # Create multiple threads + threads = [] + for i in range(5): + thread = threading.Thread(target=log_messages, args=(i,)) + threads.append(thread) + thread.start() + + # Wait for all threads to complete + for thread in threads: + thread.join() + + # All threads should complete successfully + assert len(results) == 5 + assert all("success" in result for result in results) + + def test_basic_log_different_log_levels(self): + """Test BasicLog with different log levels.""" + log_levels = [ + (LogLevel.DEBUG.value, logging.DEBUG), + (LogLevel.INFO.value, logging.INFO), + (LogLevel.WARNING.value, logging.WARNING), + (LogLevel.ERROR.value, logging.ERROR), 
+ (LogLevel.CRITICAL.value, logging.CRITICAL), + ] + + for level_str, level_int in log_levels: + basic_log = BasicLog(name=f"test_{level_str}", level=level_str) + logger = basic_log.init() + assert logger.level == level_int + + # Clean up + BasicLog.cleanup_logger(logger) + + def test_basic_log_multiple_instances(self): + """Test creating multiple BasicLog instances.""" + loggers = [] + + for i in range(5): + basic_log = BasicLog(name=f"test_multi_{i}", level=LogLevel.INFO.value) + logger = basic_log.init() + loggers.append((basic_log, logger)) + + assert logger.name == f"test_multi_{i}" + assert logger.level == logging.INFO + + # Clean up all loggers + for basic_log, logger in loggers: + BasicLog.cleanup_logger(logger) + assert len(logger.handlers) == 0 + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py new file mode 100644 index 0000000..44a898d --- /dev/null +++ b/tests/test_context_managers.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +import logging +import os +import sys +import tempfile + + +# Add the parent directory to sys.path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import pytest +from pythonLogs import ( + BasicLog, + SizeRotatingLog, + TimedRotatingLog, + LogLevel, + RotateWhen, + clear_logger_registry, + LoggerFactory, +) + + +class TestContextManagers: + """Test context manager functionality for resource management.""" + + def setup_method(self): + """Set up test fixtures before each test method.""" + # Clear any existing loggers + clear_logger_registry() + + # Create temporary directory for log files + self.temp_dir = tempfile.mkdtemp() + self.log_file = "test.log" + + def teardown_method(self): + """Clean up after each test method.""" + # Clear registry after each test + clear_logger_registry() + + # Clean up temporary files + import shutil + if os.path.exists(self.temp_dir): + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def test_basic_log_context_manager(self): + """Test BasicLog as context manager.""" + logger_name = "test_basic_context" + + with BasicLog(name=logger_name, level=LogLevel.INFO.value) as logger: + assert isinstance(logger, logging.Logger) + assert logger.name == logger_name + assert logger.level == logging.INFO + + # Test logging + logger.info("Test message in context") + + # After context exit, handlers should be cleaned up + assert len(logger.handlers) == 0 + + def test_size_rotating_context_manager(self): + """Test SizeRotatingLog as context manager.""" + logger_name = "test_size_context" + + with SizeRotatingLog( + name=logger_name, + level=LogLevel.DEBUG.value, + directory=self.temp_dir, + filenames=[self.log_file], + maxmbytes=1, + daystokeep=2 + ) as logger: + assert isinstance(logger, logging.Logger) + assert logger.name == logger_name + assert logger.level == logging.DEBUG + + # Should have file handlers + file_handlers = [h for h in logger.handlers if hasattr(h, 'baseFilename')] + assert len(file_handlers) > 0 + + # Test logging + logger.debug("Test debug message") + logger.info("Test info message") + + # After context exit, handlers should be cleaned up + assert len(logger.handlers) == 0 + + def test_timed_rotating_context_manager(self): + """Test TimedRotatingLog as context manager.""" + logger_name = "test_timed_context" + + with TimedRotatingLog( + name=logger_name, + level=LogLevel.WARNING.value, + directory=self.temp_dir, + filenames=[self.log_file], + when=RotateWhen.HOURLY.value, + 
daystokeep=3 + ) as logger: + assert isinstance(logger, logging.Logger) + assert logger.name == logger_name + assert logger.level == logging.WARNING + + # Should have file handlers + file_handlers = [h for h in logger.handlers if hasattr(h, 'baseFilename')] + assert len(file_handlers) > 0 + + # Test logging + logger.warning("Test warning message") + logger.error("Test error message") + + # After context exit, handlers should be cleaned up + assert len(logger.handlers) == 0 + + def test_context_manager_exception_handling(self): + """Test context manager cleanup on exceptions.""" + logger_name = "test_exception_context" + logger_ref = None + + try: + with BasicLog(name=logger_name, level=LogLevel.ERROR.value) as logger: + logger_ref = logger + logger.error("Test before exception") + raise ValueError("Test exception") + except ValueError: + pass # Expected exception + + # Even with exception, handlers should be cleaned up + assert logger_ref is not None + assert len(logger_ref.handlers) == 0 + + def test_context_manager_without_init(self): + """Test context manager calls init() if not already called.""" + logger_instance = BasicLog(name="test_no_init", level=LogLevel.INFO.value) + + # Don't call init() manually - logger should be None initially + assert logger_instance.logger is None + + with logger_instance as logger: + # Context manager should have called init() + assert hasattr(logger_instance, 'logger') + assert logger_instance.logger is not None + assert isinstance(logger, logging.Logger) + logger.info("Test message") + + # Cleanup should still work + assert len(logger.handlers) == 0 + + def test_context_manager_with_existing_init(self): + """Test context manager with logger already initialized.""" + logger_instance = BasicLog(name="test_existing_init", level=LogLevel.INFO.value) + + # Call init() manually first + manual_logger = logger_instance.init() + assert hasattr(logger_instance, 'logger') + + with logger_instance as context_logger: + # Should return the same logger + assert context_logger is manual_logger + context_logger.info("Test message") + + # Cleanup should still work + assert len(manual_logger.handlers) == 0 + + def test_multiple_file_handlers_cleanup(self): + """Test cleanup of multiple file handlers.""" + logger_name = "test_multi_files" + multiple_files = ["test1.log", "test2.log", "test3.log"] + + with SizeRotatingLog( + name=logger_name, directory=self.temp_dir, filenames=multiple_files, maxmbytes=1 + ) as logger: + # Should have multiple file handlers + file_handlers = [h for h in logger.handlers if hasattr(h, 'baseFilename')] + assert len(file_handlers) == len(multiple_files) + + logger.info("Test message to multiple files") + + # All handlers should be cleaned up + assert len(logger.handlers) == 0 + + def test_stream_handler_cleanup(self): + """Test cleanup of stream handlers.""" + logger_name = "test_stream_cleanup" + + with SizeRotatingLog( + name=logger_name, directory=self.temp_dir, filenames=[self.log_file], streamhandler=True + # Enable stream handler + ) as logger: + # Should have both file and stream handlers + stream_handlers = [h for h in logger.handlers if isinstance(h, logging.StreamHandler)] + file_handlers = [h for h in logger.handlers if hasattr(h, 'baseFilename')] + + assert len(stream_handlers) > 0 + assert len(file_handlers) > 0 + + logger.info("Test message to file and console") + + # All handlers should be cleaned up + assert len(logger.handlers) == 0 + + def test_nested_context_managers(self): + """Test nested context managers don't 
interfere.""" + with BasicLog(name="outer_logger", level=LogLevel.INFO.value) as outer_logger: + outer_logger.info("Outer logger message") + + with BasicLog(name="inner_logger", level=LogLevel.DEBUG.value) as inner_logger: + inner_logger.debug("Inner logger message") + assert outer_logger.name != inner_logger.name + + # Inner logger should be cleaned up + assert len(inner_logger.handlers) == 0 + + # Outer logger should still work + outer_logger.info("Outer logger still working") + + # Both loggers should be cleaned up + assert len(outer_logger.handlers) == 0 + assert len(inner_logger.handlers) == 0 + + def test_shutdown_logger(self): + """Test shutdown_logger functionality.""" + logger_name = "test_shutdown_logger" + + # Create a logger using factory (with registry caching) + logger = LoggerFactory.get_or_create_logger("basic", name=logger_name, level=LogLevel.INFO.value) + + # Verify logger is in registry + assert logger_name in LoggerFactory._logger_registry + + # Add some handlers + handler = logging.StreamHandler() + logger.addHandler(handler) + + # Verify handler is attached + assert len(logger.handlers) > 0 + + # Shutdown the specific logger + LoggerFactory.shutdown_logger(logger_name) + + # Verify logger handlers are cleaned up + assert len(logger.handlers) == 0 + + # Verify logger is removed from registry + assert logger_name not in LoggerFactory._logger_registry + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_enums.py b/tests/test_enums.py new file mode 100644 index 0000000..a3e0215 --- /dev/null +++ b/tests/test_enums.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python3 +"""Test enum usage with the factory pattern.""" +import os +import sys +import tempfile +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import ( + LoggerFactory, + LoggerType, + LogLevel, + RotateWhen, + create_logger, + basic_logger, + timed_rotating_logger, + clear_logger_registry, +) + + +class TestEnumUsage: + """Test cases for enum usage with factory pattern.""" + + def setup_method(self): + """Clear registry before each test.""" + clear_logger_registry() + + def test_log_level_enum_usage(self): + """Test LogLevel enum usage.""" + logger = LoggerFactory.create_logger( + LoggerType.BASIC, + name="enum_test", + level=LogLevel.DEBUG # Using enum instead of string + ) + assert logger.name == "enum_test" + assert logger.level == 10 # DEBUG level + + def test_rotate_when_enum_usage(self): + """Test RotateWhen enum usage.""" + with tempfile.TemporaryDirectory() as temp_dir: + logger = timed_rotating_logger( + name="rotate_test", + directory=temp_dir, + level=LogLevel.INFO, # LogLevel enum + when=RotateWhen.MIDNIGHT # RotateWhen enum + ) + assert logger.name == "rotate_test" + + def test_mixed_enum_and_string_usage(self): + """Test mixed enum and string usage.""" + logger = create_logger( + "basic", # String logger type + name="mixed_test", + level=LogLevel.WARNING # Enum level + ) + assert logger.name == "mixed_test" + assert logger.level == 30 # WARNING level + + def test_all_log_level_enum_values(self): + """Test all LogLevel enum values are accessible and work.""" + levels = [ + (LogLevel.DEBUG, 10), + (LogLevel.INFO, 20), + (LogLevel.WARNING, 30), + (LogLevel.ERROR, 40), + (LogLevel.CRITICAL, 50) + ] + + for enum_level, expected_int in levels: + logger = basic_logger( + name=f"test_{enum_level.value.lower()}", + level=enum_level + ) + assert logger.level == expected_int + 
+ def test_all_rotate_when_enum_values(self): + """Test all RotateWhen enum values are accessible.""" + when_options = [ + RotateWhen.MIDNIGHT, + RotateWhen.HOURLY, + RotateWhen.DAILY, + RotateWhen.MONDAY, + RotateWhen.TUESDAY, + RotateWhen.WEDNESDAY, + RotateWhen.THURSDAY, + RotateWhen.FRIDAY, + RotateWhen.SATURDAY, + RotateWhen.SUNDAY + ] + + # Just verify they're accessible and have expected values + expected_values = ['midnight', 'H', 'D', 'W0', 'W1', 'W2', 'W3', 'W4', 'W5', 'W6'] + actual_values = [when.value for when in when_options] + assert actual_values == expected_values + + def test_enum_string_conversion(self): + """Test that enums are properly converted to strings internally.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Create logger with enums + logger = LoggerFactory.create_logger( + LoggerType.TIMED_ROTATING, + name="conversion_test", + directory=temp_dir, + level=LogLevel.ERROR, + when=RotateWhen.DAILY + ) + + # Verify logger was created successfully + assert logger.name == "conversion_test" + assert logger.level == 40 # ERROR level + + def test_backward_compatibility_with_strings(self): + """Test that string values still work alongside enums.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Mix of enums and strings + logger = timed_rotating_logger( + name="compat_test", + directory=temp_dir, + level="INFO", # String level + when=RotateWhen.MIDNIGHT # Enum when + ) + + assert logger.name == "compat_test" + assert logger.level == 20 # INFO level + + def test_logger_type_enum_values(self): + """Test LoggerType enum values.""" + types = [LoggerType.BASIC, LoggerType.SIZE_ROTATING, LoggerType.TIMED_ROTATING] + expected = ["basic", "size_rotating", "timed_rotating"] + actual = [t.value for t in types] + assert actual == expected + + def test_case_insensitive_string_logger_types(self): + """Test that string logger types are case insensitive.""" + test_cases = ["BASIC", "Basic", "basic", "BASIC"] + + for case in test_cases: + logger = create_logger(case, name=f"case_test_{case}") + assert logger.name == f"case_test_{case}" + + def test_invalid_enum_conversion(self): + """Test error handling for invalid enum-like strings.""" + with pytest.raises(ValueError, match="Invalid logger type"): + create_logger("invalid_enum_type", name="error_test") diff --git a/tests/test_factory.py b/tests/test_factory.py new file mode 100644 index 0000000..6d02422 --- /dev/null +++ b/tests/test_factory.py @@ -0,0 +1,316 @@ +#!/usr/bin/env python3 +"""Test the factory pattern implementation.""" +import os +import sys +import tempfile +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + + +from pythonLogs import ( + LoggerFactory, + LoggerType, + LogLevel, + RotateWhen, + create_logger, + get_or_create_logger, + basic_logger, + size_rotating_logger, + timed_rotating_logger, + clear_logger_registry, + get_registered_loggers, +) + + +class TestLoggerFactory: + """Test cases for the LoggerFactory pattern.""" + + def setup_method(self): + """Clear registry before each test.""" + clear_logger_registry() + + def test_basic_logger_creation_via_factory(self): + """Test basic logger creation using factory.""" + _basic_logger = LoggerFactory.create_logger(LoggerType.BASIC, name="test_basic") + assert _basic_logger.name == "test_basic" + + def test_size_rotating_logger_creation(self): + """Test size rotating logger creation using convenience function.""" + with tempfile.TemporaryDirectory() as temp_dir: 
+ size_logger = size_rotating_logger( + name="test_size", + directory=temp_dir, + maxmbytes=5 + ) + assert size_logger.name == "test_size" + + def test_timed_rotating_logger_creation(self): + """Test timed rotating logger creation.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_logger = timed_rotating_logger( + name="test_timed", + directory=temp_dir, + when="midnight" + ) + assert timed_logger.name == "test_timed" + + def test_logger_registry_caching(self): + """Test logger registry functionality.""" + # Clear registry and verify it's empty + clear_logger_registry() + assert len(get_registered_loggers()) == 0 + + # Create logger with caching + logger1 = get_or_create_logger(LoggerType.BASIC, name="cached_logger") + logger2 = get_or_create_logger(LoggerType.BASIC, name="cached_logger") + + # Should be the same instance + assert logger1 is logger2 + assert len(get_registered_loggers()) == 1 + + def test_string_based_logger_type_creation(self): + """Test string-based logger type creation.""" + string_logger = create_logger("basic", name="string_test") + assert string_logger.name == "string_test" + + def test_invalid_logger_type_handling(self): + """Test error handling for invalid logger types.""" + with pytest.raises(ValueError, match="Invalid logger type"): + create_logger("invalid_type", name="error_test") + + def test_performance_improvement_with_caching(self): + """Test performance improvements with registry caching.""" + import time + + # Test without registry (creates new each time) + clear_logger_registry() + start_time = time.time() + for i in range(20): # Reduced for faster tests + create_logger(LoggerType.BASIC, name=f"perf_test_{i}") + no_cache_time = time.time() - start_time + + # Test with registry (reuses loggers) + clear_logger_registry() + start_time = time.time() + for i in range(20): + get_or_create_logger(LoggerType.BASIC, name="cached_perf_test") + cached_time = time.time() - start_time + + # Cached should be faster (allow some tolerance for test environment) + assert cached_time <= no_cache_time + + def test_convenience_functions(self): + """Test all convenience functions work correctly.""" + basic_conv = basic_logger(name="conv_basic") + assert basic_conv.name == "conv_basic" + + def test_registry_management(self): + """Test registry management functions.""" + # Create some loggers + logger1 = get_or_create_logger(LoggerType.BASIC, name="logger1") + logger2 = get_or_create_logger(LoggerType.BASIC, name="logger2") + + # Check registry contents + registered = get_registered_loggers() + assert len(registered) == 2 + assert "logger1" in registered + assert "logger2" in registered + + # Clear registry + clear_logger_registry() + assert len(get_registered_loggers()) == 0 + + def test_logger_with_file_output(self): + """Test logger creation with actual file output.""" + with tempfile.TemporaryDirectory() as temp_dir: + logger = size_rotating_logger( + name="file_test", + directory=temp_dir, + filenames=["test.log"], + level="INFO" + ) + + # Test logging + logger.info("Test message") + logger.warning("Test warning") + + # Verify logger is working + assert logger.name == "file_test" + assert logger.level == 20 # INFO level + + def test_factory_create_logger_with_enums(self): + """Test factory create_logger with enum parameters.""" + logger = LoggerFactory.create_logger( + LoggerType.BASIC, + level=LogLevel.DEBUG, + name="enum_test" + ) + assert logger.name == "enum_test" + assert logger.level == 10 # DEBUG level + + def 
test_factory_create_logger_with_strings(self): + """Test factory create_logger with string parameters.""" + logger = LoggerFactory.create_logger( + "basic", + level="WARNING", + name="string_test" + ) + assert logger.name == "string_test" + assert logger.level == 30 # WARNING level + + def test_factory_create_logger_invalid_type(self): + """Test factory create_logger with invalid logger type.""" + with pytest.raises(ValueError, match="Invalid logger type"): + LoggerFactory.create_logger("invalid_type") + + def test_factory_create_size_rotating_logger(self): + """Test factory create_size_rotating_logger method.""" + with tempfile.TemporaryDirectory() as temp_dir: + logger = LoggerFactory.create_size_rotating_logger( + name="size_factory_test", + directory=temp_dir, + maxmbytes=10, + level=LogLevel.INFO + ) + assert logger.name == "size_factory_test" + + def test_factory_create_timed_rotating_logger(self): + """Test factory create_timed_rotating_logger method.""" + with tempfile.TemporaryDirectory() as temp_dir: + logger = LoggerFactory.create_timed_rotating_logger( + name="timed_factory_test", + directory=temp_dir, + when=RotateWhen.DAILY, + level="ERROR" + ) + assert logger.name == "timed_factory_test" + + def test_factory_create_basic_logger(self): + """Test factory create_basic_logger method.""" + logger = LoggerFactory.create_basic_logger( + name="basic_factory_test", + level=LogLevel.CRITICAL + ) + assert logger.name == "basic_factory_test" + assert logger.level == 50 # CRITICAL level + + def test_factory_shutdown_logger(self): + """Test factory shutdown_logger functionality.""" + # Create and register a logger + logger = get_or_create_logger(LoggerType.BASIC, name="shutdown_test") + assert "shutdown_test" in get_registered_loggers() + + # Shutdown the logger + result = LoggerFactory.shutdown_logger("shutdown_test") + assert result is True + assert "shutdown_test" not in get_registered_loggers() + + # Try to shutdown non-existent logger + result = LoggerFactory.shutdown_logger("non_existent") + assert result is False + + def test_factory_get_or_create_with_default_name(self): + """Test get_or_create_logger with default name.""" + logger = LoggerFactory.get_or_create_logger(LoggerType.BASIC) + # Should use default appname from settings + assert logger.name is not None + + def test_factory_enum_conversion_edge_cases(self): + """Test enum conversion edge cases in factory.""" + # Test with lowercase string + logger = LoggerFactory.create_logger("basic", name="lowercase_test") + assert logger.name == "lowercase_test" + + # Test with uppercase string + logger = LoggerFactory.create_logger("BASIC", name="uppercase_test") + assert logger.name == "uppercase_test" + + def test_convenience_functions_comprehensive(self): + """Test all convenience functions with various parameters.""" + # Test basic_logger + basic_log = basic_logger(name="conv_basic", level="DEBUG") + assert basic_log.name == "conv_basic" + + # Test size_rotating_logger + with tempfile.TemporaryDirectory() as temp_dir: + size_log = size_rotating_logger( + name="conv_size", + directory=temp_dir, + filenames=["test1.log", "test2.log"], + maxmbytes=5, + daystokeep=30 + ) + assert size_log.name == "conv_size" + + # Test timed_rotating_logger + timed_log = timed_rotating_logger( + name="conv_timed", + directory=temp_dir, + when="midnight", + sufix="%Y%m%d", + daystokeep=7 + ) + assert timed_log.name == "conv_timed" + + def test_factory_pattern_match_case_coverage(self): + """Test all pattern match cases in factory create_logger.""" + 
with tempfile.TemporaryDirectory() as temp_dir: + # Test BASIC case + basic = LoggerFactory.create_logger( + LoggerType.BASIC, + name="match_basic" + ) + assert basic.name == "match_basic" + + # Test SIZE_ROTATING case + size = LoggerFactory.create_logger( + LoggerType.SIZE_ROTATING, + name="match_size", + directory=temp_dir + ) + assert size.name == "match_size" + + # Test TIMED_ROTATING case + timed = LoggerFactory.create_logger( + LoggerType.TIMED_ROTATING, + name="match_timed", + directory=temp_dir + ) + assert timed.name == "match_timed" + + def test_factory_registry_copy_safety(self): + """Test that get_registered_loggers returns a copy.""" + # Create some loggers + logger1 = get_or_create_logger(LoggerType.BASIC, name="copy_test1") + logger2 = get_or_create_logger(LoggerType.BASIC, name="copy_test2") + + # Get registry copy + registry_copy = get_registered_loggers() + assert len(registry_copy) == 2 + + # Modify the copy (should not affect original) + registry_copy["new_logger"] = logger1 + + # Original registry should be unchanged + original_registry = get_registered_loggers() + assert len(original_registry) == 2 + assert "new_logger" not in original_registry + + def test_factory_error_handling_during_cleanup(self): + """Test error handling during logger cleanup.""" + from unittest.mock import Mock + + # Create a logger + logger = get_or_create_logger(LoggerType.BASIC, name="cleanup_error_test") + + # Create a mock handler that will raise an error on close + mock_handler = Mock() + mock_handler.close.side_effect = OSError("Test error") + logger.addHandler(mock_handler) + + # Shutdown should handle the error gracefully + result = LoggerFactory.shutdown_logger("cleanup_error_test") + assert result is True diff --git a/tests/test_factory_examples.py b/tests/test_factory_examples.py new file mode 100644 index 0000000..8774d3e --- /dev/null +++ b/tests/test_factory_examples.py @@ -0,0 +1,260 @@ +#!/usr/bin/env python3 +"""Practical examples and integration tests for the Logger Factory Pattern.""" +import os +import sys +import tempfile +from pathlib import Path +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import ( + LoggerFactory, + LoggerType, + LogLevel, + RotateWhen, + create_logger, + get_or_create_logger, + basic_logger, + size_rotating_logger, + timed_rotating_logger, + clear_logger_registry, +) + + +class TestFactoryExamples: + """Integration tests demonstrating factory pattern usage.""" + + def setup_method(self): + """Clear registry before each test.""" + clear_logger_registry() + + def test_basic_console_logging(self): + """Test basic console logging example.""" + logger = LoggerFactory.create_logger( + LoggerType.BASIC, + name="console_app", + level=LogLevel.INFO + ) + + # Test logging (won't fail, just exercises the code) + logger.info("Application started") + logger.warning("This is a warning") + logger.debug("This debug message should be filtered out") + + assert logger.name == "console_app" + assert logger.level == 20 # INFO level + + def test_file_based_size_rotating_logger(self): + """Test file-based size rotating logger example.""" + with tempfile.TemporaryDirectory() as temp_dir: + logger = size_rotating_logger( + name="app_logger", + directory=temp_dir, + filenames=["app.log", "debug.log"], + maxmbytes=1, # Small size for testing + daystokeep=7, + level=LogLevel.DEBUG, + streamhandler=False # No console output for test + ) + + # Generate some log 
messages + for i in range(10): + logger.info(f"Log message {i}") + logger.error(f"Error message {i}") + + # Verify log files were created + log_files = list(Path(temp_dir).glob("*.log")) + assert len(log_files) >= 1 # At least one log file should exist + + def test_time_based_rotating_logger(self): + """Test time-based rotating logger example.""" + with tempfile.TemporaryDirectory() as temp_dir: + logger = timed_rotating_logger( + name="scheduled_app", + directory=temp_dir, + filenames=["scheduled.log"], + when=RotateWhen.DAILY, + level=LogLevel.WARNING, + streamhandler=False + ) + + logger.warning("Scheduled task started") + logger.error("Task encountered an error") + logger.critical("Critical system failure") + + # Verify log file was created + log_files = list(Path(temp_dir).glob("*.log")) + assert len(log_files) >= 1 + + def test_production_like_multi_logger_setup(self): + """Test production-like setup with multiple loggers.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Application logger + app_logger = LoggerFactory.create_logger( + LoggerType.SIZE_ROTATING, + name="production_app", + directory=temp_dir, + filenames=["app.log"], + maxmbytes=50, + daystokeep=30, + level=LogLevel.INFO, + streamhandler=False, + showlocation=True, + timezone="UTC" + ) + + # Error logger + error_logger = LoggerFactory.create_logger( + LoggerType.SIZE_ROTATING, + name="production_errors", + directory=temp_dir, + filenames=["errors.log"], + maxmbytes=10, + daystokeep=90, + level=LogLevel.ERROR, + streamhandler=False + ) + + # Audit logger + audit_logger = LoggerFactory.create_logger( + LoggerType.TIMED_ROTATING, + name="audit_log", + directory=temp_dir, + filenames=["audit.log"], + when=RotateWhen.MIDNIGHT, + level=LogLevel.INFO, + streamhandler=False + ) + + # Test logging to different loggers + app_logger.info("Application started successfully") + error_logger.error("Database connection failed") + audit_logger.info("User login: admin") + + # Verify all loggers are different instances + assert app_logger.name != error_logger.name != audit_logger.name + + # Verify log files were created + log_files = list(Path(temp_dir).glob("*.log")) + assert len(log_files) >= 3 + + def test_logger_registry_in_production_scenario(self): + """Test logger registry usage in production scenario.""" + with tempfile.TemporaryDirectory() as temp_dir: + # First module gets logger + module1_logger = get_or_create_logger( + LoggerType.SIZE_ROTATING, + name="shared_app_logger", + directory=temp_dir, + level=LogLevel.INFO + ) + + # The Second module gets the same logger (cached) + module2_logger = get_or_create_logger( + LoggerType.SIZE_ROTATING, + name="shared_app_logger", + directory=temp_dir # Must provide same params + ) + + # Should be the same instance + assert module1_logger is module2_logger + + # Both modules can log + module1_logger.info("Message from module 1") + module2_logger.info("Message from module 2") + + def test_mixed_enum_string_usage_example(self): + """Test realistic mixed usage of enums and strings.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Configuration from environment (strings) + config_level = "INFO" + config_when = "midnight" + + # Create logger with mix of config and enums + logger = timed_rotating_logger( + name="config_driven_app", + directory=temp_dir, + level=config_level, # String from config + when=RotateWhen.MIDNIGHT, # Enum for type safety + streamhandler=True + ) + + logger.info("Configuration loaded successfully") + assert logger.name == "config_driven_app" + + def 
test_error_handling_scenarios(self): + """Test various error handling scenarios.""" + # Invalid logger type + with pytest.raises(ValueError, match="Invalid logger type"): + create_logger("nonexistent_type", name="error_test") + + # Invalid directory (should raise PermissionError when trying to create) + with pytest.raises(PermissionError): + size_rotating_logger( + name="permission_test", + directory="/invalid/directory/path" + ) + + def test_logger_customization_example(self): + """Test logger with extensive customization.""" + with tempfile.TemporaryDirectory() as temp_dir: + logger = LoggerFactory.create_logger( + LoggerType.TIMED_ROTATING, + name="custom_app", + directory=temp_dir, + filenames=["custom.log", "custom_debug.log"], + level=LogLevel.DEBUG, + when=RotateWhen.HOURLY, + daystokeep=14, + encoding="utf-8", + datefmt="%Y-%m-%d %H:%M:%S", + timezone="UTC", + streamhandler=True, + showlocation=True, + rotateatutc=True + ) + + # Test all log levels + logger.debug("Debug information") + logger.info("Informational message") + logger.warning("Warning message") + logger.error("Error occurred") + logger.critical("Critical failure") + + assert logger.name == "custom_app" + assert logger.level == 10 # DEBUG level + + def test_convenience_functions_examples(self): + """Test all convenience functions with realistic scenarios.""" + # Basic logger for console output + console_logger = basic_logger( + name="console", + level=LogLevel.WARNING + ) + console_logger.warning("Console warning message") + + # Size rotating for application logs + with tempfile.TemporaryDirectory() as temp_dir: + app_logger = size_rotating_logger( + name="application", + directory=temp_dir, + maxmbytes=5, + level=LogLevel.INFO + ) + app_logger.info("Application log message") + + # Timed rotating for audit logs + audit_logger = timed_rotating_logger( + name="audit", + directory=temp_dir, + when=RotateWhen.DAILY, + level=LogLevel.INFO + ) + audit_logger.info("Audit log message") + + # Verify all loggers have different names + names = {console_logger.name, app_logger.name, audit_logger.name} + assert len(names) == 3 # All unique names diff --git a/tests/test_log.py b/tests/test_log.py deleted file mode 100755 index ff0080e..0000000 --- a/tests/test_log.py +++ /dev/null @@ -1,135 +0,0 @@ -# -*- encoding: utf-8 -*- -import gzip -import os -import tempfile -from datetime import datetime -from pythonLogs import BasicLog, SizeRotatingLog, TimedRotatingLog -from pythonLogs.log_utils import delete_file - - -class TestLogs: - @classmethod - def setup_class(cls): - cls.directory = tempfile.gettempdir() - cls.filenames = ("testA.log", "testB.log", "testC.log") - - @classmethod - def teardown_class(cls): - for filename in cls.filenames: - file_path = str(os.path.join(cls.directory, filename)) - if os.path.isfile(file_path): - delete_file(file_path) - - def test_basic_log(self, caplog): - level = "INFO" - log = BasicLog( - level=level, - name="app", - encoding="UTF-8", - datefmt="%Y-%m-%dT%H:%M:%S", - timezone="UTC", - showlocation=False, - ).init() - log.info("test_basic_log") - assert level in caplog.text - assert "test_basic_log" in caplog.text - - def test_size_rotating_log(self, caplog): - # creating files with 2MB - for filename in self.filenames: - file_path = str(os.path.join(self.directory, filename)) - with open(file_path, "wb") as f: - f.seek((2 * 1024 * 1024) - 1) - f.write(b"\0") - - # creating an exisiting gz file to force rotation number - fname_no_ext = self.filenames[0].split(".")[0] - existing_gz_filename = 
f"{fname_no_ext}_1.log.gz" - existing_gz_file_path = str(os.path.join(self.directory, existing_gz_filename)) - with gzip.open(existing_gz_file_path, "wb") as fout: - fout.write(b"") - new_gz_filename_rotated = f"{fname_no_ext}_2.log.gz" - new_gz_filepath_rotated = str(os.path.join(self.directory, new_gz_filename_rotated)) - - level = "INFO" - log = SizeRotatingLog( - level=level, - name="app", - directory=self.directory, - filenames=self.filenames, - maxmbytes=1, - daystokeep=7, - encoding="UTF-8", - datefmt="%Y-%m-%dT%H:%M:%S", - timezone="UTC", - streamhandler=True, - showlocation=False, - ).init() - log.info("test_size_rotating_log") - assert level in caplog.text - assert "test_size_rotating_log" in caplog.text - - # delete .gz files - assert os.path.isfile(new_gz_filepath_rotated) == True - delete_file(new_gz_filepath_rotated) - for filename in self.filenames: - gz_file_name = f"{os.path.splitext(filename)[0]}_1.log.gz" - gz_file_path = os.path.join(tempfile.gettempdir(), gz_file_name) - assert os.path.isfile(gz_file_path) == True - delete_file(gz_file_path) - - def test_timed_rotating_log(self, caplog): - level = "INFO" - year = 2020 - month = 10 - day = 10 - - log = TimedRotatingLog( - level=level, - name="app", - directory=self.directory, - filenames=self.filenames, - when="midnight", - sufix="%Y%m%d", - daystokeep=7, - encoding="UTF-8", - datefmt="%Y-%m-%dT%H:%M:%S", - timezone="UTC", - streamhandler=True, - showlocation=False, - ).init() - log.info("start_test_timed_rotating_log") - assert level in caplog.text - assert "start_test_timed_rotating_log" in caplog.text - - # change files datetime - epoch_times = datetime(year, month, day, 1, 1, 1).timestamp() - for filename in self.filenames: - file_path = str(os.path.join(self.directory, filename)) - os.utime(file_path, (epoch_times, epoch_times)) - - log = TimedRotatingLog( - level=level, - name="app", - directory=self.directory, - filenames=self.filenames, - when="midnight", - sufix="%Y%m%d", - daystokeep=7, - encoding="UTF-8", - datefmt="%Y-%m-%dT%H:%M:%S", - timezone="UTC", - streamhandler=True, - showlocation=False, - ).init() - log.info("end_test_timed_rotating_log") - assert level in caplog.text - assert "end_test_timed_rotating_log" in caplog.text - - # delete test.gz files - for filename in self.filenames: - gz_file = f"{os.path.splitext(filename)[0]}_{year}{month}{day}" - gz_file_name = f"{gz_file}.log.gz" - gz_file_path = os.path.join(tempfile.gettempdir(), gz_file_name) - assert os.path.exists(gz_file_path) - delete_file(str(gz_file_path)) diff --git a/tests/test_log_utils.py b/tests/test_log_utils.py deleted file mode 100644 index 9e1ce3b..0000000 --- a/tests/test_log_utils.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- encoding: utf-8 -*- -import pytest -import tempfile -import os -from pythonLogs import log_utils -import logging - - -class TestLogUtils: - @classmethod - def setup_class(cls): - """setup_class""" - pass - - @classmethod - def teardown_class(cls): - """teardown_class""" - pass - - def test_get_stream_handler(self): - level = log_utils.get_level("DEBUG") - _, formatter = log_utils.get_logger_and_formatter("appname", "%Y-%m-%dT%H:%M:%S", False, "UTC") - stream_hdlr = log_utils.get_stream_handler(level, formatter) - assert isinstance(stream_hdlr, logging.StreamHandler) - - def test_check_filename_instance(self): - filenames = "test1.log" - with pytest.raises(TypeError) as exec_info: - log_utils.check_filename_instance(filenames) - assert type(exec_info.value) is TypeError - assert filenames in 
str(exec_info.value) - assert "Unable to parse filenames" in str(exec_info.value) - - def test_check_directory_permissions(self): - # test permission error on access - directory = os.path.join(tempfile.gettempdir(), "test") - os.makedirs(directory, mode=0o444, exist_ok=True) - assert os.path.exists(directory) == True - with pytest.raises(PermissionError) as exec_info: - log_utils.check_directory_permissions(directory) - assert type(exec_info.value) is PermissionError - assert "Unable to access directory" in str(exec_info.value) - log_utils.delete_file(directory) - assert os.path.exists(directory) == False - - # test permission error on creation - directory = "/non-existent-directory" - with pytest.raises(PermissionError) as exec_info: - log_utils.check_directory_permissions(directory) - assert type(exec_info.value) is PermissionError - assert "Unable to create directory" in str(exec_info.value) - - def test_remove_old_logs(self): - directory = os.path.join(tempfile.gettempdir(), "test") - os.makedirs(directory, mode=0o755, exist_ok=True) - assert os.path.exists(directory) == True - tmpfilewrapper = tempfile.NamedTemporaryFile(dir=directory, suffix=".gz") - log_utils.remove_old_logs(directory, 1) - file_path = tmpfilewrapper.name - assert os.path.isfile(file_path) == False - log_utils.delete_file(directory) - assert os.path.exists(directory) == False - - def test_delete_file(self): - directory = tempfile.gettempdir() - tmpfilewrapper = tempfile.NamedTemporaryFile(dir=directory, suffix=".log") - file_path = tmpfilewrapper.name - assert os.path.isfile(file_path) == True - log_utils.delete_file(file_path) - assert os.path.isfile(file_path) == False - - def test_is_older_than_x_days(self): - directory = tempfile.gettempdir() - tmpfilewrapper = tempfile.NamedTemporaryFile(dir=directory, suffix=".log") - file_path = tmpfilewrapper.name - assert os.path.isfile(file_path) == True - - result = log_utils.is_older_than_x_days(file_path, 1) - assert result == True - - result = log_utils.is_older_than_x_days(file_path, 5) - assert result == False - - log_utils.delete_file(file_path) - assert os.path.isfile(file_path) == False - - def test_get_level(self): - level = log_utils.get_level(11111111) - assert level == logging.INFO - - level = log_utils.get_level("") - assert level == logging.INFO - - level = log_utils.get_level("INFO") - assert level == logging.INFO - - level = log_utils.get_level("DEBUG") - assert level == logging.DEBUG - - level = log_utils.get_level("WARNING") - assert level == logging.WARNING - - level = log_utils.get_level("ERROR") - assert level == logging.ERROR - - level = log_utils.get_level("CRITICAL") - assert level == logging.CRITICAL - - def test_get_log_path(self): - directory = tempfile.gettempdir() - filename = "test1.log" - log_utils.get_log_path(directory, filename) - - directory = "/non-existent-directory" - filename = "test2.log" - with pytest.raises(FileNotFoundError) as exec_info: - log_utils.get_log_path(directory, filename) - assert type(exec_info.value) is FileNotFoundError - assert "Unable to open log file for writing" in str(exec_info.value) - - directory = tempfile.gettempdir() - filename = "test3.log" - file_path = str(os.path.join(directory, filename)) - with open(file_path, "w") as file: - file.write("test") - assert os.path.isfile(file_path) == True - os.chmod(file_path, 0o111) - with pytest.raises(PermissionError) as exec_info: - log_utils.get_log_path(directory, filename) - assert type(exec_info.value) is PermissionError - assert "Unable to open log file for 
writing" in str(exec_info.value) - log_utils.delete_file(file_path) - assert os.path.isfile(file_path) == False - - def test_get_format(self): - show_location = True - name = "test1" - timezone = "UTC" - result = log_utils.get_format(show_location, name, timezone) - assert result == ( - f"[%(asctime)s.%(msecs)03d+0000]:[%(levelname)s]:[{name}]:" - "[%(filename)s:%(funcName)s:%(lineno)d]:%(message)s" - ) - - show_location = False - name = "test2" - timezone = "America/Los_Angeles" - result = log_utils.get_format(show_location, name, timezone) - assert result == f"[%(asctime)s.%(msecs)03d-0800]:[%(levelname)s]:[{name}]:%(message)s" - - show_location = False - name = "test3" - timezone = "Australia/Queensland" - result = log_utils.get_format(show_location, name, timezone) - assert result == f"[%(asctime)s.%(msecs)03d+1000]:[%(levelname)s]:[{name}]:%(message)s" - - def test_gzip_file_with_sufix(self): - directory = tempfile.gettempdir() - tmpfilewrapper = tempfile.NamedTemporaryFile(dir=directory, suffix=".log") - file_path = tmpfilewrapper.name - assert os.path.isfile(file_path) == True - sufix = "test1" - result = log_utils.gzip_file_with_sufix(file_path, sufix) - file_path_no_suffix = file_path.split(".")[0] - assert result == f"{file_path_no_suffix}_{sufix}.log.gz" - log_utils.delete_file(result) - assert os.path.isfile(result) == False - - # test a non-existent file - file_path = "/non-existent-directory/test2.log" - sufix = "test2" - result = log_utils.gzip_file_with_sufix(file_path, sufix) - assert result is None - - def test_get_timezone_function(self): - timezone = "UTC" - result = log_utils.get_timezone_function(timezone) - assert result.__name__ == "gmtime" - - timezone = "localtime" - result = log_utils.get_timezone_function(timezone) - assert result.__name__ == "localtime" - - timezone = "America/Los_Angeles" - result = log_utils.get_timezone_function(timezone) - assert result.__name__ == "" diff --git a/tests/test_memory_optimization.py b/tests/test_memory_optimization.py new file mode 100644 index 0000000..6196ad0 --- /dev/null +++ b/tests/test_memory_optimization.py @@ -0,0 +1,408 @@ +#!/usr/bin/env python3 +"""Test memory optimization features of the pythonLogs library.""" +import gc +import os +import sys +import tempfile +import time +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import ( + LoggerFactory, + LoggerType, + LogLevel, + basic_logger, + clear_logger_registry, + get_memory_stats, + clear_formatter_cache, + clear_directory_cache, + force_garbage_collection, + optimize_lru_cache_sizes, + set_directory_cache_limit, +) + + +class TestMemoryOptimization: + """Test cases for memory optimization features.""" + + def setup_method(self): + """Clear all caches and registries before each test.""" + clear_logger_registry() + clear_formatter_cache() + clear_directory_cache() + force_garbage_collection() + + def teardown_method(self): + """Clean up after each test.""" + clear_logger_registry() + clear_formatter_cache() + clear_directory_cache() + # Reset to default limits + LoggerFactory.set_memory_limits(max_loggers=100, ttl_seconds=3600) + set_directory_cache_limit(500) + + def test_logger_registry_size_limit(self): + """Test that logger registry enforces size limits.""" + # Set a small limit for testing + LoggerFactory.set_memory_limits(max_loggers=3, ttl_seconds=3600) + + # Create more loggers than the limit + loggers = [] + for i in range(5): + logger = 
LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=f"test_logger_{i}", + level=LogLevel.INFO + ) + loggers.append(logger) + + # Registry should not exceed the limit + registry = LoggerFactory.get_registered_loggers() + assert len(registry) <= 3, f"Registry size {len(registry)} exceeds limit of 3" + + # Verify oldest loggers were evicted + registry_names = set(registry.keys()) + expected_names = {"test_logger_2", "test_logger_3", "test_logger_4"} # Last 3 + assert registry_names == expected_names or len(registry_names.intersection(expected_names)) >= 2 + + def test_logger_registry_ttl(self): + """Test that logger registry enforces TTL (time-to-live).""" + # Set very short TTL for testing + LoggerFactory.set_memory_limits(max_loggers=100, ttl_seconds=1) + + # Create a logger + logger1 = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name="ttl_test_logger", + level=LogLevel.INFO + ) + + # Verify it's in registry + registry = LoggerFactory.get_registered_loggers() + assert "ttl_test_logger" in registry + + # Wait for TTL to expire + time.sleep(1.1) + + # Create another logger to trigger cleanup + logger2 = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name="new_logger", + level=LogLevel.INFO + ) + + # The Original logger should be cleaned up due to TTL + registry = LoggerFactory.get_registered_loggers() + assert "ttl_test_logger" not in registry + assert "new_logger" in registry + + def test_directory_cache_size_limit(self): + """Test that directory cache enforces size limits.""" + import pythonLogs.log_utils as log_utils + + # Set a small limit for testing + set_directory_cache_limit(3) + + # Create temporary directories and check them + temp_dirs = [] + for i in range(5): + temp_dir = tempfile.mkdtemp(prefix=f"cache_test_{i}_") + temp_dirs.append(temp_dir) + log_utils.check_directory_permissions(temp_dir) + + try: + # Cache should not exceed the limit + with log_utils._directory_lock: + cache_size = len(log_utils._checked_directories) + assert cache_size <= 3, f"Directory cache size {cache_size} exceeds limit of 3" + + finally: + # Cleanup temp directories + import shutil + for temp_dir in temp_dirs: + if os.path.exists(temp_dir): + shutil.rmtree(temp_dir, ignore_errors=True) + + def test_formatter_cache_efficiency(self): + """Test that formatters are cached and reused efficiently.""" + from pythonLogs.memory_utils import get_cached_formatter + + # Clear cache first + clear_formatter_cache() + + # Create formatters with the same configuration + format_string = "[%(asctime)s]:[%(levelname)s]:%(message)s" + datefmt = "%Y-%m-%d %H:%M:%S" + + formatter1 = get_cached_formatter(format_string, datefmt) + formatter2 = get_cached_formatter(format_string, datefmt) + formatter3 = get_cached_formatter(format_string, datefmt) + + # Should be the exact same instance (cached) + assert formatter1 is formatter2 + assert formatter2 is formatter3 + + # Create formatter with different configuration + formatter4 = get_cached_formatter(format_string, "%Y-%m-%d") + + # Should be different instance + assert formatter1 is not formatter4 + + def test_memory_stats_reporting(self): + """Test memory statistics reporting functionality.""" + # Create some loggers to populate stats + for i in range(3): + LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=f"stats_test_{i}", + level=LogLevel.INFO + ) + + # Get memory stats + stats = get_memory_stats() + + # Verify stats structure + expected_keys = { + 'registry_size', + 'formatter_cache_size', + 'directory_cache_size', + 
'active_logger_count', + 'max_registry_size', + 'max_formatter_cache', + 'max_directory_cache' + } + assert set(stats.keys()) == expected_keys + + # Verify some basic constraints + assert stats['registry_size'] >= 3 + assert stats['max_registry_size'] > 0 + assert stats['max_formatter_cache'] > 0 + assert stats['max_directory_cache'] > 0 + assert isinstance(stats['active_logger_count'], int) + + def test_weak_reference_tracking(self): + """Test that weak references track active loggers correctly.""" + from pythonLogs.memory_utils import get_active_logger_count + + initial_count = get_active_logger_count() + + # Create loggers in local scope + def create_temporary_loggers(): + loggers = [] + for i in range(3): + logger = basic_logger(name=f"weak_ref_test_{i}", level=LogLevel.INFO.value) + loggers.append(logger) + return len(loggers) + + created_count = create_temporary_loggers() + + # Force garbage collection to clean up references + gc.collect() + + # Active count should eventually decrease (may not be immediate due to GC timing) + # Allow some tolerance for GC behavior + final_count = get_active_logger_count() + assert final_count >= initial_count # Some loggers might still be referenced + + def test_lru_cache_optimization(self): + """Test LRU cache size optimization.""" + from pythonLogs import log_utils + + # Get initial cache info + initial_timezone_cache = log_utils.get_timezone_function.cache_info() + + # Optimize cache sizes + optimize_lru_cache_sizes() + + # Verify caches were cleared and are working + optimized_cache = log_utils.get_timezone_function.cache_info() + assert optimized_cache.currsize == 0 # Should be cleared + + # Use the function to verify it still works + tz_func = log_utils.get_timezone_function("UTC") + assert callable(tz_func) + + # Verify cache is working + new_cache_info = log_utils.get_timezone_function.cache_info() + assert new_cache_info.currsize == 1 + + def test_force_garbage_collection(self): + """Test forced garbage collection functionality.""" + # Create some objects that could be garbage collected + temp_objects = [] + for i in range(100): + temp_objects.append(f"temp_object_{i}" * 1000) + + # Clear reference + del temp_objects + + # Force garbage collection + gc_stats = force_garbage_collection() + + # Verify stats structure + expected_keys = {'objects_collected', 'garbage_count', 'reference_cycles'} + assert set(gc_stats.keys()) == expected_keys + + # Verify all values are integers + for value in gc_stats.values(): + assert isinstance(value, (int, tuple)) # reference_cycles might be a tuple + + def test_concurrent_memory_operations(self): + """Test memory operations under concurrent access.""" + import concurrent.futures + + # Set reasonable limits for concurrent testing + LoggerFactory.set_memory_limits(max_loggers=20, ttl_seconds=10) + + results = [] + errors = [] + + def memory_worker(worker_id): + """Worker that performs various memory operations.""" + try: + # Create logger + logger = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=f"concurrent_memory_{worker_id}", + level=LogLevel.INFO + ) + + # Get memory stats + stats = get_memory_stats() + + # Use formatter cache + from pythonLogs.memory_utils import get_cached_formatter + formatter = get_cached_formatter(f"[%(levelname)s]:{worker_id}:%(message)s") + + # Log something + logger.info(f"Memory test from worker {worker_id}") + + results.append({ + 'worker_id': worker_id, + 'logger_name': logger.name, + 'stats': stats, + 'formatter': formatter is not None + }) + + except Exception 
as e: + errors.append(f"Worker {worker_id}: {str(e)}") + + # Run concurrent workers + num_workers = 10 + with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: + futures = [executor.submit(memory_worker, i) for i in range(num_workers)] + for future in concurrent.futures.as_completed(futures): + future.result() + + # Verify results + assert len(errors) == 0, f"Concurrent memory operations failed: {errors}" + assert len(results) == num_workers + + # Verify memory constraints were maintained + final_stats = get_memory_stats() + assert final_stats['registry_size'] <= 20 # Respect size limit + + def test_memory_leak_prevention(self): + """Test that the library prevents common memory leaks.""" + initial_stats = get_memory_stats() + + # Create and destroy many loggers + for batch in range(5): + # Create a batch of loggers + batch_loggers = [] + for i in range(10): + logger = basic_logger( + name=f"leak_test_batch_{batch}_logger_{i}", + level=LogLevel.INFO.value + ) + batch_loggers.append(logger) + logger.info(f"Test message from batch {batch}, logger {i}") + + # Clear references (simulating end of scope) + del batch_loggers + + # Force cleanup + force_garbage_collection() + + # Check final stats + final_stats = get_memory_stats() + + # Registry should not have grown excessively + registry_growth = final_stats['registry_size'] - initial_stats['registry_size'] + assert registry_growth <= 20, f"Registry grew by {registry_growth}, possible memory leak" + + # Cache sizes should be reasonable + assert final_stats['formatter_cache_size'] <= 50 + assert final_stats['directory_cache_size'] <= 500 + + def test_logger_cleanup_on_context_exit(self): + """Test that logger cleanup works properly with context managers.""" + from pythonLogs import BasicLog, SizeRotatingLog + + # Track initial handler count + import logging + initial_handlers = len(logging.getLogger().handlers) + + # Use context managers that should clean up + with tempfile.TemporaryDirectory() as temp_dir: + # Basic logger context manager + with BasicLog(name="cleanup_test_basic", level="INFO") as logger1: + assert len(logger1.handlers) >= 0 + logger1.info("Test message 1") + + # After context exit, handlers should be cleaned + assert len(logger1.handlers) == 0 + + # Size rotating logger context manager + with SizeRotatingLog( + name="cleanup_test_size", + directory=temp_dir, + filenames=["test.log"], + level="INFO" + ) as logger2: + assert len(logger2.handlers) > 0 + logger2.info("Test message 2") + + # After context exit, handlers should be cleaned + assert len(logger2.handlers) == 0 + + # Overall handler count should not have increased + final_handlers = len(logging.getLogger().handlers) + assert final_handlers == initial_handlers + + def test_registry_memory_management_edge_cases(self): + """Test edge cases in registry memory management.""" + # Test with zero limits (should handle gracefully) + LoggerFactory.set_memory_limits(max_loggers=0, ttl_seconds=0) + + # Creating a logger should still work but might not be cached + logger = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name="edge_case_logger", + level=LogLevel.INFO + ) + assert logger is not None + + # Test with very large limits + LoggerFactory.set_memory_limits(max_loggers=10000, ttl_seconds=86400) + + # Should handle without issues + for i in range(50): + logger = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=f"large_limit_test_{i}", + level=LogLevel.INFO + ) + assert logger is not None + + # Registry should contain all loggers 
(within limit) + registry = LoggerFactory.get_registered_loggers() + assert len(registry) >= 50 + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_performance.py b/tests/test_performance.py new file mode 100644 index 0000000..c78b12d --- /dev/null +++ b/tests/test_performance.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python3 +"""Performance tests for the factory pattern and optimizations.""" +import os +import sys +import tempfile +import time +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import ( + LoggerFactory, + LoggerType, + LogLevel, + create_logger, + get_or_create_logger, + basic_logger, + size_rotating_logger, + clear_logger_registry, + get_registered_loggers, +) + + +class TestPerformance: + """Performance tests for factory pattern and optimizations.""" + + def setup_method(self): + """Clear registry before each test.""" + clear_logger_registry() + + def test_settings_caching_performance(self): + """Test that settings caching improves performance.""" + # Create multiple loggers (should reuse cached settings) + start_time = time.time() + + loggers = [] + for i in range(50): # Reasonable number for CI/testing + logger = LoggerFactory.create_logger( + LoggerType.BASIC, + name=f"settings_test_{i}" + ) + loggers.append(logger) + + elapsed_time = time.time() - start_time + + # Should complete relatively quickly (less than 1 second) + assert elapsed_time < 1.0 + assert len(loggers) == 50 + + # Verify all loggers were created with unique names + names = {logger.name for logger in loggers} + assert len(names) == 50 + + def test_registry_caching_performance(self): + """Test that registry caching provides significant performance improvement.""" + # Baseline: Create new loggers each time + start_time = time.time() + for i in range(30): + create_logger(LoggerType.BASIC, name=f"no_cache_{i}") + no_cache_time = time.time() - start_time + + # With caching: Reuse same logger + clear_logger_registry() + start_time = time.time() + for i in range(30): + get_or_create_logger(LoggerType.BASIC, name="cached_logger") + cache_time = time.time() - start_time + + # Cached should be significantly faster + # Allow some tolerance for test environment variability + performance_improvement = (no_cache_time - cache_time) / no_cache_time + assert performance_improvement > 0.1 # At least 10% improvement + + # Verify only one logger was actually created + assert len(get_registered_loggers()) == 1 + + def test_directory_permission_caching(self): + """Test that directory permission checking is cached.""" + with tempfile.TemporaryDirectory() as temp_dir: + # First call should check and cache directory permissions + start_time = time.time() + logger1 = size_rotating_logger( + name="dir_test_1", + directory=temp_dir + ) + first_call_time = time.time() - start_time + + # Subsequent calls to the same directory should be faster (cached) + start_time = time.time() + for i in range(10): + logger = size_rotating_logger( + name=f"dir_test_{i+2}", + directory=temp_dir # The Same directory should use cache + ) + subsequent_calls_time = time.time() - start_time + + # The Average time per subsequent call should be less than the first call + avg_subsequent_time = subsequent_calls_time / 10 + assert avg_subsequent_time <= first_call_time + + def test_timezone_function_caching(self): + """Test that timezone functions are cached for performance.""" + # Create multiple loggers with same timezone 
+ start_time = time.time() + + loggers = [] + for i in range(20): + logger = basic_logger( + name=f"tz_test_{i}", + timezone="UTC" # Same timezone should use cached function + ) + loggers.append(logger) + + elapsed_time = time.time() - start_time + + # Should complete quickly due to timezone caching + assert elapsed_time < 0.5 + assert len(loggers) == 20 + + def test_enum_vs_string_performance(self): + """Test that enum usage doesn't significantly impact performance.""" + # Test with string values + start_time = time.time() + for i in range(25): + create_logger("basic", name=f"string_test_{i}", level="INFO") + string_time = time.time() - start_time + + # Test with enum values + start_time = time.time() + for i in range(25): + create_logger(LoggerType.BASIC, name=f"enum_test_{i}", level=LogLevel.INFO) + enum_time = time.time() - start_time + + # Enum performance should be comparable to strings + # Allow 60% tolerance for enum conversion overhead + assert enum_time <= string_time * 1.6 + + def test_large_scale_logger_creation(self): + """Test performance with larger number of loggers.""" + start_time = time.time() + + # Create 100 different loggers + loggers = [] + for i in range(100): + logger = LoggerFactory.create_logger( + LoggerType.BASIC, + name=f"scale_test_{i}", + level=LogLevel.INFO + ) + loggers.append(logger) + + elapsed_time = time.time() - start_time + + # Should complete in reasonable time (less than 2 seconds) + assert elapsed_time < 2.0 + assert len(loggers) == 100 + + # Verify all loggers are unique + names = {logger.name for logger in loggers} + assert len(names) == 100 + + def test_mixed_logger_types_performance(self): + """Test performance when creating mixed logger types.""" + with tempfile.TemporaryDirectory() as temp_dir: + start_time = time.time() + + loggers = [] + for i in range(30): # 10 of each type + if i % 3 == 0: + logger = create_logger(LoggerType.BASIC, name=f"mixed_basic_{i}") + elif i % 3 == 1: + logger = size_rotating_logger( + name=f"mixed_size_{i}", + directory=temp_dir + ) + else: + logger = create_logger( + LoggerType.TIMED_ROTATING, + name=f"mixed_timed_{i}", + directory=temp_dir + ) + loggers.append(logger) + + elapsed_time = time.time() - start_time + + # Should complete efficiently + assert elapsed_time < 1.5 + assert len(loggers) == 30 + + def test_memory_usage_with_registry(self): + """Test that registry doesn't cause excessive memory usage.""" + # Create many loggers in registry + for i in range(50): + get_or_create_logger(LoggerType.BASIC, name=f"memory_test_{i}") + + # Verify registry contains expected number + registered = get_registered_loggers() + assert len(registered) == 50 + + # Clear registry + clear_logger_registry() + + # Verify registry is empty + assert len(get_registered_loggers()) == 0 + + @pytest.mark.slow + def test_stress_test_factory_pattern(self): + """Stress test the factory pattern with intensive usage.""" + with tempfile.TemporaryDirectory() as temp_dir: + start_time = time.time() + + # Intensive mixed usage + for i in range(200): + if i % 4 == 0: + logger = get_or_create_logger(LoggerType.BASIC, name="stress_cached") + elif i % 4 == 1: + logger = create_logger("basic", name=f"stress_basic_{i}") + elif i % 4 == 2: + logger = size_rotating_logger( + name=f"stress_size_{i}", + directory=temp_dir, + level=LogLevel.WARNING + ) + else: + logger = LoggerFactory.create_logger( + LoggerType.TIMED_ROTATING, + name=f"stress_timed_{i}", + directory=temp_dir, + when="midnight" + ) + + # Actually use the logger + logger.info(f"Stress 
test message {i}") + + elapsed_time = time.time() - start_time + + # Should complete in reasonable time even under stress + assert elapsed_time < 5.0 # 5 seconds max for 200 loggers + + # Verify registry has cached logger + assert "stress_cached" in get_registered_loggers() diff --git a/tests/test_performance_zoneinfo.py b/tests/test_performance_zoneinfo.py new file mode 100644 index 0000000..61fecaf --- /dev/null +++ b/tests/test_performance_zoneinfo.py @@ -0,0 +1,312 @@ +#!/usr/bin/env python3 +"""Performance tests for zoneinfo vs pytz migration.""" +import os +import sys +import tempfile +import time +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import ( + basic_logger, + size_rotating_logger, + LogLevel, + clear_logger_registry, +) + + +class TestZoneinfoPerformance: + """Performance tests for zoneinfo timezone operations.""" + + def setup_method(self): + """Clear registry and caches before each test.""" + clear_logger_registry() + + # Clear timezone caches + from pythonLogs.log_utils import get_timezone_function, _get_timezone_offset, _get_stderr_timezone + get_timezone_function.cache_clear() + _get_timezone_offset.cache_clear() + _get_stderr_timezone.cache_clear() + + def test_timezone_function_caching_performance(self): + """Test that timezone function caching improves performance.""" + from pythonLogs.log_utils import get_timezone_function + + # First call (not cached) + start_time = time.time() + for _ in range(50): + get_timezone_function("America/New_York") + first_call_time = time.time() - start_time + + # Clear cache and test again + get_timezone_function.cache_clear() + + # Subsequent calls (should be from cache after first) + start_time = time.time() + for _ in range(50): + get_timezone_function("America/New_York") # Same timezone, should be cached + cached_call_time = time.time() - start_time + + # Cached calls should be significantly faster + # Note: Since caching happens after the first call, we expect similar times + # but the cache prevents repeated timezone object creation + assert cached_call_time <= first_call_time * 1.5 # Allow some tolerance + + def test_timezone_offset_caching_performance(self): + """Test timezone offset calculation caching performance.""" + from pythonLogs.log_utils import _get_timezone_offset + + # Test with multiple calls to the same timezone + start_time = time.time() + for _ in range(100): + _get_timezone_offset("UTC") # Should be cached after first call + cached_time = time.time() - start_time + + # Should complete very quickly due to caching + assert cached_time < 0.1 # Should be very fast + + def test_logger_creation_performance_with_timezones(self): + """Test logger creation performance with various timezones.""" + timezones = ["UTC", "localtime", "America/Chicago", "Europe/London"] + + start_time = time.time() + + loggers = [] + for i in range(40): # 10 loggers per timezone + tz = timezones[i % len(timezones)] + logger = basic_logger( + name=f"perf_test_{i}", + timezone=tz, + level=LogLevel.INFO + ) + loggers.append(logger) + + elapsed_time = time.time() - start_time + + # Should complete in reasonable time + assert elapsed_time < 1.0 # 1 second for 40 loggers + assert len(loggers) == 40 + + def test_concurrent_timezone_performance(self): + """Test timezone performance under concurrent access.""" + import threading + + results = [] + + def timezone_worker(worker_id): + start_time = time.time() + + # Create loggers with 
same timezone (should benefit from caching) + for i in range(10): + logger = basic_logger( + name=f"concurrent_{worker_id}_{i}", + timezone="UTC" + ) + logger.info(f"Concurrent test {worker_id}-{i}") + + elapsed = time.time() - start_time + results.append(elapsed) + + # Run concurrent workers + threads = [] + for i in range(5): + thread = threading.Thread(target=timezone_worker, args=(i,)) + threads.append(thread) + thread.start() + + for thread in threads: + thread.join() + + # All workers should complete in reasonable time + assert len(results) == 5 + for elapsed in results: + assert elapsed < 0.5 # Each worker should complete quickly + + def test_timezone_memory_efficiency(self): + """Test memory efficiency of timezone caching.""" + try: + import psutil + import os + + # Get initial memory usage + process = psutil.Process(os.getpid()) + initial_memory = process.memory_info().rss + + # Create many loggers with same timezone + loggers = [] + for i in range(200): + logger = basic_logger( + name=f"memory_test_{i}", + timezone="America/New_York" # Same timezone for all + ) + loggers.append(logger) + + # Get memory usage after logger creation + after_creation_memory = process.memory_info().rss + + # Clear loggers + loggers.clear() + clear_logger_registry() + + # Memory increase should be reasonable (not linear with the number of loggers) + memory_increase = after_creation_memory - initial_memory + + # Allow up to 50MB increase for 200 loggers (should be much less with caching) + assert memory_increase < 50 * 1024 * 1024 # 50MB + + except ImportError: + # psutil not available, just test that we can create many loggers without crashing + loggers = [] + for i in range(200): + logger = basic_logger( + name=f"memory_test_{i}", + timezone="America/New_York" + ) + loggers.append(logger) + + # If we get here without errors, memory usage is acceptable + assert len(loggers) == 200 + + # Clear loggers + loggers.clear() + clear_logger_registry() + + def test_timezone_function_performance_comparison(self): + """Compare performance of different timezone function types.""" + from pythonLogs.log_utils import get_timezone_function + + # Test UTC (should return time.gmtime - fastest) + start_time = time.time() + for _ in range(1000): + func = get_timezone_function("UTC") + func() # Call the function + utc_time = time.time() - start_time + + # Test localtime (should return time.localtime - fast) + start_time = time.time() + for _ in range(1000): + func = get_timezone_function("localtime") + func() # Call the function + local_time = time.time() - start_time + + # Test named timezone (custom function - should be reasonable) + start_time = time.time() + for _ in range(1000): + func = get_timezone_function("America/Chicago") + func() # Call the function + named_time = time.time() - start_time + + # UTC and localtime should be fastest (native functions) + # Named timezone will be slower but should still be reasonable + assert utc_time < 0.1 + assert local_time < 0.1 + assert named_time < 1.0 # Allow more time for named timezones + + def test_bulk_logger_creation_performance(self): + """Test performance when creating many loggers with timezones.""" + start_time = time.time() + + # Create 100 loggers with various timezones + timezones = ["UTC", "localtime", "America/New_York", "Europe/Paris", "Asia/Tokyo"] + + for i in range(100): + tz = timezones[i % len(timezones)] + logger = basic_logger( + name=f"bulk_test_{i}", + timezone=tz + ) + # Actually use the logger to ensure it's fully initialized + logger.info(f"Bulk 
test message {i}") + + elapsed_time = time.time() - start_time + + # Should complete in reasonable time (less than 2 seconds for 100 loggers) + assert elapsed_time < 2.0 + + def test_file_logger_timezone_performance(self): + """Test performance of file-based loggers with timezones.""" + with tempfile.TemporaryDirectory() as temp_dir: + start_time = time.time() + + # Create file-based loggers with timezones + for i in range(20): + logger = size_rotating_logger( + name=f"file_tz_test_{i}", + directory=temp_dir, + timezone="America/Chicago", + level=LogLevel.INFO, + streamhandler=False + ) + + # Write some log messages + for j in range(5): + logger.info(f"File timezone test {i}-{j}") + + elapsed_time = time.time() - start_time + + # Should complete in reasonable time + assert elapsed_time < 1.5 # 1.5 seconds for 20 file loggers with 100 messages + + @pytest.mark.slow + def test_stress_test_timezone_operations(self): + """Stress test timezone operations for performance and stability.""" + import threading + import random + + timezones = [ + "UTC", + "localtime", + "America/New_York", + "America/Chicago", + "America/Los_Angeles", + "Europe/London", + "Europe/Paris", + "Europe/Berlin", + "Asia/Tokyo", + "Asia/Shanghai", + "Australia/Sydney" + ] + + errors = [] + + def stress_worker(worker_id): + try: + for __i in range(50): + # Random timezone selection + tz = random.choice(timezones) + + logger = basic_logger( + name=f"stress_{worker_id}_{__i}", + timezone=tz, + level=LogLevel.INFO + ) + logger.info(f"Stress test message {worker_id}-{__i} with {tz}") + + # Small delay to simulate real usage + time.sleep(0.001) + + except Exception as e: + errors.append((worker_id, e)) + + # Run stress test with multiple workers + start_time = time.time() + + threads = [] + for _i in range(10): # 10 workers + thread = threading.Thread(target=stress_worker, args=(_i,)) + threads.append(thread) + thread.start() + + for thread in threads: + thread.join() + + elapsed_time = time.time() - start_time + + # Should complete without errors + assert len(errors) == 0, f"Errors during stress test: {errors}" + + # Should complete in reasonable time (10 workers * 50 operations each) + assert elapsed_time < 30.0 # 30 seconds for 500 total operations diff --git a/tests/test_resource_management.py b/tests/test_resource_management.py new file mode 100644 index 0000000..154c97e --- /dev/null +++ b/tests/test_resource_management.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +import logging +import os +import sys +import tempfile +import time + + +# Add the parent directory to sys.path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import pytest +from pythonLogs import ( + LoggerFactory, basic_logger, + size_rotating_logger, + clear_logger_registry, + shutdown_logger, + get_registered_loggers, + LogLevel +) + + +class TestResourceManagement: + """Test resource management functionality.""" + + def setup_method(self): + """Set up test fixtures before each test method.""" + # Clear any existing loggers + clear_logger_registry() + + # Create temporary directory for log files + self.temp_dir = tempfile.mkdtemp() + self.log_file = "resource_test.log" + + def teardown_method(self): + """Clean up after each test method.""" + # Clear registry after each test + clear_logger_registry() + + # Clean up temporary files + import shutil + if os.path.exists(self.temp_dir): + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def test_factory_registry_cleanup(self): + """Test that factory registry 
cleanup properly closes handlers.""" + logger_name = "test_registry_cleanup" + + # Create logger through factory + logger = LoggerFactory.create_size_rotating_logger( + name=logger_name, + directory=self.temp_dir, + filenames=[self.log_file], + maxmbytes=1 + ) + + # Add to registry + LoggerFactory._logger_registry[logger_name] = (logger, time.time()) + + # Verify logger has handlers + assert len(logger.handlers) > 0 + initial_handler_count = len(logger.handlers) + + # Clear registry + LoggerFactory.clear_registry() + + # Verify handlers were closed and removed + assert len(logger.handlers) == 0 + assert len(LoggerFactory._logger_registry) == 0 + + def test_shutdown_specific_logger(self): + """Test shutting down a specific logger.""" + logger1_name = "test_logger_1" + logger2_name = "test_logger_2" + + # Create two loggers + logger1 = basic_logger(name=logger1_name, level=LogLevel.INFO.value) + logger2 = basic_logger(name=logger2_name, level=LogLevel.DEBUG.value) + + # Add to registry manually for testing + LoggerFactory._logger_registry[logger1_name] = (logger1, time.time()) + LoggerFactory._logger_registry[logger2_name] = (logger2, time.time()) + + # Verify both are in registry + assert len(get_registered_loggers()) == 2 + + # Shutdown only logger1 + result = shutdown_logger(logger1_name) + assert result is True + + # Verify logger1 was removed and cleaned up + assert logger1_name not in get_registered_loggers() + assert len(logger1.handlers) == 0 + + # Verify logger2 is still active + assert logger2_name in get_registered_loggers() + assert len(get_registered_loggers()) == 1 + + def test_shutdown_nonexistent_logger(self): + """Test shutting down a logger that doesn't exist.""" + result = shutdown_logger("nonexistent_logger") + assert result is False + + def test_handler_cleanup_static_method(self): + """Test the static cleanup method directly.""" + from pythonLogs.basic_log import BasicLog + + # Create a logger with handlers + logger = logging.getLogger("test_static_cleanup") + handler1 = logging.StreamHandler() + handler2 = logging.StreamHandler() + + logger.addHandler(handler1) + logger.addHandler(handler2) + + assert len(logger.handlers) == 2 + + # Use static cleanup method + BasicLog.cleanup_logger(logger) + + # Verify all handlers were cleaned up + assert len(logger.handlers) == 0 + + def test_handler_cleanup_with_errors(self): + """Test handler cleanup handles errors gracefully.""" + from pythonLogs.basic_log import BasicLog + + logger = logging.getLogger("test_error_cleanup") + + # Create a mock handler that raises an error on close + class ErrorHandler(logging.Handler): + def close(self): + raise OSError("Mock error during close") + + error_handler = ErrorHandler() + normal_handler = logging.StreamHandler() + + logger.addHandler(error_handler) + logger.addHandler(normal_handler) + + assert len(logger.handlers) == 2 + + # Cleanup should handle errors and still remove handlers + BasicLog.cleanup_logger(logger) + + # All handlers should be removed despite errors + assert len(logger.handlers) == 0 + + def test_registry_clear_with_file_handlers(self): + """Test registry cleanup with file handlers.""" + logger_name = "test_file_handlers" + + # Create logger with file handlers + logger = LoggerFactory.create_size_rotating_logger( + name=logger_name, + directory=self.temp_dir, + filenames=[self.log_file, "second.log"], + maxmbytes=1, + streamhandler=True # Add stream handler too + ) + + # Add to registry + LoggerFactory._logger_registry[logger_name] = (logger, time.time()) + + # Write 
some data to verify handlers are working + logger.info("Test message before cleanup") + + # Verify we have multiple handlers + file_handlers = [h for h in logger.handlers if hasattr(h, 'baseFilename')] + stream_handlers = [h for h in logger.handlers if isinstance(h, logging.StreamHandler)] + + assert len(file_handlers) == 2 # Two file handlers + assert len(stream_handlers) > 0 # At least one stream handler + + # Clear registry + clear_logger_registry() + + # Verify all handlers cleaned up + assert len(logger.handlers) == 0 + assert len(get_registered_loggers()) == 0 + + def test_resource_cleanup_performance(self): + """Test that resource cleanup doesn't cause performance issues.""" + num_loggers = 10 + logger_names = [f"perf_test_logger_{i}" for i in range(num_loggers)] + + # Create multiple loggers + start_time = time.time() + for name in logger_names: + logger = size_rotating_logger( + name=name, + directory=self.temp_dir, + filenames=[f"{name}.log"], + maxmbytes=1 + ) + LoggerFactory._logger_registry[name] = (logger, time.time()) + + creation_time = time.time() - start_time + + # Verify all created + assert len(get_registered_loggers()) == num_loggers + + # Clear all at once + cleanup_start = time.time() + clear_logger_registry() + cleanup_time = time.time() - cleanup_start + + # Verify cleanup completed + assert len(get_registered_loggers()) == 0 + + # Performance should be reasonable (less than 1 second for 10 loggers) + assert cleanup_time < 1.0 + print(f"Created {num_loggers} loggers in {creation_time:.4f}s") + print(f"Cleaned up {num_loggers} loggers in {cleanup_time:.4f}s") + + def test_memory_usage_after_cleanup(self): + """Test that memory is properly released after cleanup.""" + import gc + import weakref + + logger_name = "memory_test_logger" + + # Create logger and get weak reference + logger = size_rotating_logger( + name=logger_name, + directory=self.temp_dir, + filenames=[self.log_file], + maxmbytes=1 + ) + + # Add to registry + LoggerFactory._logger_registry[logger_name] = (logger, time.time()) + + # Create weak reference to track if logger is garbage collected + logger_weakref = weakref.ref(logger) + handler_weakrefs = [weakref.ref(h) for h in logger.handlers] + + # Clear local reference + del logger + + # Logger should still exist due to registry + assert logger_weakref() is not None + + # Clear registry + clear_logger_registry() + + # Force garbage collection + gc.collect() + + # Logger should be garbage collected + # Note: This test might be flaky depending on Python's garbage collector + # but it helps verify we're not holding unnecessary references + print(f"Logger weakref after cleanup: {logger_weakref()}") + + def test_concurrent_cleanup(self): + """Test resource cleanup works correctly with concurrent access.""" + import concurrent.futures + + def create_and_cleanup_logger(index): + """Create a logger and immediately clean it up.""" + logger_name = f"concurrent_test_{index}" + logger = basic_logger(name=logger_name, level=LogLevel.INFO.value) + + # Add to registry + LoggerFactory._logger_registry[logger_name] = (logger, time.time()) + + # Small delay to increase chance of concurrent access + time.sleep(0.01) + + # Shutdown this specific logger + return shutdown_logger(logger_name) + + # Create multiple threads doing concurrent operations + num_threads = 5 + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor: + futures = [executor.submit(create_and_cleanup_logger, i) for i in range(num_threads)] + + # Wait for all to complete + results 
= [future.result() for future in concurrent.futures.as_completed(futures)] + + # All operations should succeed + assert all(results) + + # The Registry should be empty + assert len(get_registered_loggers()) == 0 + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_size_rotating.py b/tests/test_size_rotating.py new file mode 100644 index 0000000..bfc0f67 --- /dev/null +++ b/tests/test_size_rotating.py @@ -0,0 +1,323 @@ +#!/usr/bin/env python3 +"""Test the size rotating logger implementation.""" +import logging +import os +import sys +import tempfile +from pathlib import Path +from unittest.mock import Mock, patch +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs.size_rotating import SizeRotatingLog, GZipRotatorSize + + +class TestSizeRotatingLog: + """Test cases for the SizeRotatingLog class.""" + + def test_size_rotating_log_initialization(self): + """Test SizeRotatingLog initialization with default settings.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + name="test_size", + directory=temp_dir, + level="INFO" + ) + assert size_log.appname == "test_size" + assert size_log.directory == temp_dir + assert size_log.level == logging.INFO + + def test_size_rotating_log_initialization_with_all_params(self): + """Test SizeRotatingLog initialization with all parameters.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + level="DEBUG", + name="test_comprehensive", + directory=temp_dir, + filenames=["test1.log", "test2.log"], + maxmbytes=10, + daystokeep=30, + encoding="utf-8", + datefmt="%Y-%m-%d %H:%M:%S", + timezone="UTC", + streamhandler=True, + showlocation=True + ) + assert size_log.appname == "test_comprehensive" + assert size_log.filenames == ["test1.log", "test2.log"] + assert size_log.maxmbytes == 10 + assert size_log.daystokeep == 30 + assert size_log.streamhandler is True + assert size_log.showlocation is True + + def test_size_rotating_log_init_method(self): + """Test the init method of SizeRotatingLog.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + name="test_init", + directory=temp_dir, + filenames=["test.log"] + ) + logger = size_log.init() + + assert isinstance(logger, logging.Logger) + assert logger.name == "test_init" + assert len(logger.handlers) > 0 + + def test_size_rotating_log_context_manager(self): + """Test SizeRotatingLog as context manager.""" + with tempfile.TemporaryDirectory() as temp_dir: + with SizeRotatingLog( + name="test_context", + directory=temp_dir + ) as logger: + assert isinstance(logger, logging.Logger) + assert logger.name == "test_context" + logger.info("Test message in context") + + def test_size_rotating_log_context_manager_cleanup(self): + """Test context manager cleanup functionality.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + name="test_cleanup", + directory=temp_dir + ) + + with size_log as logger: + initial_handler_count = len(logger.handlers) + assert initial_handler_count > 0 + + # After context exit, handlers should be cleaned up + final_handler_count = len(logger.handlers) + assert final_handler_count == 0 + + def test_size_rotating_log_multiple_files(self): + """Test SizeRotatingLog with multiple log files.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + name="test_multiple", + directory=temp_dir, + filenames=["app.log", 
"error.log", "debug.log"] + ) + logger = size_log.init() + + # Should have handlers for each file (plus stream handler if enabled) + file_handlers = [h for h in logger.handlers if isinstance(h, logging.handlers.RotatingFileHandler)] + assert len(file_handlers) == 3 + + def test_size_rotating_log_with_stream_handler(self): + """Test SizeRotatingLog with stream handler enabled.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + name="test_stream", + directory=temp_dir, + streamhandler=True + ) + logger = size_log.init() + + # Should have both file and stream handlers + stream_handlers = [h for h in logger.handlers if isinstance(h, logging.StreamHandler)] + file_handlers = [h for h in logger.handlers if isinstance(h, logging.handlers.RotatingFileHandler)] + + assert len(stream_handlers) >= 1 + assert len(file_handlers) >= 1 + + def test_size_rotating_log_cleanup_logger_error_handling(self): + """Test error handling in _cleanup_logger method.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + name="test_error_cleanup", + directory=temp_dir + ) + logger = size_log.init() + + # Add a mock handler that will raise an error on close + mock_handler = Mock() + mock_handler.close.side_effect = OSError("Test error") + logger.addHandler(mock_handler) + + # Should handle the error gracefully + SizeRotatingLog.cleanup_logger(logger) + + # Mock handler should still be removed despite the error + assert mock_handler not in logger.handlers + + def test_size_rotating_log_invalid_filenames(self): + """Test SizeRotatingLog with invalid filenames' parameter.""" + with tempfile.TemporaryDirectory() as temp_dir: + size_log = SizeRotatingLog( + name="test_invalid", + directory=temp_dir, + filenames="invalid_string" # Should be list or tuple + ) + + with pytest.raises(TypeError, match="Unable to parse filenames"): + size_log.init() + + def test_size_rotating_log_actual_logging(self): + """Test actual logging functionality with file creation.""" + with tempfile.TemporaryDirectory() as temp_dir: + log_file = "test_actual.log" + size_log = SizeRotatingLog( + name="test_actual", + directory=temp_dir, + filenames=[log_file], + level="INFO" + ) + logger = size_log.init() + + # Log some messages + logger.info("Test info message") + logger.warning("Test warning message") + logger.error("Test error message") + + # Check that log file was created + log_path = Path(temp_dir) / log_file + assert log_path.exists() + + # Check log content + log_content = log_path.read_text() + assert "Test info message" in log_content + assert "Test warning message" in log_content + assert "Test error message" in log_content + + +class TestGZipRotatorSize: + """Test cases for the GZipRotatorSize class.""" + + def test_gzip_rotator_size_initialization(self): + """Test GZipRotatorSize initialization.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorSize(temp_dir, 7) + assert rotator.directory == temp_dir + assert rotator.daystokeep == 7 + + def test_gzip_rotator_size_call_with_empty_file(self): + """Test GZipRotatorSize with empty source file.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorSize(temp_dir, 7) + + # Create empty source file + source_file = Path(temp_dir) / "empty.log" + source_file.touch() + + # Should not process empty files + rotator(str(source_file), "dest.log") + + # Source file should still exist (not processed) + assert source_file.exists() + + def test_gzip_rotator_size_call_with_content(self): + """Test 
GZipRotatorSize with file containing content.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorSize(temp_dir, 7) + + # Create source file with content + source_file = Path(temp_dir) / "content.log" + source_file.write_text("Test log content\nMore content\n") + + # Process the file + rotator(str(source_file), "dest.log") + + # Source file should be processed (removed) + assert not source_file.exists() + + # Should have created a gzipped file + gz_files = list(Path(temp_dir).glob("*.gz")) + assert len(gz_files) > 0 + + def test_gzip_rotator_size_get_new_file_number(self): + """Test _get_new_file_number method.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Create some existing numbered log files + (Path(temp_dir) / "test_1.log.gz").touch() + (Path(temp_dir) / "test_3.log.gz").touch() + (Path(temp_dir) / "test_5.log.gz").touch() + (Path(temp_dir) / "other_2.log.gz").touch() # Different filename + + # Should return 6 (max + 1 for "test" files) + new_number = GZipRotatorSize._get_new_file_number(temp_dir, "test") + assert new_number == 6 + + def test_gzip_rotator_size_get_new_file_number_no_existing_files(self): + """Test _get_new_file_number with no existing files.""" + with tempfile.TemporaryDirectory() as temp_dir: + new_number = GZipRotatorSize._get_new_file_number(temp_dir, "test") + assert new_number == 1 + + def test_gzip_rotator_size_get_new_file_number_with_special_chars(self): + """Test _get_new_file_number with special characters in filename.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Create files with special characters that need escaping + filename = "test-app[special]" + (Path(temp_dir) / f"{filename}_1.log.gz").touch() + (Path(temp_dir) / f"{filename}_2.log.gz").touch() + + new_number = GZipRotatorSize._get_new_file_number(temp_dir, filename) + assert new_number == 3 + + def test_gzip_rotator_size_error_handling(self): + """Test GZipRotatorSize error handling for directory access.""" + # Test with non-existent directory + rotator = GZipRotatorSize("/non/existent/directory", 7) + + # Should handle OSError gracefully and return 1 + new_number = GZipRotatorSize._get_new_file_number("/non/existent/directory", "test") + assert new_number == 1 + + def test_gzip_rotator_size_call_with_nonexistent_source(self): + """Test GZipRotatorSize with non-existent source file.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorSize(temp_dir, 7) + + # Call with non-existent source file + rotator("/non/existent/file.log", "dest.log") + + # Should not crash and not create any files + gz_files = list(Path(temp_dir).glob("*.gz")) + assert len(gz_files) == 0 + + @patch('pythonLogs.size_rotating.remove_old_logs') + def test_gzip_rotator_size_calls_remove_old_logs(self, mock_remove_old_logs): + """Test that GZipRotatorSize calls remove_old_logs.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorSize(temp_dir, 7) + + # Create source file with content + source_file = Path(temp_dir) / "test.log" + source_file.write_text("Test content") + + rotator(str(source_file), "dest.log") + + # Should have called remove_old_logs + mock_remove_old_logs.assert_called_once_with(temp_dir, 7) + + def test_gzip_rotator_size_integration(self): + """Test GZipRotatorSize integration with actual rotation.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Setup SizeRotatingLog with GZipRotatorSize + size_log = SizeRotatingLog( + name="integration_test", + directory=temp_dir, + filenames=["test.log"], + maxmbytes=1, # Small 
size to trigger rotation + daystokeep=5 + ) + logger = size_log.init() + + # Log enough content to potentially trigger rotation + large_message = "A" * 1000 # 1KB message + for i in range(50): # 50KB total + logger.info(f"Message {i}: {large_message}") + + # Force handlers to flush + for handler in logger.handlers: + if hasattr(handler, 'flush'): + handler.flush() + + # Verify log file exists + log_files = list(Path(temp_dir).glob("*.log")) + assert len(log_files) > 0 diff --git a/tests/test_some_log_utils.py b/tests/test_some_log_utils.py new file mode 100644 index 0000000..f8bd699 --- /dev/null +++ b/tests/test_some_log_utils.py @@ -0,0 +1,390 @@ +# -*- encoding: utf-8 -*- +import contextlib +import io +import logging +import os +import shutil +import sys +import tempfile +import time +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import log_utils + + +class TestLogUtils: + @classmethod + def setup_class(cls): + """setup_class""" + pass + + @classmethod + def teardown_class(cls): + """teardown_class""" + pass + + def test_get_stream_handler(self): + level = log_utils.get_level("DEBUG") + _, formatter = log_utils.get_logger_and_formatter("appname", "%Y-%m-%dT%H:%M:%S", False, "UTC") + stream_hdlr = log_utils.get_stream_handler(level, formatter) + assert isinstance(stream_hdlr, logging.StreamHandler) + + def test_check_filename_instance(self): + filenames = "test1.log" + with pytest.raises(TypeError) as exec_info: + log_utils.check_filename_instance(filenames) + assert type(exec_info.value) is TypeError + assert filenames in str(exec_info.value) + assert "Unable to parse filenames" in str(exec_info.value) + + def test_check_directory_permissions(self): + # Test permission error on access + directory = os.path.join(tempfile.gettempdir(), "test_permission") + os.makedirs(directory, mode=0o000, exist_ok=True) # No permissions at all + assert os.path.exists(directory) == True + with pytest.raises(PermissionError) as exec_info: + log_utils.check_directory_permissions(directory) + os.chmod(directory, 0o755) # Restore permissions for cleanup + assert type(exec_info.value) is PermissionError + assert "Unable to access directory" in str(exec_info.value) + log_utils.delete_file(directory) + assert os.path.exists(directory) == False + + # test permission error on creation + directory = "/non-existent-directory" + with pytest.raises(PermissionError) as exec_info: + log_utils.check_directory_permissions(directory) + assert type(exec_info.value) is PermissionError + assert "Unable to create directory" in str(exec_info.value) + + def test_remove_old_logs(self): + directory = os.path.join(tempfile.gettempdir(), "test_remove_logs") + os.makedirs(directory, mode=0o755, exist_ok=True) + assert os.path.exists(directory) == True + + # Create a file and manually set its modification time to be old + with tempfile.NamedTemporaryFile(dir=directory, suffix=".gz", delete=False) as tmpfile: + file_path = tmpfile.name + old_time = time.time() - 2*24*60*60 # 2 days old + os.utime(file_path, (old_time, old_time)) + + log_utils.remove_old_logs(directory, 1) # Remove files older than 1 day + assert os.path.isfile(file_path) == False + log_utils.delete_file(directory) + assert os.path.exists(directory) == False + + def test_delete_file(self): + directory = tempfile.gettempdir() + tmpfilewrapper = tempfile.NamedTemporaryFile(dir=directory, suffix=".log") + file_path = tmpfilewrapper.name + assert 
os.path.isfile(file_path) == True + log_utils.delete_file(file_path) + assert os.path.isfile(file_path) == False + + def test_is_older_than_x_days(self): + directory = tempfile.gettempdir() + tmpfilewrapper = tempfile.NamedTemporaryFile(dir=directory, suffix=".log") + file_path = tmpfilewrapper.name + assert os.path.isfile(file_path) == True + + result = log_utils.is_older_than_x_days(file_path, 1) + assert result == True + + result = log_utils.is_older_than_x_days(file_path, 5) + assert result == False + + log_utils.delete_file(file_path) + assert os.path.isfile(file_path) == False + + def test_get_level(self): + level = log_utils.get_level(11111111) + assert level == logging.INFO + + level = log_utils.get_level("") + assert level == logging.INFO + + level = log_utils.get_level("INFO") + assert level == logging.INFO + + level = log_utils.get_level("DEBUG") + assert level == logging.DEBUG + + level = log_utils.get_level("WARNING") + assert level == logging.WARNING + + level = log_utils.get_level("ERROR") + assert level == logging.ERROR + + level = log_utils.get_level("CRITICAL") + assert level == logging.CRITICAL + + def test_get_log_path(self): + temp_dir = tempfile.mkdtemp() + test_file = "test.log" + try: + # Test 1: Valid directory should return the correct path + result = log_utils.get_log_path(temp_dir, test_file) + assert result == os.path.join(temp_dir, test_file) + + # Test 2: Directory that gets created should work fine + new_dir = os.path.join(temp_dir, "newdir") + result = log_utils.get_log_path(new_dir, test_file) + assert result == os.path.join(new_dir, test_file) + assert os.path.exists(new_dir) # Should have been created + + # Test 3: Existing but non-writable directory should raise PermissionError + readonly_dir = os.path.join(temp_dir, "readonly") + os.makedirs(readonly_dir, mode=0o555) + try: + with pytest.raises(PermissionError) as exc_info: + log_utils.get_log_path(readonly_dir, test_file) + assert "Unable to access directory" in str(exc_info.value) + finally: + os.chmod(readonly_dir, 0o755) # Cleanup permissions + os.rmdir(readonly_dir) + finally: + shutil.rmtree(temp_dir) + + def test_get_format(self): + show_location = True + name = "test1" + timezone = "UTC" + result = log_utils.get_format(show_location, name, timezone) + assert result == ( + f"[%(asctime)s.%(msecs)03d+0000]:[%(levelname)s]:[{name}]:" + "[%(filename)s:%(funcName)s:%(lineno)d]:%(message)s" + ) + + show_location = False + name = "test2" + timezone = "America/Los_Angeles" + result = log_utils.get_format(show_location, name, timezone) + assert result.startswith(f"[%(asctime)s.%(msecs)03d-0") + assert result.endswith(f"]:[%(levelname)s]:[{name}]:%(message)s") + + show_location = False + name = "test3" + timezone = "Australia/Queensland" + result = log_utils.get_format(show_location, name, timezone) + assert result == f"[%(asctime)s.%(msecs)03d+1000]:[%(levelname)s]:[{name}]:%(message)s" + + def test_gzip_file_with_sufix(self): + directory = tempfile.gettempdir() + tmpfilewrapper = tempfile.NamedTemporaryFile(dir=directory, suffix=".log") + file_path = tmpfilewrapper.name + assert os.path.isfile(file_path) == True + sufix = "test1" + result = log_utils.gzip_file_with_sufix(file_path, sufix) + file_path_no_suffix = file_path.split(".")[0] + assert result == f"{file_path_no_suffix}_{sufix}.log.gz" + log_utils.delete_file(result) + assert os.path.isfile(result) == False + + # test a non-existent file + file_path = "/non-existent-directory/test2.log" + sufix = "test2" + result = 
log_utils.gzip_file_with_sufix(file_path, sufix) + assert result is None + + def test_get_timezone_function(self): + timezone = "UTC" + result = log_utils.get_timezone_function(timezone) + assert result.__name__ == "gmtime" + + timezone = "localtime" + result = log_utils.get_timezone_function(timezone) + assert result.__name__ == "localtime" + + timezone = "America/Los_Angeles" + result = log_utils.get_timezone_function(timezone) + assert result.__name__ == "" + + def test_write_stderr(self): + """Test write_stderr function output""" + # Capture stderr output + stderr_capture = io.StringIO() + with contextlib.redirect_stderr(stderr_capture): + log_utils.write_stderr("Test error message") + + output = stderr_capture.getvalue() + assert "ERROR" in output + assert "Test error message" in output + assert output.startswith("[") # Should start with timestamp + + def test_write_stderr_with_timezone_error(self): + """Test write_stderr fallback when timezone fails""" + # Set invalid timezone to trigger fallback + original_tz = os.environ.get("LOG_TIMEZONE") + os.environ["LOG_TIMEZONE"] = "Invalid/Timezone" + + try: + stderr_capture = io.StringIO() + with contextlib.redirect_stderr(stderr_capture): + log_utils.write_stderr("Test fallback message") + + output = stderr_capture.getvalue() + assert "ERROR" in output + assert "Test fallback message" in output + finally: + # Restore original timezone + if original_tz is not None: + os.environ["LOG_TIMEZONE"] = original_tz + elif "LOG_TIMEZONE" in os.environ: + del os.environ["LOG_TIMEZONE"] + + def test_get_logger_and_formatter(self): + """Test get_logger_and_formatter function""" + name = "test_logger" + datefmt = "%Y-%m-%d %H:%M:%S" + show_location = True + timezone = "UTC" + + logger, formatter = log_utils.get_logger_and_formatter(name, datefmt, show_location, timezone) + + assert isinstance(logger, logging.Logger) + assert isinstance(formatter, logging.Formatter) + assert logger.name == name + assert formatter.datefmt == datefmt + + def test_get_logger_and_formatter_cleanup(self): + """Test that get_logger_and_formatter properly cleans up existing handlers""" + name = "test_cleanup_logger" + + # Create a logger with existing handlers + logger = logging.getLogger(name) + handler = logging.StreamHandler() + logger.addHandler(handler) + initial_handler_count = len(logger.handlers) + assert initial_handler_count > 0 + + # Call get_logger_and_formatter + new_logger, formatter = log_utils.get_logger_and_formatter(name, "%Y-%m-%d", False, "UTC") + + # Should be the same logger but with handlers cleaned up + assert new_logger is logger + assert len(new_logger.handlers) == 0 + + def test_timezone_offset_caching(self): + """Test _get_timezone_offset function via get_format""" + # Test UTC timezone + format1 = log_utils.get_format(False, "test", "UTC") + format2 = log_utils.get_format(False, "test", "UTC") + assert "+0000" in format1 + assert format1 == format2 # Should be identical due to caching + + # Test localtime + format3 = log_utils.get_format(False, "test", "localtime") + assert format3 is not None + + def test_stderr_timezone_caching(self): + """Test _get_stderr_timezone function via write_stderr""" + # Test with UTC + original_tz = os.environ.get("LOG_TIMEZONE") + os.environ["LOG_TIMEZONE"] = "UTC" + + try: + stderr_capture = io.StringIO() + with contextlib.redirect_stderr(stderr_capture): + log_utils.write_stderr("Test UTC message") + + output = stderr_capture.getvalue() + assert "+0000" in output or "Z" in output # UTC timezone indicator + finally: + 
if original_tz is not None: + os.environ["LOG_TIMEZONE"] = original_tz + elif "LOG_TIMEZONE" in os.environ: + del os.environ["LOG_TIMEZONE"] + + def test_stderr_timezone_localtime(self): + """Test _get_stderr_timezone with localtime""" + original_tz = os.environ.get("LOG_TIMEZONE") + os.environ["LOG_TIMEZONE"] = "localtime" + + try: + stderr_capture = io.StringIO() + with contextlib.redirect_stderr(stderr_capture): + log_utils.write_stderr("Test localtime message") + + output = stderr_capture.getvalue() + assert "Test localtime message" in output + finally: + if original_tz is not None: + os.environ["LOG_TIMEZONE"] = original_tz + elif "LOG_TIMEZONE" in os.environ: + del os.environ["LOG_TIMEZONE"] + + def test_get_level_edge_cases(self): + """Test get_level with various edge cases""" + # Test with non-string input (already tested in existing test) + level = log_utils.get_level(None) + assert level == logging.INFO + + # Test with empty string (already tested) + level = log_utils.get_level("") + assert level == logging.INFO + + # Test case sensitivity + level = log_utils.get_level("debug") + assert level == logging.DEBUG + + level = log_utils.get_level("WARN") + assert level == logging.WARNING + + level = log_utils.get_level("crit") + assert level == logging.CRITICAL + + def test_is_older_than_x_days_edge_cases(self): + """Test is_older_than_x_days with edge cases""" + with tempfile.NamedTemporaryFile() as tmp_file: + # Test with days = 0 + result = log_utils.is_older_than_x_days(tmp_file.name, 0) + assert result == True # Should use current time as cutoff + + # Test with non-existent file + with pytest.raises(FileNotFoundError): + log_utils.is_older_than_x_days("/non/existent/file.log", 1) + + # Test with invalid days parameter + with pytest.raises(ValueError): + log_utils.is_older_than_x_days(tmp_file.name, "invalid") + + def test_delete_file_edge_cases(self): + """Test delete_file with different file types""" + # Test with non-existent file + non_existent = "/tmp/non_existent_file_test.log" + with pytest.raises(FileNotFoundError): + log_utils.delete_file(non_existent) + + def test_gzip_file_error_handling(self): + """Test gzip_file_with_sufix error handling""" + # Test with non-existent source file + result = log_utils.gzip_file_with_sufix("/non/existent/file.log", "test") + assert result is None + + def test_remove_old_logs_edge_cases(self): + """Test remove_old_logs with edge cases""" + with tempfile.TemporaryDirectory() as temp_dir: + # Test with days_to_keep = 0 (should return early) + log_utils.remove_old_logs(temp_dir, 0) # Should not raise error + + # Test with negative days + log_utils.remove_old_logs(temp_dir, -1) # Should return early + + # Test with non-existent directory (should handle gracefully) + log_utils.remove_old_logs("/non/existent/directory", 1) # Should not crash + + def test_check_directory_permissions_caching(self): + """Test that directory permission checking uses caching""" + with tempfile.TemporaryDirectory() as temp_dir: + # First call should add to cache + log_utils.check_directory_permissions(temp_dir) + + # Second call should use cache (no exception should be raised) + log_utils.check_directory_permissions(temp_dir) + + # Verify it's in the cache by checking the global variable + assert temp_dir in log_utils._checked_directories diff --git a/tests/test_string_levels.py b/tests/test_string_levels.py new file mode 100644 index 0000000..fa563e2 --- /dev/null +++ b/tests/test_string_levels.py @@ -0,0 +1,256 @@ +# -*- coding: utf-8 -*- +import logging +import os 
+import sys +import tempfile + + +# Add the parent directory to sys.path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import pytest +from pythonLogs import ( + basic_logger, + size_rotating_logger, + timed_rotating_logger, + LoggerFactory, LogLevel, + RotateWhen, + clear_logger_registry +) + + +class TestStringLevels: + """Test string level support across all logger types and methods.""" + + def setup_method(self): + """Set up test fixtures before each test method.""" + # Clear any existing loggers + clear_logger_registry() + + # Create temporary directory for log files + self.temp_dir = tempfile.mkdtemp() + self.log_file = "string_test.log" + + def teardown_method(self): + """Clean up after each test method.""" + # Clear registry after each test + clear_logger_registry() + + # Clean up temporary files + import shutil + if os.path.exists(self.temp_dir): + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def test_basic_logger_string_levels(self): + """Test BasicLog with string levels.""" + test_cases = [ + ("debug", logging.DEBUG), + ("info", logging.INFO), + ("warning", logging.WARNING), + ("error", logging.ERROR), + ("critical", logging.CRITICAL), + # Test case-insensitive + ("DEBUG", logging.DEBUG), + ("Info", logging.INFO), + ("WARNING", logging.WARNING), + ("Error", logging.ERROR), + ("CRITICAL", logging.CRITICAL), + ] + + for level_str, expected_level in test_cases: + logger = basic_logger( + name=f"test_basic_{level_str}", + level=level_str + ) + assert logger.level == expected_level + assert isinstance(logger, logging.Logger) + + def test_size_rotating_logger_string_levels(self): + """Test SizeRotatingLog with string levels.""" + test_cases = [ + ("debug", logging.DEBUG), + ("info", logging.INFO), + ("warning", logging.WARNING), + ("error", logging.ERROR), + ("critical", logging.CRITICAL), + ] + + for level_str, expected_level in test_cases: + logger = size_rotating_logger( + name=f"test_size_{level_str}", + level=level_str, + directory=self.temp_dir, + filenames=[f"{level_str}.log"], + maxmbytes=1 + ) + assert logger.level == expected_level + assert isinstance(logger, logging.Logger) + + def test_timed_rotating_logger_string_levels(self): + """Test TimedRotatingLog with string levels.""" + test_cases = [ + ("debug", logging.DEBUG), + ("info", logging.INFO), + ("warning", logging.WARNING), + ("error", logging.ERROR), + ("critical", logging.CRITICAL), + ] + + for level_str, expected_level in test_cases: + logger = timed_rotating_logger( + name=f"test_timed_{level_str}", + level=level_str, + directory=self.temp_dir, + filenames=[f"{level_str}.log"], + when=RotateWhen.HOURLY + ) + assert logger.level == expected_level + assert isinstance(logger, logging.Logger) + + def test_factory_create_logger_string_levels(self): + """Test LoggerFactory.create_logger with string levels.""" + test_cases = [ + ("basic", "debug", logging.DEBUG), + ("size_rotating", "info", logging.INFO), + ("timed_rotating", "warning", logging.WARNING), + ] + + for logger_type_str, level_str, expected_level in test_cases: + logger = LoggerFactory.create_logger( + logger_type_str, + name=f"test_factory_{logger_type_str}_{level_str}", + level=level_str, + directory=self.temp_dir if logger_type_str != "basic" else None, + filenames=[f"{level_str}.log"] if logger_type_str != "basic" else None, + maxmbytes=1 if logger_type_str == "size_rotating" else None, + when=RotateWhen.HOURLY if logger_type_str == "timed_rotating" else None + ) + assert logger.level == 
expected_level + assert isinstance(logger, logging.Logger) + + def test_mixed_enum_and_string_usage(self): + """Test mixing enum and string usage in the same application.""" + # Create logger with enum + logger_enum = basic_logger( + name="enum_logger", + level=LogLevel.DEBUG + ) + + # Create logger with string + logger_string = basic_logger( + name="string_logger", + level="debug" + ) + + # Both should have the same level + assert logger_enum.level == logger_string.level == logging.DEBUG + + # Both should be proper logger instances + assert isinstance(logger_enum, logging.Logger) + assert isinstance(logger_string, logging.Logger) + + def test_string_level_aliases(self): + """Test string level aliases (warn, crit).""" + # Test WARN alias for WARNING + logger_warn = basic_logger(name="warn_test", level="warn") + logger_warning = basic_logger(name="warning_test", level="warning") + assert logger_warn.level == logger_warning.level == logging.WARNING + + # Test CRIT alias for CRITICAL + logger_crit = basic_logger(name="crit_test", level="crit") + logger_critical = basic_logger(name="critical_test", level="critical") + assert logger_crit.level == logger_critical.level == logging.CRITICAL + + def test_invalid_string_level_fallback(self): + """Test that invalid string levels fall back to INFO.""" + invalid_levels = ["invalid", "trace", "verbose", "123", ""] + + for invalid_level in invalid_levels: + logger = basic_logger( + name=f"invalid_{invalid_level or 'empty'}", + level=invalid_level + ) + # Should fall back to INFO level + assert logger.level == logging.INFO + + def test_string_levels_with_context_managers(self): + """Test string levels work with context managers.""" + from pythonLogs import BasicLog, SizeRotatingLog + + # Test BasicLog context manager with string level + with BasicLog(name="context_basic", level="warning") as logger: + assert logger.level == logging.WARNING + logger.warning("Test warning message") + + # Test SizeRotatingLog context manager with string level + with SizeRotatingLog( + name="context_size", + level="error", + directory=self.temp_dir, + filenames=["context.log"], + maxmbytes=1 + ) as logger: + assert logger.level == logging.ERROR + logger.error("Test error message") + + def test_factory_registry_with_string_levels(self): + """Test factory registry works with string levels.""" + from pythonLogs import get_or_create_logger + + # Create logger with string level + logger1 = get_or_create_logger("basic", name="registry_test", level="info") + assert logger1.level == logging.INFO + + # Get the same logger again (should be cached) + logger2 = get_or_create_logger("basic", name="registry_test", level="debug") + + # Should be the same instance (registry hit) + assert logger1 is logger2 + # Level should remain as first created (INFO) + assert logger2.level == logging.INFO + + def test_comprehensive_string_level_functionality(self): + """Test comprehensive functionality with string levels.""" + # Create loggers of each type with string levels + basic = basic_logger(name="comp_basic", level="debug") + + size_rotating = size_rotating_logger( + name="comp_size", + level="info", + directory=self.temp_dir, + filenames=["comp_size.log"], + maxmbytes=1, + streamhandler=True + ) + + timed_rotating = timed_rotating_logger( + name="comp_timed", + level="warning", + directory=self.temp_dir, + filenames=["comp_timed.log"], + when="midnight", + streamhandler=True + ) + + # Test logging functionality + basic.debug("Debug message") + basic.info("Info message") + + 
size_rotating.info("Size rotating info") + size_rotating.warning("Size rotating warning") + + timed_rotating.warning("Timed rotating warning") + timed_rotating.error("Timed rotating error") + + # Verify levels are set correctly + assert basic.level == logging.DEBUG + assert size_rotating.level == logging.INFO + assert timed_rotating.level == logging.WARNING + + # Verify all are proper logger instances + assert all(isinstance(logger, logging.Logger) + for logger in [basic, size_rotating, timed_rotating]) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_thread_safety.py b/tests/test_thread_safety.py new file mode 100644 index 0000000..60d2747 --- /dev/null +++ b/tests/test_thread_safety.py @@ -0,0 +1,419 @@ +#!/usr/bin/env python3 +"""Test thread safety of the pythonLogs library.""" +import concurrent.futures +import os +import sys +import tempfile +import threading +import time +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs import ( + LoggerFactory, + LoggerType, + LogLevel, clear_logger_registry, + BasicLog, + SizeRotatingLog, + TimedRotatingLog, +) + + +class TestThreadSafety: + """Test cases for thread safety of logger creation and management.""" + + def setup_method(self): + """Clear registry before each test.""" + clear_logger_registry() + + def teardown_method(self): + """Clear the registry after each test.""" + clear_logger_registry() + + def test_concurrent_logger_factory_creation(self): + """Test concurrent logger creation via factory doesn't create duplicates.""" + results = [] + num_threads = 10 + logger_name = "concurrent_test_logger" + + def create_logger_worker(): + """Worker function to create logger.""" + _logger = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=logger_name, + level=LogLevel.INFO + ) + results.append(_logger) + return _logger + + # Create multiple threads that try to create the same logger + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor: + futures = [executor.submit(create_logger_worker) for _ in range(num_threads)] + loggers = [future.result() for future in concurrent.futures.as_completed(futures)] + + # All loggers should be the exact same instance (cached) + first_logger = loggers[0] + for logger in loggers[1:]: + assert logger is first_logger, "All loggers should be the same cached instance" + + # Registry should only contain one logger + registry = LoggerFactory.get_registered_loggers() + assert len(registry) == 1 + assert logger_name in registry + assert registry[logger_name] is first_logger + + def test_concurrent_registry_operations(self): + """Test concurrent registry operations (create, shutdown, clear).""" + num_threads = 20 + results = {'created': [], 'shutdown': [], 'errors': []} + + def mixed_operations_worker(worker_id): + """Worker that performs mixed registry operations.""" + try: + logger_name = f"test_logger_{worker_id}" + + # Create logger + logger = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=logger_name, + level=LogLevel.DEBUG + ) + results['created'].append(logger_name) + + # Small delay to increase chance of race conditions + time.sleep(0.01) + + # Try to shut down logger + if LoggerFactory.shutdown_logger(logger_name): + results['shutdown'].append(logger_name) + + except Exception as e: + results['errors'].append(str(e)) + + # Run concurrent operations + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) 
as executor: + futures = [executor.submit(mixed_operations_worker, i) for i in range(num_threads)] + for future in concurrent.futures.as_completed(futures): + future.result() + + # No errors should occur + assert len(results['errors']) == 0, f"Errors occurred: {results['errors']}" + + # All created loggers should be accounted for + assert len(results['created']) == num_threads + + # Registry should be consistent + registry = LoggerFactory.get_registered_loggers() + # Some loggers might still be in the registry if shutdown happened after creation + assert len(registry) <= num_threads + + def test_concurrent_directory_cache_access(self): + """Test concurrent access to directory permission cache.""" + import pythonLogs.log_utils as log_utils + + num_threads = 15 + temp_dirs = [] + errors = [] + + def check_directory_worker(worker_id): + """Worker that checks directory permissions.""" + try: + # Create a unique temp directory for each worker + _temp_dir = tempfile.mkdtemp(prefix=f"test_thread_{worker_id}_") + temp_dirs.append(_temp_dir) + + # Multiple calls to the same directory should be safe + for _ in range(3): + log_utils.check_directory_permissions(_temp_dir) + time.sleep(0.001) # Small delay to increase race condition chance + + except Exception as e: + errors.append(str(e)) + + try: + # Clear the directory cache first + log_utils._checked_directories.clear() + + # Run concurrent directory checks + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor: + futures = [executor.submit(check_directory_worker, i) for i in range(num_threads)] + for future in concurrent.futures.as_completed(futures): + future.result() + + # No errors should occur + assert len(errors) == 0, f"Errors occurred: {errors}" + + # All directories should be in cache + assert len(log_utils._checked_directories) == num_threads + + finally: + # Cleanup temp directories + import shutil + for temp_dir in temp_dirs: + if os.path.exists(temp_dir): + shutil.rmtree(temp_dir, ignore_errors=True) + + def test_concurrent_context_manager_cleanup(self): + """Test concurrent context manager cleanup doesn't cause issues.""" + num_threads = 10 + errors = [] + + def context_manager_worker(worker_id): + """Worker that uses logger context managers.""" + try: + with tempfile.TemporaryDirectory() as temp_dir: + # Use different logger types to test all cleanup paths + if worker_id % 3 == 0: + with BasicLog(name=f"basic_{worker_id}", level="INFO") as logger: + logger.info(f"Basic logger message from thread {worker_id}") + elif worker_id % 3 == 1: + with SizeRotatingLog( + name=f"size_{worker_id}", + directory=temp_dir, + level="DEBUG" + ) as logger: + logger.debug(f"Size rotating message from thread {worker_id}") + else: + with TimedRotatingLog( + name=f"timed_{worker_id}", + directory=temp_dir, + level="WARNING" + ) as logger: + logger.warning(f"Timed rotating message from thread {worker_id}") + + except Exception as e: + errors.append(f"Thread {worker_id}: {str(e)}") + + # Run concurrent context manager operations + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor: + futures = [executor.submit(context_manager_worker, i) for i in range(num_threads)] + for future in concurrent.futures.as_completed(futures): + future.result() + + # No errors should occur during cleanup + assert len(errors) == 0, f"Context manager errors: {errors}" + + def test_stress_test_factory_pattern(self): + """Stress test the factory pattern with high concurrency.""" + num_threads = 50 + operations_per_thread = 10 + 
logger_names = [f"stress_logger_{i}" for i in range(5)] # Shared logger names + results = {'success': 0, 'errors': []} + results_lock = threading.Lock() + + def stress_worker(): + """Worker that performs multiple factory operations.""" + try: + for _ in range(operations_per_thread): + # Randomly pick a logger name to increase collision chance + import random + _logger_name = random.choice(logger_names) + + # Create or get logger + logger = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=_logger_name, + level=LogLevel.INFO + ) + + # Use the logger + logger.info(f"Stress test message from {threading.current_thread().name}") + + # Small delay + time.sleep(0.001) + + with results_lock: + results['success'] += operations_per_thread + + except Exception as e: + with results_lock: + results['errors'].append(str(e)) + + # Run stress test + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor: + futures = [executor.submit(stress_worker) for _ in range(num_threads)] + for future in concurrent.futures.as_completed(futures): + future.result() + + # Verify results + expected_operations = num_threads * operations_per_thread + assert results['success'] == expected_operations, f"Expected {expected_operations}, got {results['success']}" + assert len(results['errors']) == 0, f"Stress test errors: {results['errors']}" + + # Registry should only have the expected number of unique loggers + registry = LoggerFactory.get_registered_loggers() + assert len(registry) == len(logger_names) + for logger_name in logger_names: + assert logger_name in registry + + def test_concurrent_file_operations(self): + """Test concurrent file operations don't conflict.""" + num_threads = 8 + errors = [] + + def file_logger_worker(worker_id): + """Worker that creates file loggers and logs messages.""" + try: + with tempfile.TemporaryDirectory() as temp_dir: + # Create size rotating logger + with SizeRotatingLog( + name=f"file_worker_{worker_id}", + directory=temp_dir, + filenames=[f"test_{worker_id}.log"], + maxmbytes=1, + level="INFO" + ) as logger: + # Log multiple messages + for i in range(50): + logger.info(f"Worker {worker_id} message {i}: {'A' * 100}") + time.sleep(0.001) + + except Exception as e: + errors.append(f"Worker {worker_id}: {str(e)}") + + # Run concurrent file operations + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor: + futures = [executor.submit(file_logger_worker, i) for i in range(num_threads)] + for future in concurrent.futures.as_completed(futures): + future.result() + + # No file operation errors should occur + assert len(errors) == 0, f"File operation errors: {errors}" + + def test_registry_clear_during_operations(self): + """Test clearing registry while other operations are happening.""" + num_worker_threads = 10 + should_stop = threading.Event() + errors = [] + + def continuous_worker(worker_id): + """Worker that continuously creates loggers.""" + try: + while not should_stop.is_set(): + logger_name = f"continuous_{worker_id}_{int(time.time() * 1000)}" + logger = LoggerFactory.get_or_create_logger( + LoggerType.BASIC, + name=logger_name, + level=LogLevel.INFO + ) + logger.info(f"Continuous message from worker {worker_id}") + time.sleep(0.01) + + except Exception as e: + errors.append(f"Worker {worker_id}: {str(e)}") + + def registry_clearer(): + """Worker that periodically clears the registry.""" + try: + for _ in range(5): + time.sleep(0.1) + clear_logger_registry() + + except Exception as e: + errors.append(f"Registry clearer: 
{str(e)}") + + try: + # Start worker threads + with concurrent.futures.ThreadPoolExecutor(max_workers=num_worker_threads + 1) as executor: + # Start continuous workers + worker_futures = [ + executor.submit(continuous_worker, i) for i in range(num_worker_threads) + ] + + # Start registry clearer + clearer_future = executor.submit(registry_clearer) + + # Let it run for a bit + time.sleep(0.5) + + # Signal workers to stop + should_stop.set() + + # Wait for all to complete + for future in worker_futures + [clearer_future]: + future.result(timeout=5) + + except concurrent.futures.TimeoutError: + should_stop.set() + pytest.fail("Thread operations timed out") + + # Should complete without errors + assert len(errors) == 0, f"Registry clear test errors: {errors}" + + def test_thread_local_logger_independence(self): + """Test that loggers in different threads don't interfere with each other.""" + num_threads = 5 + thread_results = {} + results_lock = threading.Lock() + + def independent_worker(worker_id): + """Worker that creates and uses independent loggers.""" + try: + with tempfile.TemporaryDirectory() as temp_dir: + # Each thread creates its own logger instance (not using factory caching) + logger_instance = SizeRotatingLog( + name=f"independent_{worker_id}", + directory=temp_dir, + filenames=[f"independent_{worker_id}.log"], + level="DEBUG" + ) + + with logger_instance as logger: + # Log thread-specific messages + messages = [] + for i in range(10): + _message = f"Thread {worker_id} message {i}" + logger.info(_message) + messages.append(_message) + + # Verify log file exists and contains expected content + log_file = os.path.join(temp_dir, f"independent_{worker_id}.log") + assert os.path.exists(log_file), f"Log file missing for thread {worker_id}" + + with open(log_file, 'r') as f: + _log_content = f.read() + + # All thread-specific messages should be in the file + for _message in messages: + assert _message in _log_content + + with results_lock: + thread_results[worker_id] = { + 'messages': messages, + 'log_content': _log_content + } + + except Exception as e: + with results_lock: + thread_results[worker_id] = {'error': str(e)} + + # Run independent workers + with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor: + futures = [executor.submit(independent_worker, i) for i in range(num_threads)] + for future in concurrent.futures.as_completed(futures): + future.result() + + # Verify all threads succeeded + for worker_id in range(num_threads): + assert worker_id in thread_results + assert 'error' not in thread_results[worker_id], \ + f"Thread {worker_id} failed: {thread_results[worker_id].get('error')}" + assert 'messages' in thread_results[worker_id] + assert len(thread_results[worker_id]['messages']) == 10 + + # Verify no cross-contamination between threads + for worker_id in range(num_threads): + log_content = thread_results[worker_id]['log_content'] + for other_id in range(num_threads): + if other_id != worker_id: + # This thread's log should not contain messages from other threads + for message in thread_results[other_id]['messages']: + assert message not in log_content, \ + f"Thread {worker_id} log contains message from thread {other_id}" + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_timed_rotating.py b/tests/test_timed_rotating.py new file mode 100644 index 0000000..667c364 --- /dev/null +++ b/tests/test_timed_rotating.py @@ -0,0 +1,431 @@ +#!/usr/bin/env python3 +"""Test the timed rotating logger implementation.""" +import 
logging +import os +import sys +import tempfile +from pathlib import Path +from unittest.mock import Mock, patch +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from pythonLogs.timed_rotating import TimedRotatingLog, GZipRotatorTimed +from pythonLogs import RotateWhen + + +class TestTimedRotatingLog: + """Test cases for the TimedRotatingLog class.""" + + def test_timed_rotating_log_initialization(self): + """Test TimedRotatingLog initialization with default settings.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_timed", + directory=temp_dir, + level="INFO" + ) + assert timed_log.appname == "test_timed" + assert timed_log.directory == temp_dir + assert timed_log.level == logging.INFO + + def test_timed_rotating_log_initialization_with_all_params(self): + """Test TimedRotatingLog initialization with all parameters.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + level="DEBUG", + name="test_comprehensive", + directory=temp_dir, + filenames=["test1.log", "test2.log"], + when="midnight", + sufix="%Y%m%d", + daystokeep=30, + encoding="utf-8", + datefmt="%Y-%m-%d %H:%M:%S", + timezone="UTC", + streamhandler=True, + showlocation=True, + rotateatutc=True + ) + assert timed_log.appname == "test_comprehensive" + assert timed_log.filenames == ["test1.log", "test2.log"] + assert timed_log.when == "midnight" + assert timed_log.sufix == "%Y%m%d" + assert timed_log.daystokeep == 30 + assert timed_log.streamhandler is True + assert timed_log.showlocation is True + assert timed_log.rotateatutc is True + + def test_timed_rotating_log_init_method(self): + """Test the init method of TimedRotatingLog.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_init", + directory=temp_dir, + filenames=["test.log"] + ) + logger = timed_log.init() + + assert isinstance(logger, logging.Logger) + assert logger.name == "test_init" + assert len(logger.handlers) > 0 + + def test_timed_rotating_log_context_manager(self): + """Test TimedRotatingLog as context manager.""" + with tempfile.TemporaryDirectory() as temp_dir: + with TimedRotatingLog( + name="test_context", + directory=temp_dir + ) as logger: + assert isinstance(logger, logging.Logger) + assert logger.name == "test_context" + logger.info("Test message in context") + + def test_timed_rotating_log_context_manager_cleanup(self): + """Test context manager cleanup functionality.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_cleanup", + directory=temp_dir + ) + + with timed_log as logger: + initial_handler_count = len(logger.handlers) + assert initial_handler_count > 0 + + # After context exit, handlers should be cleaned up + final_handler_count = len(logger.handlers) + assert final_handler_count == 0 + + def test_timed_rotating_log_multiple_files(self): + """Test TimedRotatingLog with multiple log files.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_multiple", + directory=temp_dir, + filenames=["app.log", "error.log", "debug.log"] + ) + logger = timed_log.init() + + # Should have handlers for each file + file_handlers = [h for h in logger.handlers if isinstance(h, logging.handlers.TimedRotatingFileHandler)] + assert len(file_handlers) == 3 + + def test_timed_rotating_log_with_stream_handler(self): + """Test TimedRotatingLog with stream 
handler enabled.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_stream", + directory=temp_dir, + streamhandler=True + ) + logger = timed_log.init() + + # Should have both file and stream handlers + stream_handlers = [h for h in logger.handlers if isinstance(h, logging.StreamHandler)] + file_handlers = [h for h in logger.handlers if isinstance(h, logging.handlers.TimedRotatingFileHandler)] + + assert len(stream_handlers) >= 1 + assert len(file_handlers) >= 1 + + def test_timed_rotating_log_when_values(self): + """Test TimedRotatingLog with different 'when' values.""" + with tempfile.TemporaryDirectory() as temp_dir: + test_cases = [ + "midnight", + "H", # hourly + "D", # daily + RotateWhen.MIDNIGHT, + RotateWhen.HOURLY, + RotateWhen.DAILY + ] + + for when_value in test_cases: + timed_log = TimedRotatingLog( + name=f"test_when_{str(when_value).replace('/', '_')}", + directory=temp_dir, + when=when_value + ) + logger = timed_log.init() + assert logger is not None + + def test_timed_rotating_log_cleanup_logger_error_handling(self): + """Test error handling in _cleanup_logger method.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_error_cleanup", + directory=temp_dir + ) + logger = timed_log.init() + + # Add a mock handler that will raise an error on close + mock_handler = Mock() + mock_handler.close.side_effect = OSError("Test error") + logger.addHandler(mock_handler) + + # Should handle the error gracefully + TimedRotatingLog.cleanup_logger(logger) + + # Mock handler should still be removed despite the error + assert mock_handler not in logger.handlers + + def test_timed_rotating_log_invalid_filenames(self): + """Test TimedRotatingLog with invalid filenames parameter.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_invalid", + directory=temp_dir, + filenames="invalid_string" # Should be list or tuple + ) + + with pytest.raises(TypeError, match="Unable to parse filenames"): + timed_log.init() + + def test_timed_rotating_log_actual_logging(self): + """Test actual logging functionality with file creation.""" + with tempfile.TemporaryDirectory() as temp_dir: + log_file = "test_actual.log" + timed_log = TimedRotatingLog( + name="test_actual", + directory=temp_dir, + filenames=[log_file], + level="INFO" + ) + logger = timed_log.init() + + # Log some messages + logger.info("Test info message") + logger.warning("Test warning message") + logger.error("Test error message") + + # Check that log file was created + log_path = Path(temp_dir) / log_file + assert log_path.exists() + + # Check log content + log_content = log_path.read_text() + assert "Test info message" in log_content + assert "Test warning message" in log_content + assert "Test error message" in log_content + + def test_timed_rotating_log_with_custom_suffix(self): + """Test TimedRotatingLog with custom suffix.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_suffix", + directory=temp_dir, + sufix="%Y%m%d_%H%M%S" + ) + logger = timed_log.init() + + # Check that handler has the custom suffix + file_handlers = [h for h in logger.handlers if isinstance(h, logging.handlers.TimedRotatingFileHandler)] + assert len(file_handlers) > 0 + assert file_handlers[0].suffix == "%Y%m%d_%H%M%S" + + def test_timed_rotating_log_utc_rotation(self): + """Test TimedRotatingLog with UTC rotation.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = 
TimedRotatingLog( + name="test_utc", + directory=temp_dir, + rotateatutc=True + ) + logger = timed_log.init() + + # Check that handler is configured for UTC + file_handlers = [h for h in logger.handlers if isinstance(h, logging.handlers.TimedRotatingFileHandler)] + assert len(file_handlers) > 0 + assert file_handlers[0].utc is True + + +class TestGZipRotatorTimed: + """Test cases for the GZipRotatorTimed class.""" + + def test_gzip_rotator_timed_initialization(self): + """Test GZipRotatorTimed initialization.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorTimed(temp_dir, 7) + assert rotator.dir == temp_dir + assert rotator.days_to_keep == 7 + + def test_gzip_rotator_timed_call_basic(self): + """Test GZipRotatorTimed basic call functionality.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorTimed(temp_dir, 7) + + # Create source file with content + source_file = Path(temp_dir) / "test.log" + source_file.write_text("Test log content\nMore content\n") + + # Create destination filename (simulating what TimedRotatingFileHandler would do) + dest_file = "test.log.20240101" + + # Process the file + rotator(str(source_file), dest_file) + + # Source file should be processed (removed by gzip_file_with_sufix) + assert not source_file.exists() + + # Should have created a gzipped file + gz_files = list(Path(temp_dir).glob("*.gz")) + assert len(gz_files) > 0 + + def test_gzip_rotator_timed_suffix_extraction(self): + """Test suffix extraction from destination filename.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorTimed(temp_dir, 7) + + # Create source file with content + source_file = Path(temp_dir) / "app.log" + source_file.write_text("Test content") + + test_cases = [ + ("app.log.20240101", "20240101"), + ("app.log.2024-01-01", "2024-01-01"), + ("app.log.backup", "backup"), + ("app.log.txt", "txt") + ] + + for dest_filename, expected_suffix in test_cases: + # Reset source file + source_file.write_text("Test content") + + with patch('pythonLogs.timed_rotating.gzip_file_with_sufix') as mock_gzip: + rotator(str(source_file), dest_filename) + mock_gzip.assert_called_once_with(str(source_file), expected_suffix) + + def test_gzip_rotator_timed_with_nonexistent_source(self): + """Test GZipRotatorTimed with non-existent source file.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorTimed(temp_dir, 7) + + # Call with non-existent source file + rotator("/non/existent/file.log", "dest.log.20240101") + + # Should not crash and not create any files + gz_files = list(Path(temp_dir).glob("*.gz")) + assert len(gz_files) == 0 + + @patch('pythonLogs.timed_rotating.remove_old_logs') + def test_gzip_rotator_timed_calls_remove_old_logs(self, mock_remove_old_logs): + """Test that GZipRotatorTimed calls remove_old_logs.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorTimed(temp_dir, 7) + + # Create source file with content + source_file = Path(temp_dir) / "test.log" + source_file.write_text("Test content") + + rotator(str(source_file), "test.log.20240101") + + # Should have called remove_old_logs + mock_remove_old_logs.assert_called_once_with(temp_dir, 7) + + @patch('pythonLogs.timed_rotating.gzip_file_with_sufix') + def test_gzip_rotator_timed_calls_gzip_file_with_sufix(self, mock_gzip): + """Test that GZipRotatorTimed calls gzip_file_with_sufix with correct parameters.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorTimed(temp_dir, 7) + + # Create source 
file with content + source_file = Path(temp_dir) / "test.log" + source_file.write_text("Test content") + + dest_file = "test.log.20240101" + rotator(str(source_file), dest_file) + + # Should have called gzip_file_with_sufix with extracted suffix + mock_gzip.assert_called_once_with(str(source_file), "20240101") + + def test_gzip_rotator_timed_integration(self): + """Test GZipRotatorTimed integration with TimedRotatingLog.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Setup TimedRotatingLog with GZipRotatorTimed + timed_log = TimedRotatingLog( + name="integration_test", + directory=temp_dir, + filenames=["test.log"], + when="midnight", + daystokeep=5 + ) + logger = timed_log.init() + + # Log some content + for i in range(10): + logger.info(f"Test message {i}") + + # Force handlers to flush + for handler in logger.handlers: + if hasattr(handler, 'flush'): + handler.flush() + + # Verify log file exists + log_files = list(Path(temp_dir).glob("*.log")) + assert len(log_files) > 0 + + def test_gzip_rotator_timed_suffix_edge_cases(self): + """Test GZipRotatorTimed with edge cases in suffix extraction.""" + with tempfile.TemporaryDirectory() as temp_dir: + rotator = GZipRotatorTimed(temp_dir, 7) + + # Create source file + source_file = Path(temp_dir) / "test.log" + source_file.write_text("Test content") + + # Test edge cases + edge_cases = [ + ("file_no_extension", ""), + ("file.", ""), + ("file.log.", ""), + ("", "") + ] + + for dest_filename, expected_suffix in edge_cases: + source_file.write_text("Test content") # Reset file + + with patch('pythonLogs.timed_rotating.gzip_file_with_sufix') as mock_gzip: + rotator(str(source_file), dest_filename) + if dest_filename: # Only call if dest_filename is not empty + mock_gzip.assert_called_once_with(str(source_file), expected_suffix) + + def test_timed_rotating_log_double_context_manager_entry(self): + """Test TimedRotatingLog context manager when init already called.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_double_entry", + directory=temp_dir + ) + + # Call init manually first + logger1 = timed_log.init() + + # Then use as context manager + with timed_log as logger2: + # Should return the same logger instance + assert logger1 is logger2 + + def test_timed_rotating_log_handler_configuration(self): + """Test TimedRotatingLog handler configuration details.""" + with tempfile.TemporaryDirectory() as temp_dir: + timed_log = TimedRotatingLog( + name="test_handler_config", + directory=temp_dir, + encoding="utf-8", + when="H", + daystokeep=10 + # Note: rotateatutc defaults to True from settings + ) + logger = timed_log.init() + + file_handlers = [h for h in logger.handlers if isinstance(h, logging.handlers.TimedRotatingFileHandler)] + assert len(file_handlers) > 0 + + handler = file_handlers[0] + assert handler.encoding == "utf-8" + assert handler.when == "H" # 'hourly' gets converted to 'H' + assert handler.backupCount == 10 + assert handler.utc is True # Default from settings is True + assert isinstance(handler.rotator, GZipRotatorTimed) diff --git a/tests/test_timezone_migration.py b/tests/test_timezone_migration.py new file mode 100644 index 0000000..bae2710 --- /dev/null +++ b/tests/test_timezone_migration.py @@ -0,0 +1,290 @@ +#!/usr/bin/env python3 +"""Test timezone functionality after pytz to zoneinfo migration.""" +import os +import sys +import tempfile +import pytest + + +# Add parent directory to path for imports +sys.path.insert(0, 
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from pythonLogs import (
+    basic_logger,
+    size_rotating_logger,
+    timed_rotating_logger,
+    LogLevel,
+    RotateWhen,
+    LoggerFactory,
+    LoggerType,
+    clear_logger_registry,
+)
+from pythonLogs.log_utils import (
+    get_timezone_function,
+    _get_timezone_offset,
+    write_stderr,
+    _get_stderr_timezone,
+)
+
+
+class TestTimezoneZoneinfo:
+    """Test cases for timezone functionality using zoneinfo instead of pytz."""
+
+    def setup_method(self):
+        """Clear registry before each test."""
+        clear_logger_registry()
+
+    def test_zoneinfo_import_success(self):
+        """Test that ZoneInfo is properly imported."""
+        from pythonLogs.log_utils import ZoneInfo
+
+        # Should be able to create timezone objects
+        utc_tz = ZoneInfo("UTC")
+        assert utc_tz is not None
+
+    def test_utc_timezone_basic_logger(self):
+        """Test UTC timezone with basic logger."""
+        logger = basic_logger(
+            name="utc_test",
+            level=LogLevel.INFO,
+            timezone="UTC"
+        )
+
+        # Should not raise exceptions
+        logger.info("UTC timezone test message")
+        assert logger.name == "utc_test"
+
+    def test_localtime_timezone_basic_logger(self):
+        """Test localtime timezone with basic logger."""
+        logger = basic_logger(
+            name="local_test",
+            level=LogLevel.INFO,
+            timezone="localtime"
+        )
+
+        logger.info("Local timezone test message")
+        assert logger.name == "local_test"
+
+    def test_named_timezone_basic_logger(self):
+        """Test named timezone (America/New_York) with basic logger."""
+        logger = basic_logger(
+            name="ny_test",
+            level=LogLevel.INFO,
+            timezone="America/New_York"
+        )
+
+        logger.info("New York timezone test message")
+        assert logger.name == "ny_test"
+
+    def test_timezone_with_size_rotating_logger(self):
+        """Test timezone functionality with size rotating logger."""
+        with tempfile.TemporaryDirectory() as temp_dir:
+            logger = size_rotating_logger(
+                name="size_tz_test",
+                directory=temp_dir,
+                level=LogLevel.INFO,
+                timezone="America/Chicago",
+                streamhandler=False
+            )
+
+            logger.info("Size rotating with timezone test")
+            assert logger.name == "size_tz_test"
+
+    def test_timezone_with_timed_rotating_logger(self):
+        """Test timezone functionality with timed rotating logger."""
+        with tempfile.TemporaryDirectory() as temp_dir:
+            logger = timed_rotating_logger(
+                name="timed_tz_test",
+                directory=temp_dir,
+                level=LogLevel.INFO,
+                timezone="Europe/London",
+                when=RotateWhen.DAILY,
+                streamhandler=False
+            )
+
+            logger.info("Timed rotating with timezone test")
+            assert logger.name == "timed_tz_test"
+
+    def test_timezone_factory_pattern(self):
+        """Test timezone functionality through factory pattern."""
+        logger = LoggerFactory.create_logger(
+            LoggerType.BASIC,
+            name="factory_tz_test",
+            level=LogLevel.DEBUG,
+            timezone="Asia/Tokyo"
+        )
+
+        logger.debug("Factory timezone test message")
+        assert logger.name == "factory_tz_test"
+
+    def test_invalid_timezone_handling(self):
+        """Test handling of invalid timezone names."""
+        # Should handle invalid timezone gracefully
+        with pytest.raises(Exception):  # ZoneInfoNotFoundError or similar
+            basic_logger(
+                name="invalid_tz_test",
+                timezone="Invalid/Timezone"
+            )
+
+    def test_timezone_offset_calculation(self):
+        """Test timezone offset calculation function."""
+        # Test UTC
+        utc_offset = _get_timezone_offset("UTC")
+        assert utc_offset == "+0000"
+
+        # Test localtime
+        local_offset = _get_timezone_offset("localtime")
+        assert len(local_offset) == 5  # Format: ±HHMM
+        assert local_offset[0] in ['+', '-']
+
+    def test_timezone_function_caching(self):
+        """Test that timezone functions are properly cached."""
+        # First call
+        func1 = get_timezone_function("UTC")
+
+        # Second call should return cached result
+        func2 = get_timezone_function("UTC")
+
+        # Should be the same function object (cached)
+        assert func1 is func2
+
+    def test_timezone_function_types(self):
+        """Test different timezone function types."""
+        # UTC should return gmtime
+        utc_func = get_timezone_function("UTC")
+        import time
+        assert utc_func is time.gmtime
+
+        # Localtime should return localtime
+        local_func = get_timezone_function("localtime")
+        assert local_func is time.localtime
+
+        # Named timezone should return custom function
+        named_func = get_timezone_function("America/New_York")
+        assert callable(named_func)
+        assert named_func is not time.gmtime
+        assert named_func is not time.localtime
+
+    def test_stderr_timezone_functionality(self):
+        """Test stderr timezone handling."""
+        import io
+        from contextlib import redirect_stderr
+
+        # Capture stderr output
+        stderr_capture = io.StringIO()
+
+        with redirect_stderr(stderr_capture):
+            write_stderr("Test error message")
+
+        output = stderr_capture.getvalue()
+
+        # Should contain timestamp and error message
+        assert "ERROR" in output
+        assert "Test error message" in output
+        assert "[" in output and "]" in output  # Timestamp brackets
+
+    def test_stderr_timezone_caching(self):
+        """Test that stderr timezone is cached."""
+        # First call
+        tz1 = _get_stderr_timezone()
+
+        # Second call should return cached result
+        tz2 = _get_stderr_timezone()
+
+        # Should be the same object (cached)
+        assert tz1 is tz2
+
+    def test_multiple_timezone_loggers(self):
+        """Test creating loggers with different timezones."""
+        timezones = ["UTC", "America/New_York", "Europe/Paris", "Asia/Tokyo"]
+        loggers = []
+
+        for i, tz in enumerate(timezones):
+            logger = basic_logger(
+                name=f"tz_test_{i}",
+                timezone=tz,
+                level=LogLevel.INFO
+            )
+            loggers.append(logger)
+            logger.info(f"Message from {tz}")
+
+        # All loggers should be created successfully
+        assert len(loggers) == len(timezones)
+
+        # Each should have a unique name
+        names = {logger.name for logger in loggers}
+        assert len(names) == len(timezones)
+
+    def test_timezone_with_factory_registry(self):
+        """Test timezone functionality with factory registry."""
+        from pythonLogs import get_or_create_logger
+
+        # Create logger with timezone
+        logger1 = get_or_create_logger(
+            LoggerType.BASIC,
+            name="registry_tz_test",
+            timezone="Australia/Sydney"
+        )
+
+        # Get the same logger from registry
+        logger2 = get_or_create_logger(
+            LoggerType.BASIC,
+            name="registry_tz_test",
+            timezone="Australia/Sydney"
+        )
+
+        # Should be the same instance
+        assert logger1 is logger2
+
+        logger1.info("Registry timezone test")
+
+    def test_case_insensitive_timezone_handling(self):
+        """Test case insensitive timezone handling."""
+        # Test localtime in different cases
+        logger1 = basic_logger(name="test1", timezone="localtime")
+        logger2 = basic_logger(name="test2", timezone="LOCALTIME")
+        logger3 = basic_logger(name="test3", timezone="LocalTime")
+
+        # All should work without errors
+        logger1.info("Test message 1")
+        logger2.info("Test message 2")
+        logger3.info("Test message 3")
+
+    def test_timezone_performance_optimization(self):
+        """Test that timezone operations are optimized."""
+        import time
+
+        # Time creating multiple loggers with the same timezone (should use cache)
+        start_time = time.time()
+
+        loggers = []
+        for i in range(20):
+            logger = basic_logger(
+                name=f"perf_test_{i}",
+                timezone="America/Chicago"  # Same timezone - should use cache
+            )
+            loggers.append(logger)
+
+        elapsed_time = time.time() - start_time
+
+        # Should complete quickly due to caching
+        assert elapsed_time < 0.5  # Should be very fast
+        assert len(loggers) == 20
+
+    def test_backward_compatibility_timezone_strings(self):
+        """Test that string-based timezone parameters still work."""
+        # All of these should work (backward compatibility)
+        test_cases = [
+            "UTC",
+            "localtime",
+            "America/New_York",
+            "Europe/London"
+        ]
+
+        for tz in test_cases:
+            logger = basic_logger(
+                name=f"compat_test_{tz.replace('/', '_')}",
+                timezone=tz
+            )
+            logger.info(f"Compatibility test for {tz}")
+            assert logger.name.startswith("compat_test_")
diff --git a/tests/test_zoneinfo_fallbacks.py b/tests/test_zoneinfo_fallbacks.py
new file mode 100644
index 0000000..324ac54
--- /dev/null
+++ b/tests/test_zoneinfo_fallbacks.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python3
+"""Test zoneinfo fallback mechanisms and edge cases."""
+import os
+import sys
+import tempfile
+from unittest.mock import patch
+import pytest
+
+
+# Add parent directory to path for imports
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+class TestZoneinfoFallbacks:
+    """Test fallback mechanisms for zoneinfo import and edge cases."""
+
+    def test_zoneinfo_import_available(self):
+        """Test that zoneinfo is available in Python 3.9+."""
+        try:
+            from zoneinfo import ZoneInfo
+            assert ZoneInfo is not None
+            print("✓ Native zoneinfo available")
+        except ImportError:
+            pytest.skip("zoneinfo not available in this Python version")
+
+    def test_timezone_error_handling(self):
+        """Test proper error handling for timezone operations."""
+        from pythonLogs import basic_logger, LogLevel
+
+        # Test with invalid timezone
+        with pytest.raises(Exception):  # Should raise ZoneInfoNotFoundError or similar
+            basic_logger(
+                name="error_test",
+                timezone="NonExistent/Timezone",
+                level=LogLevel.INFO
+            )
+
+    def test_timezone_offset_edge_cases(self):
+        """Test timezone offset calculation for edge cases."""
+        from pythonLogs.log_utils import _get_timezone_offset
+
+        # Test UTC (should always work)
+        utc_offset = _get_timezone_offset("UTC")
+        assert utc_offset == "+0000"
+
+        # Test localtime (should work on any system)
+        local_offset = _get_timezone_offset("localtime")
+        assert isinstance(local_offset, str)
+        assert len(local_offset) == 5
+        assert local_offset[0] in ['+', '-']
+
+        # Test case insensitivity for localtime
+        local_offset_upper = _get_timezone_offset("LOCALTIME")
+        assert local_offset_upper == local_offset
+
+    def test_stderr_timezone_fallback(self):
+        """Test stderr timezone fallback behavior."""
+        from pythonLogs.log_utils import write_stderr
+        import io
+        from contextlib import redirect_stderr
+
+        # Mock environment variable
+        with patch.dict(os.environ, {'LOG_TIMEZONE': 'UTC'}):
+            stderr_capture = io.StringIO()
+            with redirect_stderr(stderr_capture):
+                write_stderr("Test message")
+
+            output = stderr_capture.getvalue()
+            assert "Test message" in output
+            assert "ERROR" in output
+
+    def test_timezone_function_fallback(self):
+        """Test timezone function fallback for edge cases."""
+        from pythonLogs.log_utils import get_timezone_function
+        import time
+
+        # Test standard cases
+        utc_func = get_timezone_function("UTC")
+        assert utc_func is time.gmtime
+
+        local_func = get_timezone_function("localtime")
+        assert local_func is time.localtime
+
+        # Test case insensitivity
+        utc_func_upper = get_timezone_function("utc")
+        assert utc_func_upper is time.gmtime
+
+        local_func_upper = get_timezone_function("LOCALTIME")
+        assert local_func_upper is time.localtime
+
+    def test_logger_creation_with_fallback_timezone(self):
+        """Test logger creation when timezone operations might fail."""
+        from pythonLogs import basic_logger, LogLevel
+
+        # These should all work with proper fallback
+        logger = basic_logger(
+            name="fallback_test",
+            timezone="UTC",
+            level=LogLevel.INFO
+        )
+
+        logger.info("Fallback test message")
+        assert logger.name == "fallback_test"
+
+    def test_complex_timezone_scenarios(self):
+        """Test complex timezone scenarios and edge cases."""
+        from pythonLogs import size_rotating_logger, LogLevel
+
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Test with various timezone formats
+            timezones = [
+                "UTC",
+                "localtime",
+                "America/New_York",
+                "Europe/London",
+                "Asia/Tokyo"
+            ]
+
+            for i, tz in enumerate(timezones):
+                try:
+                    logger = size_rotating_logger(
+                        name=f"complex_tz_test_{i}",
+                        directory=temp_dir,
+                        timezone=tz,
+                        level=LogLevel.INFO,
+                        streamhandler=False
+                    )
+                    logger.info(f"Complex timezone test: {tz}")
+                    assert logger.name == f"complex_tz_test_{i}"
+                except Exception as e:
+                    # Some timezones might not be available on all systems
+                    pytest.skip(f"Timezone {tz} not available: {e}")
+
+    def test_zoneinfo_caching_behavior(self):
+        """Test that zoneinfo objects are properly cached."""
+        from pythonLogs.log_utils import get_timezone_function, _get_timezone_offset
+
+        # Test function caching
+        func1 = get_timezone_function("America/Chicago")
+        func2 = get_timezone_function("America/Chicago")
+        assert func1 is func2  # Should be cached
+
+        # Test offset caching
+        offset1 = _get_timezone_offset("America/Chicago")
+        offset2 = _get_timezone_offset("America/Chicago")
+        assert offset1 == offset2  # Should be cached
+
+    def test_environment_variable_timezone_handling(self):
+        """Test timezone handling through environment variables."""
+
+        # Test with environment variable
+        with patch.dict(os.environ, {'LOG_TIMEZONE': 'Europe/Paris'}):
+            # Environment variable should be used for stderr
+            from pythonLogs.log_utils import _get_stderr_timezone
+
+            # Clear cache to test new environment
+            _get_stderr_timezone.cache_clear()
+
+            tz = _get_stderr_timezone()
+            assert tz is not None
+
+    def test_concurrent_timezone_access(self):
+        """Test timezone functionality under concurrent access."""
+        import threading
+        from pythonLogs import basic_logger, LogLevel
+
+        results = []
+        errors = []
+
+        def create_logger_worker(worker_id):
+            try:
+                logger = basic_logger(
+                    name=f"concurrent_test_{worker_id}",
+                    timezone="UTC",
+                    level=LogLevel.INFO
+                )
+                logger.info(f"Concurrent test message {worker_id}")
+                results.append(worker_id)
+            except Exception as e:
+                errors.append((worker_id, e))
+
+        # Create multiple threads
+        threads = []
+        for i in range(10):
+            thread = threading.Thread(target=create_logger_worker, args=(i,))
+            threads.append(thread)
+            thread.start()
+
+        # Wait for all threads
+        for thread in threads:
+            thread.join()
+
+        # All should succeed
+        assert len(errors) == 0, f"Errors occurred: {errors}"
+        assert len(results) == 10
+
+    def test_memory_usage_with_timezone_caching(self):
+        """Test that timezone caching doesn't cause memory leaks."""
+        from pythonLogs import basic_logger, clear_logger_registry
+
+        # Create many loggers with same timezone (should use cache)
+        for i in range(100):
+            logger = basic_logger(
+                name=f"memory_test_{i}",
+                timezone="UTC"
+            )
+            logger.info(f"Memory test {i}")
+
+        # Clear registry to free memory
+        clear_logger_registry()
+
+        # Should complete without memory issues
+        assert True  # If we get here, no memory issues occurred
+
+    def test_timezone_validation_edge_cases(self):
+        """Test timezone validation for various edge cases."""
+        from pythonLogs.log_utils import _get_timezone_offset
+
+        # Test case variations (timezone names are case-sensitive except for localtime)
+        test_cases = [
+            ("UTC", "+0000"),
+            ("localtime", None),  # Will vary by system
+            ("LOCALTIME", None),  # Will vary by system
+        ]
+
+        for tz_input, expected in test_cases:
+            result = _get_timezone_offset(tz_input)
+            if expected is not None:
+                assert result == expected
+            else:
+                # For localtime, just check format
+                assert isinstance(result, str)
+                assert len(result) == 5
+                assert result[0] in ['+', '-']
+
+        # Test that invalid timezone names raise appropriate errors
+        with pytest.raises(Exception):  # Should raise ZoneInfoNotFoundError
+            _get_timezone_offset("invalid_timezone")