forked from Shifty/pyserveX
cython path_matcher added to reduce time on hot operations
This commit is contained in:
parent
6c50a35aa3
commit
5d863bc97c
14
.gitignore
vendored
14
.gitignore
vendored
@ -14,3 +14,17 @@ static/*
|
|||||||
|
|
||||||
docs/
|
docs/
|
||||||
dist/
|
dist/
|
||||||
|
build/
|
||||||
|
|
||||||
|
# Cython generated files
|
||||||
|
*.c
|
||||||
|
*.so
|
||||||
|
*.pyd
|
||||||
|
*.html
|
||||||
|
*.egg-info/
|
||||||
|
|
||||||
|
# IDE
|
||||||
|
.idea/
|
||||||
|
.vscode/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
33
Makefile
33
Makefile
@ -1,4 +1,4 @@
|
|||||||
.PHONY: help install build clean test lint format run dev-install dev-deps check release-patch release-minor release-major pipeline-check
|
.PHONY: help install build build-cython clean test lint format run dev-install dev-deps check release-patch release-minor release-major pipeline-check benchmark
|
||||||
|
|
||||||
PYTHON = python3
|
PYTHON = python3
|
||||||
POETRY = poetry
|
POETRY = poetry
|
||||||
@ -21,12 +21,14 @@ help:
|
|||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "install-package" "Installing package locally"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "install-package" "Installing package locally"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "$(YELLOW)Building:$(NC)"
|
@echo "$(YELLOW)Building:$(NC)"
|
||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "build" "Building package"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "build" "Building package (with Cython)"
|
||||||
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "build-cython" "Building Cython extensions only"
|
||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "clean" "Cleaning temporary files"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "clean" "Cleaning temporary files"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "$(YELLOW)Testing:$(NC)"
|
@echo "$(YELLOW)Testing:$(NC)"
|
||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "test" "Running tests"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "test" "Running tests"
|
||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "test-cov" "Running tests with coverage"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "test-cov" "Running tests with coverage"
|
||||||
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "benchmark" "Running performance benchmarks"
|
||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "lint" "Checking code with linters"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "lint" "Checking code with linters"
|
||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "format" "Formatting code"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "format" "Formatting code"
|
||||||
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "check" "Lint and test"
|
@printf " $(YELLOW)%-20s$(CYAN) %s$(NC)\n" "check" "Lint and test"
|
||||||
@ -75,18 +77,27 @@ dev-deps:
|
|||||||
@echo "$(GREEN)Installing additional tools...$(NC)"
|
@echo "$(GREEN)Installing additional tools...$(NC)"
|
||||||
$(POETRY) add --group dev pytest pytest-cov black isort mypy flake8
|
$(POETRY) add --group dev pytest pytest-cov black isort mypy flake8
|
||||||
|
|
||||||
build: clean
|
build: clean build-cython
|
||||||
@echo "$(GREEN)Building package...$(NC)"
|
@echo "$(GREEN)Building package...$(NC)"
|
||||||
$(POETRY) build
|
$(POETRY) build
|
||||||
|
|
||||||
|
build-cython:
|
||||||
|
@echo "$(GREEN)Building Cython extensions...$(NC)"
|
||||||
|
$(POETRY) run python scripts/build_cython.py build_ext --inplace || echo "$(YELLOW)Cython build skipped (optional)$(NC)"
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
@echo "$(GREEN)Cleaning temporary files...$(NC)"
|
@echo "$(GREEN)Cleaning temporary files...$(NC)"
|
||||||
rm -rf dist/
|
rm -rf dist/
|
||||||
rm -rf build/
|
rm -rf build/
|
||||||
rm -rf *.egg-info/
|
rm -rf *.egg-info/
|
||||||
find . -type d -name __pycache__ -exec rm -rf {} +
|
find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||||
find . -type f -name "*.pyc" -delete
|
find . -type f -name "*.pyc" -delete 2>/dev/null || true
|
||||||
find . -type f -name "*.pyo" -delete
|
find . -type f -name "*.pyo" -delete 2>/dev/null || true
|
||||||
|
@# Cython artifacts
|
||||||
|
find $(PACKAGE_NAME) -type f -name "*.c" -delete 2>/dev/null || true
|
||||||
|
find $(PACKAGE_NAME) -type f -name "*.so" -delete 2>/dev/null || true
|
||||||
|
find $(PACKAGE_NAME) -type f -name "*.pyd" -delete 2>/dev/null || true
|
||||||
|
find $(PACKAGE_NAME) -type f -name "*.html" -delete 2>/dev/null || true
|
||||||
|
|
||||||
test:
|
test:
|
||||||
@echo "$(GREEN)Running tests...$(NC)"
|
@echo "$(GREEN)Running tests...$(NC)"
|
||||||
@ -98,16 +109,20 @@ test-cov:
|
|||||||
|
|
||||||
lint:
|
lint:
|
||||||
@echo "$(GREEN)Checking code with linters...$(NC)"
|
@echo "$(GREEN)Checking code with linters...$(NC)"
|
||||||
$(POETRY) run flake8 $(PACKAGE_NAME)/
|
$(POETRY) run flake8 $(PACKAGE_NAME)/ --exclude='*.pyx,*.pxd'
|
||||||
$(POETRY) run mypy $(PACKAGE_NAME)/
|
$(POETRY) run mypy $(PACKAGE_NAME)/
|
||||||
|
|
||||||
format:
|
format:
|
||||||
@echo "$(GREEN)Formatting code...$(NC)"
|
@echo "$(GREEN)Formatting code...$(NC)"
|
||||||
$(POETRY) run black $(PACKAGE_NAME)/
|
$(POETRY) run black $(PACKAGE_NAME)/ --exclude='\.pyx$$'
|
||||||
$(POETRY) run isort $(PACKAGE_NAME)/
|
$(POETRY) run isort $(PACKAGE_NAME)/ --skip-glob='*.pyx'
|
||||||
|
|
||||||
check: lint test
|
check: lint test
|
||||||
|
|
||||||
|
benchmark: build-cython
|
||||||
|
@echo "$(GREEN)Running benchmarks...$(NC)"
|
||||||
|
$(POETRY) run python benchmarks/bench_path_matcher.py
|
||||||
|
|
||||||
run:
|
run:
|
||||||
@echo "$(GREEN)Starting server in development mode...$(NC)"
|
@echo "$(GREEN)Starting server in development mode...$(NC)"
|
||||||
$(POETRY) run python run.py --debug
|
$(POETRY) run python run.py --debug
|
||||||
|
|||||||
215
benchmarks/bench_path_matcher.py
Normal file
215
benchmarks/bench_path_matcher.py
Normal file
@ -0,0 +1,215 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Benchmark script for path_matcher performance comparison.
|
||||||
|
|
||||||
|
Compares:
|
||||||
|
- Pure Python implementation
|
||||||
|
- Cython implementation (if available)
|
||||||
|
- Original MountedApp from asgi_mount.py
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python benchmarks/bench_path_matcher.py
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
import statistics
|
||||||
|
from typing import Callable, List, Tuple
|
||||||
|
|
||||||
|
from pyserve._path_matcher_py import (
|
||||||
|
FastMountedPath as PyFastMountedPath,
|
||||||
|
FastMountManager as PyFastMountManager,
|
||||||
|
path_matches_prefix as py_path_matches_prefix,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from pyserve._path_matcher import (
|
||||||
|
FastMountedPath as CyFastMountedPath,
|
||||||
|
FastMountManager as CyFastMountManager,
|
||||||
|
path_matches_prefix as cy_path_matches_prefix,
|
||||||
|
)
|
||||||
|
CYTHON_AVAILABLE = True
|
||||||
|
except ImportError:
|
||||||
|
CYTHON_AVAILABLE = False
|
||||||
|
print("Cython module not compiled. Run: python setup_cython.py build_ext --inplace\n")
|
||||||
|
|
||||||
|
from pyserve.asgi_mount import MountedApp
|
||||||
|
|
||||||
|
|
||||||
|
def benchmark(func: Callable, iterations: int = 100000) -> Tuple[float, float]:
|
||||||
|
times = []
|
||||||
|
|
||||||
|
for _ in range(1000):
|
||||||
|
func()
|
||||||
|
|
||||||
|
for _ in range(iterations):
|
||||||
|
start = time.perf_counter_ns()
|
||||||
|
func()
|
||||||
|
end = time.perf_counter_ns()
|
||||||
|
times.append(end - start)
|
||||||
|
|
||||||
|
return statistics.mean(times), statistics.stdev(times)
|
||||||
|
|
||||||
|
|
||||||
|
def format_time(ns: float) -> str:
|
||||||
|
if ns < 1000:
|
||||||
|
return f"{ns:.1f} ns"
|
||||||
|
elif ns < 1_000_000:
|
||||||
|
return f"{ns/1000:.2f} µs"
|
||||||
|
else:
|
||||||
|
return f"{ns/1_000_000:.2f} ms"
|
||||||
|
|
||||||
|
|
||||||
|
def run_benchmarks():
|
||||||
|
print("=" * 70)
|
||||||
|
print("PATH MATCHER BENCHMARK")
|
||||||
|
print("=" * 70)
|
||||||
|
print()
|
||||||
|
|
||||||
|
# Test paths
|
||||||
|
mount_path = "/api/v1"
|
||||||
|
test_paths = [
|
||||||
|
"/api/v1/users/123/posts", # Matching - long
|
||||||
|
"/api/v1", # Matching - exact
|
||||||
|
"/api/v2/users", # Not matching - similar prefix
|
||||||
|
"/other/path", # Not matching - completely different
|
||||||
|
]
|
||||||
|
|
||||||
|
iterations = 100000
|
||||||
|
|
||||||
|
# =========================================================================
|
||||||
|
# Benchmark 1: Single path matching
|
||||||
|
# =========================================================================
|
||||||
|
print("BENCHMARK 1: Single Path Matching")
|
||||||
|
print("-" * 70)
|
||||||
|
print(f" Mount path: {mount_path}")
|
||||||
|
print(f" Iterations: {iterations:,}")
|
||||||
|
print()
|
||||||
|
|
||||||
|
results = {}
|
||||||
|
|
||||||
|
# Original MountedApp
|
||||||
|
original_mount = MountedApp(mount_path, app=None, name="test") # type: ignore
|
||||||
|
|
||||||
|
for test_path in test_paths:
|
||||||
|
print(f" Test path: {test_path}")
|
||||||
|
|
||||||
|
# Original
|
||||||
|
mean, std = benchmark(lambda: original_mount.matches(test_path), iterations)
|
||||||
|
results[("Original", test_path)] = mean
|
||||||
|
print(f" Original MountedApp: {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
# Pure Python
|
||||||
|
py_mount = PyFastMountedPath(mount_path)
|
||||||
|
mean, std = benchmark(lambda: py_mount.matches(test_path), iterations)
|
||||||
|
results[("Python", test_path)] = mean
|
||||||
|
print(f" Pure Python: {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
# Cython (if available)
|
||||||
|
if CYTHON_AVAILABLE:
|
||||||
|
cy_mount = CyFastMountedPath(mount_path)
|
||||||
|
mean, std = benchmark(lambda: cy_mount.matches(test_path), iterations)
|
||||||
|
results[("Cython", test_path)] = mean
|
||||||
|
print(f" Cython: {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
print()
|
||||||
|
|
||||||
|
# =========================================================================
|
||||||
|
# Benchmark 2: Mount Manager lookup
|
||||||
|
# =========================================================================
|
||||||
|
print()
|
||||||
|
print("BENCHMARK 2: Mount Manager Lookup (10 mounts)")
|
||||||
|
print("-" * 70)
|
||||||
|
|
||||||
|
# Setup managers with 10 mounts
|
||||||
|
mount_paths = [f"/api/v{i}" for i in range(10)]
|
||||||
|
|
||||||
|
py_manager = PyFastMountManager()
|
||||||
|
for p in mount_paths:
|
||||||
|
py_manager.add_mount(PyFastMountedPath(p, name=p))
|
||||||
|
|
||||||
|
if CYTHON_AVAILABLE:
|
||||||
|
cy_manager = CyFastMountManager()
|
||||||
|
for p in mount_paths:
|
||||||
|
cy_manager.add_mount(CyFastMountedPath(p, name=p))
|
||||||
|
|
||||||
|
test_lookups = [
|
||||||
|
"/api/v5/users/123", # Middle mount
|
||||||
|
"/api/v0/items", # First mount (longest)
|
||||||
|
"/api/v9/data", # Last mount
|
||||||
|
"/other/not/found", # No match
|
||||||
|
]
|
||||||
|
|
||||||
|
for test_path in test_lookups:
|
||||||
|
print(f" Lookup path: {test_path}")
|
||||||
|
|
||||||
|
# Pure Python
|
||||||
|
mean, std = benchmark(lambda: py_manager.get_mount(test_path), iterations)
|
||||||
|
print(f" Pure Python: {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
# Cython
|
||||||
|
if CYTHON_AVAILABLE:
|
||||||
|
mean, std = benchmark(lambda: cy_manager.get_mount(test_path), iterations)
|
||||||
|
print(f" Cython: {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
print()
|
||||||
|
|
||||||
|
# =========================================================================
|
||||||
|
# Benchmark 3: Combined match + modify
|
||||||
|
# =========================================================================
|
||||||
|
print()
|
||||||
|
print("BENCHMARK 3: Combined Match + Modify Path")
|
||||||
|
print("-" * 70)
|
||||||
|
|
||||||
|
from pyserve._path_matcher_py import match_and_modify_path as py_match_modify
|
||||||
|
if CYTHON_AVAILABLE:
|
||||||
|
from pyserve._path_matcher import match_and_modify_path as cy_match_modify
|
||||||
|
|
||||||
|
test_path = "/api/v1/users/123/posts"
|
||||||
|
print(f" Test path: {test_path}")
|
||||||
|
print(f" Mount path: {mount_path}")
|
||||||
|
print()
|
||||||
|
|
||||||
|
# Original (separate calls)
|
||||||
|
def original_match_modify():
|
||||||
|
if original_mount.matches(test_path):
|
||||||
|
return original_mount.get_modified_path(test_path)
|
||||||
|
return None
|
||||||
|
|
||||||
|
mean, std = benchmark(original_match_modify, iterations)
|
||||||
|
print(f" Original (2 calls): {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
# Pure Python combined
|
||||||
|
mean, std = benchmark(lambda: py_match_modify(test_path, mount_path), iterations)
|
||||||
|
print(f" Pure Python (combined): {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
# Cython combined
|
||||||
|
if CYTHON_AVAILABLE:
|
||||||
|
mean, std = benchmark(lambda: cy_match_modify(test_path, mount_path), iterations)
|
||||||
|
print(f" Cython (combined): {format_time(mean):>12} ± {format_time(std)}")
|
||||||
|
|
||||||
|
# =========================================================================
|
||||||
|
# Summary
|
||||||
|
# =========================================================================
|
||||||
|
print()
|
||||||
|
print("=" * 70)
|
||||||
|
print("SUMMARY")
|
||||||
|
print("=" * 70)
|
||||||
|
|
||||||
|
if CYTHON_AVAILABLE:
|
||||||
|
print("Cython module is available and was benchmarked")
|
||||||
|
else:
|
||||||
|
print("Cython module not available - only Pure Python was benchmarked")
|
||||||
|
print(" To build Cython module:")
|
||||||
|
print(" 1. Install Cython: pip install cython")
|
||||||
|
print(" 2. Build: python setup_cython.py build_ext --inplace")
|
||||||
|
|
||||||
|
print()
|
||||||
|
print("The optimized path matcher provides:")
|
||||||
|
print(" - Pre-computed path length and trailing slash")
|
||||||
|
print(" - Boundary-aware prefix matching (prevents /api matching /api-v2)")
|
||||||
|
print(" - Combined match+modify operation to reduce function calls")
|
||||||
|
print(" - Longest-prefix-first ordering in MountManager")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
run_benchmarks()
|
||||||
10
mypy.ini
10
mypy.ini
@ -12,6 +12,16 @@ warn_unused_ignores = True
|
|||||||
warn_no_return = True
|
warn_no_return = True
|
||||||
warn_unreachable = True
|
warn_unreachable = True
|
||||||
strict_equality = True
|
strict_equality = True
|
||||||
|
exclude = (?x)(
|
||||||
|
^pyserve/_path_matcher\.pyx$
|
||||||
|
)
|
||||||
|
|
||||||
[mypy-tests.*]
|
[mypy-tests.*]
|
||||||
disallow_untyped_defs = False
|
disallow_untyped_defs = False
|
||||||
|
|
||||||
|
[mypy-pyserve._path_matcher]
|
||||||
|
ignore_missing_imports = True
|
||||||
|
follow_imports = skip
|
||||||
|
|
||||||
|
[mypy-django.*]
|
||||||
|
ignore_missing_imports = True
|
||||||
|
|||||||
72
poetry.lock
generated
72
poetry.lock
generated
@ -274,6 +274,55 @@ files = [
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
|
toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cython"
|
||||||
|
version = "3.2.2"
|
||||||
|
description = "The Cython compiler for writing C extensions in the Python language."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
groups = ["dev"]
|
||||||
|
files = [
|
||||||
|
{file = "cython-3.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5afac4e77e71a9010dc7fd3191ced00f9b12b494dd7525c140781054ce63a73"},
|
||||||
|
{file = "cython-3.2.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cd2ede6af225499ad22888dbfb13b92d71fc1016f401ee637559a5831b177c2"},
|
||||||
|
{file = "cython-3.2.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8c9265b3e84ae2d999b7c3165c683e366bbbbbe4346468055ca2366fe013f2df"},
|
||||||
|
{file = "cython-3.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:d7b3447b2005dffc5f276d420a480d2b57d15091242652d410b6a46fb00ed251"},
|
||||||
|
{file = "cython-3.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d140c2701cbb8cf960300cf1b67f3b4fa9d294d32e51b85f329bff56936a82fd"},
|
||||||
|
{file = "cython-3.2.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50bbaabee733fd2780985e459fc20f655e02def83e8eff10220ad88455a34622"},
|
||||||
|
{file = "cython-3.2.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9509f1e9c41c86b790cff745bb31927bbc861662a3b462596d71d3d2a578abb"},
|
||||||
|
{file = "cython-3.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:034ab96cb8bc8e7432bc27491f8d66f51e435b1eb21ddc03aa844be8f21ad847"},
|
||||||
|
{file = "cython-3.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:692a41c8fe06fb2dc55ca2c8d71c80c469fd16fe69486ed99f3b3cbb2d3af83f"},
|
||||||
|
{file = "cython-3.2.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:098590c1dc309f8a0406ade031963a95a87714296b425539f9920aebf924560d"},
|
||||||
|
{file = "cython-3.2.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3898c076e9c458bcb3e4936187919fda5f5365fe4c567d35d2b003444b6f3fe"},
|
||||||
|
{file = "cython-3.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:2b910b89a2a71004064c5e890b9512a251eda63fae252caa0feb9835057035f9"},
|
||||||
|
{file = "cython-3.2.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa24cd0bdab27ca099b2467806c684404add597c1108e07ddf7b6471653c85d7"},
|
||||||
|
{file = "cython-3.2.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60f4aa425e1ff98abf8d965ae7020f06dd2cbc01dbd945137d2f9cca4ff0524a"},
|
||||||
|
{file = "cython-3.2.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a473df474ba89e9fee81ee82b31062a267f9e598096b222783477e56d02ad12c"},
|
||||||
|
{file = "cython-3.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:b4df52101209817fde7284cf779156f79142fb639b1d7840f11680ff4bb30604"},
|
||||||
|
{file = "cython-3.2.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:177faf4d61e9f2d4d2db61194ac9ec16d3fe3041c1b6830f871a01935319eeb3"},
|
||||||
|
{file = "cython-3.2.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8db28aef793c81dc69383b619ca508668998aaf099cd839d3cbae85184cce744"},
|
||||||
|
{file = "cython-3.2.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3de43a5786033a27fae1c882feb5ff0d023c38b83356e6800c1be0bcd6cf9f11"},
|
||||||
|
{file = "cython-3.2.2-cp314-cp314-win_amd64.whl", hash = "sha256:fed44d0ab2d36f1b0301c770b0dafec23bcb9700d58e7769cd6d9136b3304c11"},
|
||||||
|
{file = "cython-3.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e7200309b81f4066cf36a96efeec646716ca74afd73d159045169263db891133"},
|
||||||
|
{file = "cython-3.2.2-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e72ee88a9a5381d30a6da116a3c8352730b9b038a49ed9bc5c3d0ed6d69b06c"},
|
||||||
|
{file = "cython-3.2.2-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e35ff0f1bb3a7a5c40afb8fb540e4178b6551909f10748bf39d323f8140ccf3"},
|
||||||
|
{file = "cython-3.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:b223c1f84c3420c24f6a4858e979524bd35a79437a5839e29d41201c87ed119d"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:a6387e3ad31342443916db9a419509935fddd8d4cbac34aab9c895ae55326a56"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:436eb562d0affbc0b959f62f3f9c1ed251b9499e4f29c1d19514ae859894b6bf"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f560ff3aea5b5df93853ec7bf1a1e9623d6d511f4192f197559aca18fca43392"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d8c93fe128b58942832b1fcac96e48f93c2c69b569eff0d38d30fb5995fecfa0"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:b4fe499eed7cd70b2aa4e096b9ce2588f5e6fdf049b46d40a5e55efcde6e4904"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:14432d7f207245a3c35556155873f494784169297b28978a6204f1c60d31553e"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:820c4a99dbf6b3e6c0300be42b4040b501eff0e1feeb80cfa52c48a346fb0df2"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-win32.whl", hash = "sha256:826cad0ad43ab05a26e873b5d625f64d458dc739ec6fdeecab848b60a91c4252"},
|
||||||
|
{file = "cython-3.2.2-cp39-abi3-win_arm64.whl", hash = "sha256:5f818d40bbcf17e2089e2de7840f0de1c0ca527acf9b044aba79d5f5d8a5bdba"},
|
||||||
|
{file = "cython-3.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff07e784ea748225bbdea07fec0ac451379e9e41a0a84cb57b36db19dd01ae71"},
|
||||||
|
{file = "cython-3.2.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aff11412ed5fc78bd8b148621f4d1034fcad6cfcba468c20cd9f327b4f61ec3e"},
|
||||||
|
{file = "cython-3.2.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca18d9d53c0e2f0c9347478b37532b46e0dc34c704e052ab1b0d8b21a290fc0f"},
|
||||||
|
{file = "cython-3.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:86b1d39a1ea974dd16fe3bcef0df7b64dadd0bd38d05a339f287b48d37cb109f"},
|
||||||
|
{file = "cython-3.2.2-py3-none-any.whl", hash = "sha256:13b99ecb9482aff6a6c12d1ca6feef6940c507af909914b49f568de74fa965fb"},
|
||||||
|
{file = "cython-3.2.2.tar.gz", hash = "sha256:c3add3d483acc73129a61d105389344d792c17e7c1cee24863f16416bd071634"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "django"
|
name = "django"
|
||||||
version = "5.2.9"
|
version = "5.2.9"
|
||||||
@ -1131,6 +1180,27 @@ files = [
|
|||||||
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
|
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "setuptools"
|
||||||
|
version = "80.9.0"
|
||||||
|
description = "Easily download, build, install, upgrade, and uninstall Python packages"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
groups = ["dev"]
|
||||||
|
files = [
|
||||||
|
{file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"},
|
||||||
|
{file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
|
||||||
|
core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
|
||||||
|
cover = ["pytest-cov"]
|
||||||
|
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
|
||||||
|
enabler = ["pytest-enabler (>=2.2)"]
|
||||||
|
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
|
||||||
|
type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sniffio"
|
name = "sniffio"
|
||||||
version = "1.3.1"
|
version = "1.3.1"
|
||||||
@ -1551,4 +1621,4 @@ wsgi = ["a2wsgi"]
|
|||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.1"
|
lock-version = "2.1"
|
||||||
python-versions = ">=3.12"
|
python-versions = ">=3.12"
|
||||||
content-hash = "a08668c23222843b27b3977933c93b261328f43e90f22f35212c6c6f6030e3dc"
|
content-hash = "32ebf260f6792987cb4236fe29ad3329374e063504d507b5a0319684e24a30a8"
|
||||||
|
|||||||
@ -52,7 +52,7 @@ all-frameworks = [
|
|||||||
|
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
requires = ["poetry-core>=2.0.0,<3.0.0", "setuptools", "cython>=3.0.0"]
|
||||||
build-backend = "poetry.core.masonry.api"
|
build-backend = "poetry.core.masonry.api"
|
||||||
|
|
||||||
[tool.black]
|
[tool.black]
|
||||||
@ -95,4 +95,6 @@ isort = "^6.0.1"
|
|||||||
mypy = "^1.17.1"
|
mypy = "^1.17.1"
|
||||||
flake8 = "^7.3.0"
|
flake8 = "^7.3.0"
|
||||||
pytest-asyncio = "^1.3.0"
|
pytest-asyncio = "^1.3.0"
|
||||||
|
cython = "^3.0.0"
|
||||||
|
setuptools = "^80.0.0"
|
||||||
|
|
||||||
|
|||||||
@ -5,17 +5,17 @@ PyServe - HTTP web server written on Python
|
|||||||
__version__ = "0.8.0"
|
__version__ = "0.8.0"
|
||||||
__author__ = "Ilya Glazunov"
|
__author__ = "Ilya Glazunov"
|
||||||
|
|
||||||
from .server import PyServeServer
|
|
||||||
from .config import Config
|
|
||||||
from .asgi_mount import (
|
from .asgi_mount import (
|
||||||
ASGIAppLoader,
|
ASGIAppLoader,
|
||||||
ASGIMountManager,
|
ASGIMountManager,
|
||||||
MountedApp,
|
MountedApp,
|
||||||
|
create_django_app,
|
||||||
create_fastapi_app,
|
create_fastapi_app,
|
||||||
create_flask_app,
|
create_flask_app,
|
||||||
create_django_app,
|
|
||||||
create_starlette_app,
|
create_starlette_app,
|
||||||
)
|
)
|
||||||
|
from .config import Config
|
||||||
|
from .server import PyServeServer
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"PyServeServer",
|
"PyServeServer",
|
||||||
|
|||||||
225
pyserve/_path_matcher.pyx
Normal file
225
pyserve/_path_matcher.pyx
Normal file
@ -0,0 +1,225 @@
|
|||||||
|
# cython: language_level=3
|
||||||
|
# cython: boundscheck=False
|
||||||
|
# cython: wraparound=False
|
||||||
|
# cython: cdivision=True
|
||||||
|
"""
|
||||||
|
Fast path matching module for PyServe.
|
||||||
|
|
||||||
|
This Cython module provides optimized path matching operations
|
||||||
|
for ASGI mount routing, significantly reducing overhead on hot paths.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from cpython.object cimport PyObject
|
||||||
|
|
||||||
|
|
||||||
|
cdef class FastMountedPath:
|
||||||
|
cdef:
|
||||||
|
str _path
|
||||||
|
str _path_with_slash
|
||||||
|
Py_ssize_t _path_len
|
||||||
|
bint _is_root
|
||||||
|
public str name
|
||||||
|
public bint strip_path
|
||||||
|
|
||||||
|
def __cinit__(self):
|
||||||
|
self._path = ""
|
||||||
|
self._path_with_slash = "/"
|
||||||
|
self._path_len = 0
|
||||||
|
self._is_root = 1
|
||||||
|
self.name = ""
|
||||||
|
self.strip_path = 1
|
||||||
|
|
||||||
|
def __init__(self, str path, str name="", bint strip_path=True):
|
||||||
|
cdef Py_ssize_t path_len
|
||||||
|
|
||||||
|
path_len = len(path)
|
||||||
|
if path_len > 1 and path[path_len - 1] == '/':
|
||||||
|
path = path[:path_len - 1]
|
||||||
|
|
||||||
|
self._path = path
|
||||||
|
self._path_len = len(path)
|
||||||
|
self._is_root = 1 if (path == "" or path == "/") else 0
|
||||||
|
self._path_with_slash = path + "/" if self._is_root == 0 else "/"
|
||||||
|
self.name = name if name else path
|
||||||
|
self.strip_path = 1 if strip_path else 0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path(self) -> str:
|
||||||
|
return self._path
|
||||||
|
|
||||||
|
cpdef bint matches(self, str request_path):
|
||||||
|
cdef Py_ssize_t req_len
|
||||||
|
|
||||||
|
if self._is_root:
|
||||||
|
return 1
|
||||||
|
|
||||||
|
req_len = len(request_path)
|
||||||
|
|
||||||
|
if req_len < self._path_len:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
if req_len == self._path_len:
|
||||||
|
return 1 if request_path == self._path else 0
|
||||||
|
|
||||||
|
if request_path[self._path_len] == '/':
|
||||||
|
return 1 if request_path[:self._path_len] == self._path else 0
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
cpdef str get_modified_path(self, str original_path):
|
||||||
|
cdef str new_path
|
||||||
|
|
||||||
|
if not self.strip_path:
|
||||||
|
return original_path
|
||||||
|
|
||||||
|
if self._is_root:
|
||||||
|
return original_path
|
||||||
|
|
||||||
|
new_path = original_path[self._path_len:]
|
||||||
|
|
||||||
|
if not new_path:
|
||||||
|
return "/"
|
||||||
|
|
||||||
|
return new_path
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f"FastMountedPath(path={self._path!r}, name={self.name!r})"
|
||||||
|
|
||||||
|
|
||||||
|
def _get_path_len_neg(mount):
|
||||||
|
return -len(mount.path)
|
||||||
|
|
||||||
|
|
||||||
|
cdef class FastMountManager:
    """Ordered collection of FastMountedPath objects.

    Mounts are kept sorted longest-path-first so the most specific mount
    wins when several prefixes match the same request path.
    """

    cdef:
        list _mounts        # FastMountedPath objects, longest path first
        int _mount_count    # cached len(_mounts) for the hot lookup loop

    def __cinit__(self):
        self._mounts = []
        self._mount_count = 0

    def __init__(self):
        self._mounts = []
        self._mount_count = 0

    cpdef void add_mount(self, FastMountedPath mount):
        """Register a mount and re-sort so longer prefixes are tried first."""
        self._mounts.append(mount)
        # In-place sort (no throwaway list as before); _get_path_len_neg
        # returns -len(path), so ascending order == longest path first.
        self._mounts.sort(key=_get_path_len_neg)
        self._mount_count = len(self._mounts)

    cpdef FastMountedPath get_mount(self, str request_path):
        """Return the first (most specific) mount matching request_path, or None."""
        cdef:
            int i
            FastMountedPath mount

        for i in range(self._mount_count):
            mount = <FastMountedPath>self._mounts[i]
            if mount.matches(request_path):
                return mount

        return None

    cpdef bint remove_mount(self, str path):
        """Remove the mount registered under ``path``.

        Returns 1 when a mount was removed, 0 otherwise.
        """
        cdef:
            int i
            Py_ssize_t path_len
            FastMountedPath mount

        # Normalize a single trailing slash the same way FastMountedPath
        # does on construction, so "/api/" removes the mount stored as "/api".
        path_len = len(path)
        if path_len > 1 and path[path_len - 1] == '/':
            path = path[:path_len - 1]

        for i in range(self._mount_count):
            mount = <FastMountedPath>self._mounts[i]
            if mount._path == path:
                del self._mounts[i]
                self._mount_count -= 1
                return 1

        return 0

    @property
    def mounts(self) -> list:
        # Defensive copy so callers cannot disturb the internal ordering.
        return list(self._mounts)

    @property
    def mount_count(self) -> int:
        return self._mount_count

    cpdef list list_mounts(self):
        """Return a list of dicts describing each registered mount."""
        cdef:
            list result = []
            FastMountedPath mount

        for mount in self._mounts:
            result.append({
                "path": mount._path,
                "name": mount.name,
                "strip_path": mount.strip_path,
            })

        return result
|
||||||
|
|
||||||
|
|
||||||
|
cpdef bint path_matches_prefix(str request_path, str mount_path):
    """Return 1 when request_path equals mount_path or lies beneath it.

    A match requires either exact equality or that the character right
    after the prefix is '/', so "/apix" does NOT match mount "/api".
    """
    cdef:
        Py_ssize_t mount_len = len(mount_path)
        Py_ssize_t req_len = len(request_path)

    # A root mount ("" or "/") accepts every path.
    if mount_len == 0 or mount_path == "/":
        return 1

    if req_len == mount_len:
        return 1 if request_path == mount_path else 0

    # Longer request: must continue with '/' and share the full prefix.
    # (Shorter requests fall through to the final return 0.)
    if req_len > mount_len and request_path[mount_len] == '/':
        return 1 if request_path[:mount_len] == mount_path else 0

    return 0
|
||||||
|
|
||||||
|
|
||||||
|
cpdef str strip_path_prefix(str original_path, str mount_path):
    """Drop mount_path from the front of original_path.

    Assumes the caller already verified the prefix matches; an empty
    remainder is normalized to "/".
    """
    cdef:
        Py_ssize_t mount_len = len(mount_path)
        str remainder

    # A root mount strips nothing.
    if mount_len == 0 or mount_path == "/":
        return original_path

    remainder = original_path[mount_len:]
    return remainder if remainder else "/"
|
||||||
|
|
||||||
|
|
||||||
|
cpdef tuple match_and_modify_path(str request_path, str mount_path, bint strip_path=True):
    """Match request_path against mount_path in one pass.

    Returns (True, path) on a match — with the mount prefix stripped when
    strip_path is true — or (False, None) when the mount does not apply.
    """
    cdef:
        Py_ssize_t mount_len = len(mount_path)
        Py_ssize_t req_len = len(request_path)
        str modified

    # Root mount matches everything and strips nothing.
    # (Previously: `request_path if strip_path else request_path` — a
    # conditional with two identical branches; simplified.)
    if mount_len == 0 or mount_path == "/":
        return (True, request_path)

    if req_len < mount_len:
        return (False, None)

    if req_len == mount_len:
        if request_path == mount_path:
            # Exact hit on the mount root: stripped form is "/".
            return (True, "/" if strip_path else request_path)
        return (False, None)

    # Longer request: next char must be '/' and the prefix must match.
    if request_path[mount_len] == '/' and request_path[:mount_len] == mount_path:
        if strip_path:
            modified = request_path[mount_len:]
            return (True, modified if modified else "/")
        return (True, request_path)

    return (False, None)
|
||||||
168
pyserve/_path_matcher_py.py
Normal file
168
pyserve/_path_matcher_py.py
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
"""
|
||||||
|
Pure Python fallback for _path_matcher when Cython is not available.
|
||||||
|
|
||||||
|
This module provides the same interface as the Cython _path_matcher module,
|
||||||
|
allowing the application to run without compilation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
class FastMountedPath:
    """Pure-Python mount-point descriptor (fallback for the Cython version).

    Stores a normalized mount prefix and answers prefix-match / strip
    queries for incoming request paths.
    """

    __slots__ = ("_path", "_path_with_slash", "_path_len", "_is_root", "name", "strip_path")

    def __init__(self, path: str, name: str = "", strip_path: bool = True):
        # Normalize away a single trailing slash ("/api/" -> "/api"),
        # but leave the bare root "/" untouched.
        if len(path) > 1 and path.endswith("/"):
            path = path[:-1]

        self._path = path
        self._path_len = len(path)
        self._is_root = path in ("", "/")
        self._path_with_slash = "/" if self._is_root else path + "/"
        self.name = name or path
        self.strip_path = strip_path

    @property
    def path(self) -> str:
        return self._path

    def matches(self, request_path: str) -> bool:
        """True when request_path equals the mount path or lies beneath it."""
        if self._is_root:
            return True
        # Either an exact hit, or a descendant ("/api" matches "/api/x"
        # but not "/apix").
        return request_path == self._path or request_path.startswith(self._path_with_slash)

    def get_modified_path(self, original_path: str) -> str:
        """Return original_path with the mount prefix stripped (when enabled)."""
        if not self.strip_path or self._is_root:
            return original_path
        # Empty remainder (exact mount hit) is normalized to "/".
        return original_path[self._path_len:] or "/"

    def __repr__(self) -> str:
        return f"FastMountedPath(path={self._path!r}, name={self.name!r})"
|
||||||
|
|
||||||
|
|
||||||
|
class FastMountManager:
    """Pure-Python registry of FastMountedPath objects.

    Kept sorted longest-path-first so the most specific mount wins when
    several prefixes match the same request path.
    """

    __slots__ = ("_mounts", "_mount_count")

    def __init__(self) -> None:
        self._mounts: List[FastMountedPath] = []
        self._mount_count: int = 0

    def add_mount(self, mount: FastMountedPath) -> None:
        """Register a mount, keeping longer prefixes ahead of shorter ones."""
        self._mounts.append(mount)
        self._mounts = sorted(self._mounts, key=lambda entry: len(entry.path), reverse=True)
        self._mount_count = len(self._mounts)

    def get_mount(self, request_path: str) -> Optional[FastMountedPath]:
        """Return the first (most specific) mount matching request_path, or None."""
        return next((candidate for candidate in self._mounts if candidate.matches(request_path)), None)

    def remove_mount(self, path: str) -> bool:
        """Remove the mount registered under ``path``; True if one was removed."""
        # Normalize a single trailing slash the same way __init__ does.
        if len(path) > 1 and path.endswith("/"):
            path = path[:-1]

        for index, candidate in enumerate(self._mounts):
            if candidate._path == path:
                self._mounts.pop(index)
                self._mount_count -= 1
                return True

        return False

    @property
    def mounts(self) -> List[FastMountedPath]:
        # Defensive copy so callers cannot disturb internal ordering.
        return list(self._mounts)

    @property
    def mount_count(self) -> int:
        return self._mount_count

    def list_mounts(self) -> List[Dict[str, Any]]:
        """Return a list of dicts describing each registered mount."""
        described = []
        for mount in self._mounts:
            described.append({
                "path": mount._path,
                "name": mount.name,
                "strip_path": mount.strip_path,
            })
        return described
|
||||||
|
|
||||||
|
|
||||||
|
def path_matches_prefix(request_path: str, mount_path: str) -> bool:
    """True when request_path equals mount_path or lies beneath it.

    A match requires exact equality or a '/' right after the prefix,
    so "/apix" does NOT match mount "/api".
    """
    # A root mount ("" or "/") accepts every path.
    if not mount_path or mount_path == "/":
        return True

    return request_path == mount_path or request_path.startswith(mount_path + "/")
|
||||||
|
|
||||||
|
|
||||||
|
def strip_path_prefix(original_path: str, mount_path: str) -> str:
    """Return original_path with mount_path removed from the front.

    Assumes the caller already verified the prefix matches; an empty
    remainder is normalized to "/".
    """
    # A root mount strips nothing.
    if not mount_path or mount_path == "/":
        return original_path

    remainder = original_path[len(mount_path):]
    return remainder if remainder else "/"
|
||||||
|
|
||||||
|
|
||||||
|
def match_and_modify_path(request_path: str, mount_path: str, strip_path: bool = True) -> Tuple[bool, Optional[str]]:
    """Match request_path against mount_path in one pass.

    Returns (True, path) on a match — with the mount prefix stripped when
    strip_path is true — or (False, None) when the mount does not apply.
    """
    # Root mount matches everything and strips nothing.
    if not mount_path or mount_path == "/":
        return (True, request_path)

    # Exact hit on the mount root: stripped form is "/".
    if request_path == mount_path:
        return (True, "/" if strip_path else request_path)

    # Descendant path: must continue with '/' right after the prefix.
    if request_path.startswith(mount_path + "/"):
        if not strip_path:
            return (True, request_path)
        stripped = request_path[len(mount_path):]
        return (True, stripped if stripped else "/")

    return (False, None)
|
||||||
@ -8,7 +8,8 @@ This module provides functionality to mount external ASGI/WSGI applications
|
|||||||
import importlib
|
import importlib
|
||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Dict, Any, Optional, Callable, cast
|
from typing import Any, Callable, Dict, Optional, cast
|
||||||
|
|
||||||
from starlette.types import ASGIApp, Receive, Scope, Send
|
from starlette.types import ASGIApp, Receive, Scope, Send
|
||||||
|
|
||||||
from .logging_utils import get_logger
|
from .logging_utils import get_logger
|
||||||
@ -74,20 +75,17 @@ class ASGIAppLoader:
|
|||||||
def _wrap_wsgi(self, wsgi_app: Callable) -> ASGIApp:
|
def _wrap_wsgi(self, wsgi_app: Callable) -> ASGIApp:
|
||||||
try:
|
try:
|
||||||
from a2wsgi import WSGIMiddleware
|
from a2wsgi import WSGIMiddleware
|
||||||
|
|
||||||
return cast(ASGIApp, WSGIMiddleware(wsgi_app))
|
return cast(ASGIApp, WSGIMiddleware(wsgi_app))
|
||||||
except ImportError:
|
except ImportError:
|
||||||
logger.warning("a2wsgi not installed, trying asgiref")
|
logger.warning("a2wsgi not installed, trying asgiref")
|
||||||
try:
|
try:
|
||||||
from asgiref.wsgi import WsgiToAsgi
|
from asgiref.wsgi import WsgiToAsgi
|
||||||
|
|
||||||
return cast(ASGIApp, WsgiToAsgi(wsgi_app))
|
return cast(ASGIApp, WsgiToAsgi(wsgi_app))
|
||||||
except ImportError:
|
except ImportError:
|
||||||
logger.error(
|
logger.error("Neither a2wsgi nor asgiref installed. " "Install with: pip install a2wsgi or pip install asgiref")
|
||||||
"Neither a2wsgi nor asgiref installed. "
|
raise ImportError("WSGI adapter not available. Install a2wsgi or asgiref.")
|
||||||
"Install with: pip install a2wsgi or pip install asgiref"
|
|
||||||
)
|
|
||||||
raise ImportError(
|
|
||||||
"WSGI adapter not available. Install a2wsgi or asgiref."
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_app(self, app_path: str) -> Optional[ASGIApp]:
|
def get_app(self, app_path: str) -> Optional[ASGIApp]:
|
||||||
return self._apps.get(app_path)
|
return self._apps.get(app_path)
|
||||||
@ -132,7 +130,7 @@ class MountedApp:
|
|||||||
if self.path == "":
|
if self.path == "":
|
||||||
return original_path
|
return original_path
|
||||||
|
|
||||||
new_path = original_path[len(self.path):]
|
new_path = original_path[len(self.path) :]
|
||||||
return new_path if new_path else "/"
|
return new_path if new_path else "/"
|
||||||
|
|
||||||
|
|
||||||
@ -215,10 +213,7 @@ class ASGIMountManager:
|
|||||||
modified_scope["path"] = mount.get_modified_path(path)
|
modified_scope["path"] = mount.get_modified_path(path)
|
||||||
modified_scope["root_path"] = scope.get("root_path", "") + mount.path
|
modified_scope["root_path"] = scope.get("root_path", "") + mount.path
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(f"Routing request to mounted app '{mount.name}': " f"{path} -> {modified_scope['path']}")
|
||||||
f"Routing request to mounted app '{mount.name}': "
|
|
||||||
f"{path} -> {modified_scope['path']}"
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await mount.app(modified_scope, receive, send)
|
await mount.app(modified_scope, receive, send)
|
||||||
@ -288,7 +283,8 @@ def create_django_app(
|
|||||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from django.core.asgi import get_asgi_application # type: ignore[import-untyped]
|
from django.core.asgi import get_asgi_application
|
||||||
|
|
||||||
return cast(ASGIApp, get_asgi_application())
|
return cast(ASGIApp, get_asgi_application())
|
||||||
except ImportError as e:
|
except ImportError as e:
|
||||||
logger.error(f"Failed to load Django application: {e}")
|
logger.error(f"Failed to load Django application: {e}")
|
||||||
|
|||||||
@ -1,8 +1,8 @@
|
|||||||
import sys
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from . import PyServeServer, Config, __version__
|
from . import Config, PyServeServer, __version__
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
def main() -> None:
|
||||||
@ -10,30 +10,11 @@ def main() -> None:
|
|||||||
description="PyServe - HTTP web server",
|
description="PyServe - HTTP web server",
|
||||||
prog="pyserve",
|
prog="pyserve",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument("-c", "--config", default="config.yaml", help="Path to configuration file (default: config.yaml)")
|
||||||
"-c", "--config",
|
parser.add_argument("--host", help="Host to bind the server to")
|
||||||
default="config.yaml",
|
parser.add_argument("--port", type=int, help="Port to bind the server to")
|
||||||
help="Path to configuration file (default: config.yaml)"
|
parser.add_argument("--debug", action="store_true", help="Enable debug mode")
|
||||||
)
|
parser.add_argument("--version", action="version", version=f"%(prog)s {__version__}")
|
||||||
parser.add_argument(
|
|
||||||
"--host",
|
|
||||||
help="Host to bind the server to"
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--port",
|
|
||||||
type=int,
|
|
||||||
help="Port to bind the server to"
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--debug",
|
|
||||||
action="store_true",
|
|
||||||
help="Enable debug mode"
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--version",
|
|
||||||
action="version",
|
|
||||||
version=f"%(prog)s {__version__}"
|
|
||||||
)
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
|||||||
@ -1,8 +1,10 @@
|
|||||||
import yaml
|
|
||||||
import os
|
|
||||||
from typing import Dict, Any, List, cast
|
|
||||||
from dataclasses import dataclass, field
|
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Dict, List, cast
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
from .logging_utils import setup_logging
|
from .logging_utils import setup_logging
|
||||||
|
|
||||||
|
|
||||||
@ -84,7 +86,7 @@ class Config:
|
|||||||
@classmethod
|
@classmethod
|
||||||
def from_yaml(cls, file_path: str) -> "Config":
|
def from_yaml(cls, file_path: str) -> "Config":
|
||||||
try:
|
try:
|
||||||
with open(file_path, 'r', encoding='utf-8') as f:
|
with open(file_path, "r", encoding="utf-8") as f:
|
||||||
data = yaml.safe_load(f)
|
data = yaml.safe_load(f)
|
||||||
|
|
||||||
return cls._from_dict(data)
|
return cls._from_dict(data)
|
||||||
@ -99,133 +101,117 @@ class Config:
|
|||||||
def _from_dict(cls, data: Dict[str, Any]) -> "Config":
|
def _from_dict(cls, data: Dict[str, Any]) -> "Config":
|
||||||
config = cls()
|
config = cls()
|
||||||
|
|
||||||
if 'http' in data:
|
if "http" in data:
|
||||||
http_data = data['http']
|
http_data = data["http"]
|
||||||
config.http = HttpConfig(
|
config.http = HttpConfig(
|
||||||
static_dir=http_data.get('static_dir', config.http.static_dir),
|
static_dir=http_data.get("static_dir", config.http.static_dir),
|
||||||
templates_dir=http_data.get('templates_dir', config.http.templates_dir)
|
templates_dir=http_data.get("templates_dir", config.http.templates_dir),
|
||||||
)
|
)
|
||||||
|
|
||||||
if 'server' in data:
|
if "server" in data:
|
||||||
server_data = data['server']
|
server_data = data["server"]
|
||||||
config.server = ServerConfig(
|
config.server = ServerConfig(
|
||||||
host=server_data.get('host', config.server.host),
|
host=server_data.get("host", config.server.host),
|
||||||
port=server_data.get('port', config.server.port),
|
port=server_data.get("port", config.server.port),
|
||||||
backlog=server_data.get('backlog', config.server.backlog),
|
backlog=server_data.get("backlog", config.server.backlog),
|
||||||
default_root=server_data.get('default_root', config.server.default_root),
|
default_root=server_data.get("default_root", config.server.default_root),
|
||||||
proxy_timeout=server_data.get('proxy_timeout', config.server.proxy_timeout),
|
proxy_timeout=server_data.get("proxy_timeout", config.server.proxy_timeout),
|
||||||
redirect_instructions=server_data.get('redirect_instructions', {})
|
redirect_instructions=server_data.get("redirect_instructions", {}),
|
||||||
)
|
)
|
||||||
|
|
||||||
if 'ssl' in data:
|
if "ssl" in data:
|
||||||
ssl_data = data['ssl']
|
ssl_data = data["ssl"]
|
||||||
config.ssl = SSLConfig(
|
config.ssl = SSLConfig(
|
||||||
enabled=ssl_data.get('enabled', config.ssl.enabled),
|
enabled=ssl_data.get("enabled", config.ssl.enabled),
|
||||||
cert_file=ssl_data.get('cert_file', config.ssl.cert_file),
|
cert_file=ssl_data.get("cert_file", config.ssl.cert_file),
|
||||||
key_file=ssl_data.get('key_file', config.ssl.key_file)
|
key_file=ssl_data.get("key_file", config.ssl.key_file),
|
||||||
)
|
)
|
||||||
|
|
||||||
if 'logging' in data:
|
if "logging" in data:
|
||||||
log_data = data['logging']
|
log_data = data["logging"]
|
||||||
format_data = log_data.get('format', {})
|
format_data = log_data.get("format", {})
|
||||||
global_format = LogFormatConfig(
|
global_format = LogFormatConfig(
|
||||||
type=format_data.get('type', 'standard'),
|
type=format_data.get("type", "standard"),
|
||||||
use_colors=format_data.get('use_colors', True),
|
use_colors=format_data.get("use_colors", True),
|
||||||
show_module=format_data.get('show_module', True),
|
show_module=format_data.get("show_module", True),
|
||||||
timestamp_format=format_data.get('timestamp_format', '%Y-%m-%d %H:%M:%S')
|
timestamp_format=format_data.get("timestamp_format", "%Y-%m-%d %H:%M:%S"),
|
||||||
)
|
)
|
||||||
console_data = log_data.get('console', {})
|
console_data = log_data.get("console", {})
|
||||||
console_format_data = console_data.get('format', {})
|
console_format_data = console_data.get("format", {})
|
||||||
console_format = LogFormatConfig(
|
console_format = LogFormatConfig(
|
||||||
type=console_format_data.get('type', global_format.type),
|
type=console_format_data.get("type", global_format.type),
|
||||||
use_colors=console_format_data.get('use_colors', global_format.use_colors),
|
use_colors=console_format_data.get("use_colors", global_format.use_colors),
|
||||||
show_module=console_format_data.get('show_module', global_format.show_module),
|
show_module=console_format_data.get("show_module", global_format.show_module),
|
||||||
timestamp_format=console_format_data.get('timestamp_format', global_format.timestamp_format)
|
timestamp_format=console_format_data.get("timestamp_format", global_format.timestamp_format),
|
||||||
)
|
|
||||||
console_config = LogHandlerConfig(
|
|
||||||
level=console_data.get('level', log_data.get('level', 'INFO')),
|
|
||||||
format=console_format
|
|
||||||
)
|
)
|
||||||
|
console_config = LogHandlerConfig(level=console_data.get("level", log_data.get("level", "INFO")), format=console_format)
|
||||||
files_config = []
|
files_config = []
|
||||||
if 'log_file' in log_data:
|
if "log_file" in log_data:
|
||||||
default_file_format = LogFormatConfig(
|
default_file_format = LogFormatConfig(
|
||||||
type=global_format.type,
|
type=global_format.type, use_colors=False, show_module=global_format.show_module, timestamp_format=global_format.timestamp_format
|
||||||
use_colors=False,
|
|
||||||
show_module=global_format.show_module,
|
|
||||||
timestamp_format=global_format.timestamp_format
|
|
||||||
)
|
)
|
||||||
default_file = LogFileConfig(
|
default_file = LogFileConfig(
|
||||||
path=log_data['log_file'],
|
path=log_data["log_file"],
|
||||||
level=log_data.get('level', 'INFO'),
|
level=log_data.get("level", "INFO"),
|
||||||
format=default_file_format,
|
format=default_file_format,
|
||||||
loggers=[], # Empty list means including all loggers
|
loggers=[], # Empty list means including all loggers
|
||||||
max_bytes=10 * 1024 * 1024,
|
max_bytes=10 * 1024 * 1024,
|
||||||
backup_count=5
|
backup_count=5,
|
||||||
)
|
)
|
||||||
files_config.append(default_file)
|
files_config.append(default_file)
|
||||||
|
|
||||||
if 'files' in log_data:
|
if "files" in log_data:
|
||||||
for file_data in log_data['files']:
|
for file_data in log_data["files"]:
|
||||||
file_format_data = file_data.get('format', {})
|
file_format_data = file_data.get("format", {})
|
||||||
file_format = LogFormatConfig(
|
file_format = LogFormatConfig(
|
||||||
type=file_format_data.get('type', global_format.type),
|
type=file_format_data.get("type", global_format.type),
|
||||||
use_colors=file_format_data.get('use_colors', False),
|
use_colors=file_format_data.get("use_colors", False),
|
||||||
show_module=file_format_data.get('show_module', global_format.show_module),
|
show_module=file_format_data.get("show_module", global_format.show_module),
|
||||||
timestamp_format=file_format_data.get('timestamp_format', global_format.timestamp_format)
|
timestamp_format=file_format_data.get("timestamp_format", global_format.timestamp_format),
|
||||||
)
|
)
|
||||||
file_config = LogFileConfig(
|
file_config = LogFileConfig(
|
||||||
path=file_data.get('path', './logs/pyserve.log'),
|
path=file_data.get("path", "./logs/pyserve.log"),
|
||||||
level=file_data.get('level', log_data.get('level', 'INFO')),
|
level=file_data.get("level", log_data.get("level", "INFO")),
|
||||||
format=file_format,
|
format=file_format,
|
||||||
loggers=file_data.get('loggers', []),
|
loggers=file_data.get("loggers", []),
|
||||||
max_bytes=file_data.get('max_bytes', 10 * 1024 * 1024),
|
max_bytes=file_data.get("max_bytes", 10 * 1024 * 1024),
|
||||||
backup_count=file_data.get('backup_count', 5)
|
backup_count=file_data.get("backup_count", 5),
|
||||||
)
|
)
|
||||||
files_config.append(file_config)
|
files_config.append(file_config)
|
||||||
|
|
||||||
if 'show_module' in console_format_data:
|
if "show_module" in console_format_data:
|
||||||
print(
|
print("\033[33mWARNING: Parameter 'show_module' in console.format in development and may work incorrectly\033[0m")
|
||||||
"\033[33mWARNING: Parameter 'show_module' in console.format in development and may work incorrectly\033[0m"
|
console_config.format.show_module = console_format_data.get("show_module")
|
||||||
)
|
|
||||||
console_config.format.show_module = console_format_data.get('show_module')
|
|
||||||
|
|
||||||
for i, file_data in enumerate(log_data.get('files', [])):
|
for i, file_data in enumerate(log_data.get("files", [])):
|
||||||
if 'format' in file_data and 'show_module' in file_data['format']:
|
if "format" in file_data and "show_module" in file_data["format"]:
|
||||||
print(
|
print(f"\033[33mWARNING: Parameter 'show_module' in files[{i}].format in development and may work incorrectly\033[0m")
|
||||||
f"\033[33mWARNING: Parameter 'show_module' in files[{i}].format in development and may work incorrectly\033[0m"
|
|
||||||
)
|
|
||||||
|
|
||||||
if not files_config:
|
if not files_config:
|
||||||
default_file_format = LogFormatConfig(
|
default_file_format = LogFormatConfig(
|
||||||
type=global_format.type,
|
type=global_format.type, use_colors=False, show_module=global_format.show_module, timestamp_format=global_format.timestamp_format
|
||||||
use_colors=False,
|
|
||||||
show_module=global_format.show_module,
|
|
||||||
timestamp_format=global_format.timestamp_format
|
|
||||||
)
|
)
|
||||||
default_file = LogFileConfig(
|
default_file = LogFileConfig(
|
||||||
path='./logs/pyserve.log',
|
path="./logs/pyserve.log",
|
||||||
level=log_data.get('level', 'INFO'),
|
level=log_data.get("level", "INFO"),
|
||||||
format=default_file_format,
|
format=default_file_format,
|
||||||
loggers=[],
|
loggers=[],
|
||||||
max_bytes=10 * 1024 * 1024,
|
max_bytes=10 * 1024 * 1024,
|
||||||
backup_count=5
|
backup_count=5,
|
||||||
)
|
)
|
||||||
files_config.append(default_file)
|
files_config.append(default_file)
|
||||||
|
|
||||||
config.logging = LoggingConfig(
|
config.logging = LoggingConfig(
|
||||||
level=log_data.get('level', 'INFO'),
|
level=log_data.get("level", "INFO"),
|
||||||
console_output=log_data.get('console_output', True),
|
console_output=log_data.get("console_output", True),
|
||||||
format=global_format,
|
format=global_format,
|
||||||
console=console_config,
|
console=console_config,
|
||||||
files=files_config
|
files=files_config,
|
||||||
)
|
)
|
||||||
|
|
||||||
if 'extensions' in data:
|
if "extensions" in data:
|
||||||
for ext_data in data['extensions']:
|
for ext_data in data["extensions"]:
|
||||||
extension = ExtensionConfig(
|
extension = ExtensionConfig(type=ext_data.get("type", ""), config=ext_data.get("config", {}))
|
||||||
type=ext_data.get('type', ''),
|
|
||||||
config=ext_data.get('config', {})
|
|
||||||
)
|
|
||||||
config.extensions.append(extension)
|
config.extensions.append(extension)
|
||||||
|
|
||||||
return config
|
return config
|
||||||
@ -245,14 +231,14 @@ class Config:
|
|||||||
if not (1 <= self.server.port <= 65535):
|
if not (1 <= self.server.port <= 65535):
|
||||||
errors.append(f"Invalid port: {self.server.port}")
|
errors.append(f"Invalid port: {self.server.port}")
|
||||||
|
|
||||||
valid_log_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
|
valid_log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
|
||||||
if self.logging.level.upper() not in valid_log_levels:
|
if self.logging.level.upper() not in valid_log_levels:
|
||||||
errors.append(f"Invalid logging level: {self.logging.level}")
|
errors.append(f"Invalid logging level: {self.logging.level}")
|
||||||
|
|
||||||
if self.logging.console.level.upper() not in valid_log_levels:
|
if self.logging.console.level.upper() not in valid_log_levels:
|
||||||
errors.append(f"Invalid console logging level: {self.logging.console.level}")
|
errors.append(f"Invalid console logging level: {self.logging.console.level}")
|
||||||
|
|
||||||
valid_format_types = ['standard', 'json']
|
valid_format_types = ["standard", "json"]
|
||||||
|
|
||||||
if self.logging.format.type not in valid_format_types:
|
if self.logging.format.type not in valid_format_types:
|
||||||
errors.append(f"Invalid logging format type: {self.logging.format.type}")
|
errors.append(f"Invalid logging format type: {self.logging.format.type}")
|
||||||
@ -283,40 +269,40 @@ class Config:
|
|||||||
|
|
||||||
def setup_logging(self) -> None:
|
def setup_logging(self) -> None:
|
||||||
config_dict = {
|
config_dict = {
|
||||||
'level': self.logging.level,
|
"level": self.logging.level,
|
||||||
'console_output': self.logging.console_output,
|
"console_output": self.logging.console_output,
|
||||||
'format': {
|
"format": {
|
||||||
'type': self.logging.format.type,
|
"type": self.logging.format.type,
|
||||||
'use_colors': self.logging.format.use_colors,
|
"use_colors": self.logging.format.use_colors,
|
||||||
'show_module': self.logging.format.show_module,
|
"show_module": self.logging.format.show_module,
|
||||||
'timestamp_format': self.logging.format.timestamp_format
|
"timestamp_format": self.logging.format.timestamp_format,
|
||||||
},
|
},
|
||||||
'console': {
|
"console": {
|
||||||
'level': self.logging.console.level,
|
"level": self.logging.console.level,
|
||||||
'format': {
|
"format": {
|
||||||
'type': self.logging.console.format.type,
|
"type": self.logging.console.format.type,
|
||||||
'use_colors': self.logging.console.format.use_colors,
|
"use_colors": self.logging.console.format.use_colors,
|
||||||
'show_module': self.logging.console.format.show_module,
|
"show_module": self.logging.console.format.show_module,
|
||||||
'timestamp_format': self.logging.console.format.timestamp_format
|
"timestamp_format": self.logging.console.format.timestamp_format,
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
'files': []
|
"files": [],
|
||||||
}
|
}
|
||||||
|
|
||||||
for file_config in self.logging.files:
|
for file_config in self.logging.files:
|
||||||
file_dict = {
|
file_dict = {
|
||||||
'path': file_config.path,
|
"path": file_config.path,
|
||||||
'level': file_config.level,
|
"level": file_config.level,
|
||||||
'loggers': file_config.loggers,
|
"loggers": file_config.loggers,
|
||||||
'max_bytes': file_config.max_bytes,
|
"max_bytes": file_config.max_bytes,
|
||||||
'backup_count': file_config.backup_count,
|
"backup_count": file_config.backup_count,
|
||||||
'format': {
|
"format": {
|
||||||
'type': file_config.format.type,
|
"type": file_config.format.type,
|
||||||
'use_colors': file_config.format.use_colors,
|
"use_colors": file_config.format.use_colors,
|
||||||
'show_module': file_config.format.show_module,
|
"show_module": file_config.format.show_module,
|
||||||
'timestamp_format': file_config.format.timestamp_format
|
"timestamp_format": file_config.format.timestamp_format,
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
cast(List[Dict[str, Any]], config_dict['files']).append(file_dict)
|
cast(List[Dict[str, Any]], config_dict["files"]).append(file_dict)
|
||||||
|
|
||||||
setup_logging(config_dict)
|
setup_logging(config_dict)
|
||||||
|
|||||||
@ -1,7 +1,9 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import Dict, Any, List, Optional, Type
|
from typing import Any, Dict, List, Optional, Type
|
||||||
|
|
||||||
from starlette.requests import Request
|
from starlette.requests import Request
|
||||||
from starlette.responses import Response
|
from starlette.responses import Response
|
||||||
|
|
||||||
from .logging_utils import get_logger
|
from .logging_utils import get_logger
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
@ -36,6 +38,7 @@ class RoutingExtension(Extension):
|
|||||||
default_proxy_timeout = config.get("default_proxy_timeout", 30.0)
|
default_proxy_timeout = config.get("default_proxy_timeout", 30.0)
|
||||||
self.router = create_router_from_config(regex_locations)
|
self.router = create_router_from_config(regex_locations)
|
||||||
from .routing import RequestHandler
|
from .routing import RequestHandler
|
||||||
|
|
||||||
self.handler = RequestHandler(self.router, default_proxy_timeout=default_proxy_timeout)
|
self.handler = RequestHandler(self.router, default_proxy_timeout=default_proxy_timeout)
|
||||||
|
|
||||||
async def process_request(self, request: Request) -> Optional[Response]:
|
async def process_request(self, request: Request) -> Optional[Response]:
|
||||||
@ -54,11 +57,9 @@ class SecurityExtension(Extension):
|
|||||||
super().__init__(config)
|
super().__init__(config)
|
||||||
self.allowed_ips = config.get("allowed_ips", [])
|
self.allowed_ips = config.get("allowed_ips", [])
|
||||||
self.blocked_ips = config.get("blocked_ips", [])
|
self.blocked_ips = config.get("blocked_ips", [])
|
||||||
self.security_headers = config.get("security_headers", {
|
self.security_headers = config.get(
|
||||||
"X-Content-Type-Options": "nosniff",
|
"security_headers", {"X-Content-Type-Options": "nosniff", "X-Frame-Options": "DENY", "X-XSS-Protection": "1; mode=block"}
|
||||||
"X-Frame-Options": "DENY",
|
)
|
||||||
"X-XSS-Protection": "1; mode=block"
|
|
||||||
})
|
|
||||||
|
|
||||||
async def process_request(self, request: Request) -> Optional[Response]:
|
async def process_request(self, request: Request) -> Optional[Response]:
|
||||||
client_ip = request.client.host if request.client else "unknown"
|
client_ip = request.client.host if request.client else "unknown"
|
||||||
@ -66,11 +67,13 @@ class SecurityExtension(Extension):
|
|||||||
if self.blocked_ips and client_ip in self.blocked_ips:
|
if self.blocked_ips and client_ip in self.blocked_ips:
|
||||||
logger.warning(f"Blocked request from IP: {client_ip}")
|
logger.warning(f"Blocked request from IP: {client_ip}")
|
||||||
from starlette.responses import PlainTextResponse
|
from starlette.responses import PlainTextResponse
|
||||||
|
|
||||||
return PlainTextResponse("403 Forbidden", status_code=403)
|
return PlainTextResponse("403 Forbidden", status_code=403)
|
||||||
|
|
||||||
if self.allowed_ips and client_ip not in self.allowed_ips:
|
if self.allowed_ips and client_ip not in self.allowed_ips:
|
||||||
logger.warning(f"Access denied for IP: {client_ip}")
|
logger.warning(f"Access denied for IP: {client_ip}")
|
||||||
from starlette.responses import PlainTextResponse
|
from starlette.responses import PlainTextResponse
|
||||||
|
|
||||||
return PlainTextResponse("403 Forbidden", status_code=403)
|
return PlainTextResponse("403 Forbidden", status_code=403)
|
||||||
|
|
||||||
return None
|
return None
|
||||||
@ -108,33 +111,30 @@ class MonitoringExtension(Extension):
|
|||||||
async def process_request(self, request: Request) -> Optional[Response]:
|
async def process_request(self, request: Request) -> Optional[Response]:
|
||||||
if self.enable_metrics:
|
if self.enable_metrics:
|
||||||
self.request_count += 1
|
self.request_count += 1
|
||||||
request.state.start_time = __import__('time').time()
|
request.state.start_time = __import__("time").time()
|
||||||
return None
|
return None
|
||||||
|
|
||||||
async def process_response(self, request: Request, response: Response) -> Response:
|
async def process_response(self, request: Request, response: Response) -> Response:
|
||||||
if self.enable_metrics and hasattr(request.state, 'start_time'):
|
if self.enable_metrics and hasattr(request.state, "start_time"):
|
||||||
response_time = __import__('time').time() - request.state.start_time
|
response_time = __import__("time").time() - request.state.start_time
|
||||||
self.response_times.append(response_time)
|
self.response_times.append(response_time)
|
||||||
|
|
||||||
if response.status_code >= 400:
|
if response.status_code >= 400:
|
||||||
self.error_count += 1
|
self.error_count += 1
|
||||||
|
|
||||||
logger.info(f"Request: {request.method} {request.url.path} - "
|
logger.info(f"Request: {request.method} {request.url.path} - " f"Status: {response.status_code} - " f"Time: {response_time:.3f}s")
|
||||||
f"Status: {response.status_code} - "
|
|
||||||
f"Time: {response_time:.3f}s")
|
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def get_metrics(self) -> Dict[str, Any]:
|
def get_metrics(self) -> Dict[str, Any]:
|
||||||
avg_response_time = (sum(self.response_times) / len(self.response_times)
|
avg_response_time = sum(self.response_times) / len(self.response_times) if self.response_times else 0
|
||||||
if self.response_times else 0)
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"request_count": self.request_count,
|
"request_count": self.request_count,
|
||||||
"error_count": self.error_count,
|
"error_count": self.error_count,
|
||||||
"error_rate": self.error_count / max(self.request_count, 1),
|
"error_rate": self.error_count / max(self.request_count, 1),
|
||||||
"avg_response_time": avg_response_time,
|
"avg_response_time": avg_response_time,
|
||||||
"total_response_times": len(self.response_times)
|
"total_response_times": len(self.response_times),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -3,9 +3,10 @@ import logging.handlers
|
|||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Dict, Any, List, cast, Callable
|
from typing import Any, Callable, Dict, List, cast
|
||||||
|
|
||||||
import structlog
|
import structlog
|
||||||
from structlog.types import FilteringBoundLogger, EventDict
|
from structlog.types import EventDict, FilteringBoundLogger
|
||||||
|
|
||||||
from . import __version__
|
from . import __version__
|
||||||
|
|
||||||
@ -21,15 +22,15 @@ class StructlogFilter(logging.Filter):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
for logger_name in self.logger_names:
|
for logger_name in self.logger_names:
|
||||||
if record.name == logger_name or record.name.startswith(logger_name + '.'):
|
if record.name == logger_name or record.name.startswith(logger_name + "."):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
class UvicornStructlogFilter(logging.Filter):
|
class UvicornStructlogFilter(logging.Filter):
|
||||||
def filter(self, record: logging.LogRecord) -> bool:
|
def filter(self, record: logging.LogRecord) -> bool:
|
||||||
if hasattr(record, 'name') and 'uvicorn.access' in record.name:
|
if hasattr(record, "name") and "uvicorn.access" in record.name:
|
||||||
if hasattr(record, 'getMessage'):
|
if hasattr(record, "getMessage"):
|
||||||
msg = record.getMessage()
|
msg = record.getMessage()
|
||||||
if ' - "' in msg and '" ' in msg:
|
if ' - "' in msg and '" ' in msg:
|
||||||
parts = msg.split(' - "')
|
parts = msg.split(' - "')
|
||||||
@ -56,14 +57,14 @@ def add_log_level(logger: FilteringBoundLogger, method_name: str, event_dict: Ev
|
|||||||
|
|
||||||
|
|
||||||
def add_module_info(logger: FilteringBoundLogger, method_name: str, event_dict: EventDict) -> EventDict:
|
def add_module_info(logger: FilteringBoundLogger, method_name: str, event_dict: EventDict) -> EventDict:
|
||||||
if hasattr(logger, '_context') and 'logger_name' in logger._context:
|
if hasattr(logger, "_context") and "logger_name" in logger._context:
|
||||||
logger_name = logger._context['logger_name']
|
logger_name = logger._context["logger_name"]
|
||||||
if logger_name.startswith('pyserve'):
|
if logger_name.startswith("pyserve"):
|
||||||
event_dict["module"] = logger_name
|
event_dict["module"] = logger_name
|
||||||
elif logger_name.startswith('uvicorn'):
|
elif logger_name.startswith("uvicorn"):
|
||||||
event_dict["module"] = 'uvicorn'
|
event_dict["module"] = "uvicorn"
|
||||||
elif logger_name.startswith('starlette'):
|
elif logger_name.startswith("starlette"):
|
||||||
event_dict["module"] = 'starlette'
|
event_dict["module"] = "starlette"
|
||||||
else:
|
else:
|
||||||
event_dict["module"] = logger_name
|
event_dict["module"] = logger_name
|
||||||
return event_dict
|
return event_dict
|
||||||
@ -74,18 +75,19 @@ def filter_module_info(show_module: bool) -> Callable[[FilteringBoundLogger, str
|
|||||||
if not show_module and "module" in event_dict:
|
if not show_module and "module" in event_dict:
|
||||||
del event_dict["module"]
|
del event_dict["module"]
|
||||||
return event_dict
|
return event_dict
|
||||||
|
|
||||||
return processor
|
return processor
|
||||||
|
|
||||||
|
|
||||||
def colored_console_renderer(use_colors: bool = True, show_module: bool = True) -> structlog.dev.ConsoleRenderer:
|
def colored_console_renderer(use_colors: bool = True, show_module: bool = True) -> structlog.dev.ConsoleRenderer:
|
||||||
return structlog.dev.ConsoleRenderer(
|
return structlog.dev.ConsoleRenderer(
|
||||||
colors=use_colors and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(),
|
colors=use_colors and hasattr(sys.stderr, "isatty") and sys.stderr.isatty(),
|
||||||
level_styles={
|
level_styles={
|
||||||
"critical": "\033[35m", # Magenta
|
"critical": "\033[35m", # Magenta
|
||||||
"error": "\033[31m", # Red
|
"error": "\033[31m", # Red
|
||||||
"warning": "\033[33m", # Yellow
|
"warning": "\033[33m", # Yellow
|
||||||
"info": "\033[32m", # Green
|
"info": "\033[32m", # Green
|
||||||
"debug": "\033[36m", # Cyan
|
"debug": "\033[36m", # Cyan
|
||||||
},
|
},
|
||||||
pad_event=25,
|
pad_event=25,
|
||||||
)
|
)
|
||||||
@ -113,43 +115,35 @@ class PyServeLogManager:
|
|||||||
if self.configured:
|
if self.configured:
|
||||||
return
|
return
|
||||||
|
|
||||||
if 'format' not in config and 'console' not in config and 'files' not in config:
|
if "format" not in config and "console" not in config and "files" not in config:
|
||||||
level = config.get('level', 'INFO').upper()
|
level = config.get("level", "INFO").upper()
|
||||||
console_output = config.get('console_output', True)
|
console_output = config.get("console_output", True)
|
||||||
log_file = config.get('log_file', './logs/pyserve.log')
|
log_file = config.get("log_file", "./logs/pyserve.log")
|
||||||
config = {
|
config = {
|
||||||
'level': level,
|
"level": level,
|
||||||
'console_output': console_output,
|
"console_output": console_output,
|
||||||
'format': {
|
"format": {"type": "standard", "use_colors": True, "show_module": True, "timestamp_format": "%Y-%m-%d %H:%M:%S"},
|
||||||
'type': 'standard',
|
"files": [
|
||||||
'use_colors': True,
|
{
|
||||||
'show_module': True,
|
"path": log_file,
|
||||||
'timestamp_format': '%Y-%m-%d %H:%M:%S'
|
"level": level,
|
||||||
},
|
"loggers": [],
|
||||||
'files': [{
|
"max_bytes": 10 * 1024 * 1024,
|
||||||
'path': log_file,
|
"backup_count": 5,
|
||||||
'level': level,
|
"format": {"type": "standard", "use_colors": False, "show_module": True, "timestamp_format": "%Y-%m-%d %H:%M:%S"},
|
||||||
'loggers': [],
|
|
||||||
'max_bytes': 10 * 1024 * 1024,
|
|
||||||
'backup_count': 5,
|
|
||||||
'format': {
|
|
||||||
'type': 'standard',
|
|
||||||
'use_colors': False,
|
|
||||||
'show_module': True,
|
|
||||||
'timestamp_format': '%Y-%m-%d %H:%M:%S'
|
|
||||||
}
|
}
|
||||||
}]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
main_level = config.get('level', 'INFO').upper()
|
main_level = config.get("level", "INFO").upper()
|
||||||
console_output = config.get('console_output', True)
|
console_output = config.get("console_output", True)
|
||||||
|
|
||||||
global_format = config.get('format', {})
|
global_format = config.get("format", {})
|
||||||
console_config = config.get('console', {})
|
console_config = config.get("console", {})
|
||||||
files_config = config.get('files', [])
|
files_config = config.get("files", [])
|
||||||
|
|
||||||
console_format = {**global_format, **console_config.get('format', {})}
|
console_format = {**global_format, **console_config.get("format", {})}
|
||||||
console_level = console_config.get('level', main_level)
|
console_level = console_config.get("level", main_level)
|
||||||
|
|
||||||
self._save_original_handlers()
|
self._save_original_handlers()
|
||||||
self._clear_all_handlers()
|
self._clear_all_handlers()
|
||||||
@ -159,38 +153,33 @@ class PyServeLogManager:
|
|||||||
console_output=console_output,
|
console_output=console_output,
|
||||||
console_format=console_format,
|
console_format=console_format,
|
||||||
console_level=console_level,
|
console_level=console_level,
|
||||||
files_config=files_config
|
files_config=files_config,
|
||||||
)
|
)
|
||||||
|
|
||||||
self._configure_stdlib_loggers(main_level)
|
self._configure_stdlib_loggers(main_level)
|
||||||
|
|
||||||
logger = self.get_logger('pyserve')
|
logger = self.get_logger("pyserve")
|
||||||
logger.info(
|
logger.info(
|
||||||
"PyServe logger initialized",
|
"PyServe logger initialized",
|
||||||
version=__version__,
|
version=__version__,
|
||||||
level=main_level,
|
level=main_level,
|
||||||
console_output=console_output,
|
console_output=console_output,
|
||||||
console_format=console_format.get('type', 'standard')
|
console_format=console_format.get("type", "standard"),
|
||||||
)
|
)
|
||||||
|
|
||||||
for i, file_config in enumerate(files_config):
|
for i, file_config in enumerate(files_config):
|
||||||
logger.info(
|
logger.info(
|
||||||
"File logging configured",
|
"File logging configured",
|
||||||
file_index=i,
|
file_index=i,
|
||||||
path=file_config.get('path'),
|
path=file_config.get("path"),
|
||||||
level=file_config.get('level', main_level),
|
level=file_config.get("level", main_level),
|
||||||
format_type=file_config.get('format', {}).get('type', 'standard')
|
format_type=file_config.get("format", {}).get("type", "standard"),
|
||||||
)
|
)
|
||||||
|
|
||||||
self.configured = True
|
self.configured = True
|
||||||
|
|
||||||
def _configure_structlog(
|
def _configure_structlog(
|
||||||
self,
|
self, main_level: str, console_output: bool, console_format: Dict[str, Any], console_level: str, files_config: List[Dict[str, Any]]
|
||||||
main_level: str,
|
|
||||||
console_output: bool,
|
|
||||||
console_format: Dict[str, Any],
|
|
||||||
console_level: str,
|
|
||||||
files_config: List[Dict[str, Any]]
|
|
||||||
) -> None:
|
) -> None:
|
||||||
shared_processors = [
|
shared_processors = [
|
||||||
structlog.stdlib.filter_by_level,
|
structlog.stdlib.filter_by_level,
|
||||||
@ -202,57 +191,46 @@ class PyServeLogManager:
|
|||||||
]
|
]
|
||||||
|
|
||||||
if console_output:
|
if console_output:
|
||||||
console_show_module = console_format.get('show_module', True)
|
console_show_module = console_format.get("show_module", True)
|
||||||
console_processors = shared_processors.copy()
|
console_processors = shared_processors.copy()
|
||||||
|
|
||||||
console_processors.append(filter_module_info(console_show_module))
|
console_processors.append(filter_module_info(console_show_module))
|
||||||
|
|
||||||
if console_format.get('type') == 'json':
|
if console_format.get("type") == "json":
|
||||||
console_processors.append(json_renderer())
|
console_processors.append(json_renderer())
|
||||||
else:
|
else:
|
||||||
console_processors.append(
|
console_processors.append(colored_console_renderer(console_format.get("use_colors", True), console_show_module))
|
||||||
colored_console_renderer(
|
|
||||||
console_format.get('use_colors', True),
|
|
||||||
console_show_module
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
console_handler = logging.StreamHandler(sys.stdout)
|
console_handler = logging.StreamHandler(sys.stdout)
|
||||||
console_handler.setLevel(getattr(logging, console_level))
|
console_handler.setLevel(getattr(logging, console_level))
|
||||||
console_handler.addFilter(UvicornStructlogFilter())
|
console_handler.addFilter(UvicornStructlogFilter())
|
||||||
|
|
||||||
console_formatter = structlog.stdlib.ProcessorFormatter(
|
console_formatter = structlog.stdlib.ProcessorFormatter(
|
||||||
processor=colored_console_renderer(
|
processor=(
|
||||||
console_format.get('use_colors', True),
|
colored_console_renderer(console_format.get("use_colors", True), console_show_module)
|
||||||
console_show_module
|
if console_format.get("type") != "json"
|
||||||
)
|
else json_renderer()
|
||||||
if console_format.get('type') != 'json'
|
),
|
||||||
else json_renderer(),
|
|
||||||
)
|
)
|
||||||
console_handler.setFormatter(console_formatter)
|
console_handler.setFormatter(console_formatter)
|
||||||
|
|
||||||
root_logger = logging.getLogger()
|
root_logger = logging.getLogger()
|
||||||
root_logger.setLevel(logging.DEBUG)
|
root_logger.setLevel(logging.DEBUG)
|
||||||
root_logger.addHandler(console_handler)
|
root_logger.addHandler(console_handler)
|
||||||
self.handlers['console'] = console_handler
|
self.handlers["console"] = console_handler
|
||||||
|
|
||||||
for i, file_config in enumerate(files_config):
|
for i, file_config in enumerate(files_config):
|
||||||
file_path = file_config.get('path', './logs/pyserve.log')
|
file_path = file_config.get("path", "./logs/pyserve.log")
|
||||||
file_level = file_config.get('level', main_level)
|
file_level = file_config.get("level", main_level)
|
||||||
file_loggers = file_config.get('loggers', [])
|
file_loggers = file_config.get("loggers", [])
|
||||||
max_bytes = file_config.get('max_bytes', 10 * 1024 * 1024)
|
max_bytes = file_config.get("max_bytes", 10 * 1024 * 1024)
|
||||||
backup_count = file_config.get('backup_count', 5)
|
backup_count = file_config.get("backup_count", 5)
|
||||||
file_format = file_config.get('format', {})
|
file_format = file_config.get("format", {})
|
||||||
file_show_module = file_format.get('show_module', True)
|
file_show_module = file_format.get("show_module", True)
|
||||||
|
|
||||||
self._ensure_log_directory(file_path)
|
self._ensure_log_directory(file_path)
|
||||||
|
|
||||||
file_handler = logging.handlers.RotatingFileHandler(
|
file_handler = logging.handlers.RotatingFileHandler(file_path, maxBytes=max_bytes, backupCount=backup_count, encoding="utf-8")
|
||||||
file_path,
|
|
||||||
maxBytes=max_bytes,
|
|
||||||
backupCount=backup_count,
|
|
||||||
encoding='utf-8'
|
|
||||||
)
|
|
||||||
file_handler.setLevel(getattr(logging, file_level))
|
file_handler.setLevel(getattr(logging, file_level))
|
||||||
|
|
||||||
if file_loggers:
|
if file_loggers:
|
||||||
@ -262,15 +240,13 @@ class PyServeLogManager:
|
|||||||
file_processors.append(filter_module_info(file_show_module))
|
file_processors.append(filter_module_info(file_show_module))
|
||||||
|
|
||||||
file_formatter = structlog.stdlib.ProcessorFormatter(
|
file_formatter = structlog.stdlib.ProcessorFormatter(
|
||||||
processor=json_renderer()
|
processor=json_renderer() if file_format.get("type") == "json" else plain_console_renderer(file_show_module),
|
||||||
if file_format.get('type') == 'json'
|
|
||||||
else plain_console_renderer(file_show_module),
|
|
||||||
)
|
)
|
||||||
file_handler.setFormatter(file_formatter)
|
file_handler.setFormatter(file_formatter)
|
||||||
|
|
||||||
root_logger = logging.getLogger()
|
root_logger = logging.getLogger()
|
||||||
root_logger.addHandler(file_handler)
|
root_logger.addHandler(file_handler)
|
||||||
self.handlers[f'file_{i}'] = file_handler
|
self.handlers[f"file_{i}"] = file_handler
|
||||||
|
|
||||||
base_processors = [
|
base_processors = [
|
||||||
structlog.stdlib.filter_by_level,
|
structlog.stdlib.filter_by_level,
|
||||||
@ -293,14 +269,14 @@ class PyServeLogManager:
|
|||||||
|
|
||||||
def _configure_stdlib_loggers(self, main_level: str) -> None:
|
def _configure_stdlib_loggers(self, main_level: str) -> None:
|
||||||
library_configs = {
|
library_configs = {
|
||||||
'uvicorn': 'DEBUG' if main_level == 'DEBUG' else 'WARNING',
|
"uvicorn": "DEBUG" if main_level == "DEBUG" else "WARNING",
|
||||||
'uvicorn.access': 'DEBUG' if main_level == 'DEBUG' else 'WARNING',
|
"uvicorn.access": "DEBUG" if main_level == "DEBUG" else "WARNING",
|
||||||
'uvicorn.error': 'DEBUG' if main_level == 'DEBUG' else 'ERROR',
|
"uvicorn.error": "DEBUG" if main_level == "DEBUG" else "ERROR",
|
||||||
'uvicorn.asgi': 'DEBUG' if main_level == 'DEBUG' else 'WARNING',
|
"uvicorn.asgi": "DEBUG" if main_level == "DEBUG" else "WARNING",
|
||||||
'starlette': 'DEBUG' if main_level == 'DEBUG' else 'WARNING',
|
"starlette": "DEBUG" if main_level == "DEBUG" else "WARNING",
|
||||||
'asyncio': 'WARNING',
|
"asyncio": "WARNING",
|
||||||
'concurrent.futures': 'WARNING',
|
"concurrent.futures": "WARNING",
|
||||||
'multiprocessing': 'WARNING',
|
"multiprocessing": "WARNING",
|
||||||
}
|
}
|
||||||
|
|
||||||
for logger_name, level in library_configs.items():
|
for logger_name, level in library_configs.items():
|
||||||
@ -309,7 +285,7 @@ class PyServeLogManager:
|
|||||||
logger.propagate = True
|
logger.propagate = True
|
||||||
|
|
||||||
def _save_original_handlers(self) -> None:
|
def _save_original_handlers(self) -> None:
|
||||||
logger_names = ['', 'uvicorn', 'uvicorn.access', 'uvicorn.error', 'starlette']
|
logger_names = ["", "uvicorn", "uvicorn.access", "uvicorn.error", "starlette"]
|
||||||
|
|
||||||
for name in logger_names:
|
for name in logger_names:
|
||||||
logger = logging.getLogger(name)
|
logger = logging.getLogger(name)
|
||||||
@ -320,7 +296,7 @@ class PyServeLogManager:
|
|||||||
for handler in root_logger.handlers[:]:
|
for handler in root_logger.handlers[:]:
|
||||||
root_logger.removeHandler(handler)
|
root_logger.removeHandler(handler)
|
||||||
|
|
||||||
logger_names = ['uvicorn', 'uvicorn.access', 'uvicorn.error', 'starlette']
|
logger_names = ["uvicorn", "uvicorn.access", "uvicorn.error", "starlette"]
|
||||||
for name in logger_names:
|
for name in logger_names:
|
||||||
logger = logging.getLogger(name)
|
logger = logging.getLogger(name)
|
||||||
for handler in logger.handlers[:]:
|
for handler in logger.handlers[:]:
|
||||||
@ -335,14 +311,17 @@ class PyServeLogManager:
|
|||||||
def get_logger(self, name: str) -> structlog.stdlib.BoundLogger:
|
def get_logger(self, name: str) -> structlog.stdlib.BoundLogger:
|
||||||
if not self._structlog_configured:
|
if not self._structlog_configured:
|
||||||
structlog.configure(
|
structlog.configure(
|
||||||
processors=cast(Any, [
|
processors=cast(
|
||||||
structlog.stdlib.filter_by_level,
|
Any,
|
||||||
add_timestamp,
|
[
|
||||||
add_log_level,
|
structlog.stdlib.filter_by_level,
|
||||||
structlog.processors.StackInfoRenderer(),
|
add_timestamp,
|
||||||
structlog.processors.format_exc_info,
|
add_log_level,
|
||||||
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
|
structlog.processors.StackInfoRenderer(),
|
||||||
]),
|
structlog.processors.format_exc_info,
|
||||||
|
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
|
||||||
|
],
|
||||||
|
),
|
||||||
context_class=dict,
|
context_class=dict,
|
||||||
logger_factory=structlog.stdlib.LoggerFactory(),
|
logger_factory=structlog.stdlib.LoggerFactory(),
|
||||||
wrapper_class=structlog.stdlib.BoundLogger,
|
wrapper_class=structlog.stdlib.BoundLogger,
|
||||||
@ -370,16 +349,8 @@ class PyServeLogManager:
|
|||||||
handler.close()
|
handler.close()
|
||||||
del self.handlers[name]
|
del self.handlers[name]
|
||||||
|
|
||||||
def create_access_log(
|
def create_access_log(self, method: str, path: str, status_code: int, response_time: float, client_ip: str, user_agent: str = "") -> None:
|
||||||
self,
|
access_logger = self.get_logger("pyserve.access")
|
||||||
method: str,
|
|
||||||
path: str,
|
|
||||||
status_code: int,
|
|
||||||
response_time: float,
|
|
||||||
client_ip: str,
|
|
||||||
user_agent: str = ""
|
|
||||||
) -> None:
|
|
||||||
access_logger = self.get_logger('pyserve.access')
|
|
||||||
access_logger.info(
|
access_logger.info(
|
||||||
"HTTP access",
|
"HTTP access",
|
||||||
method=method,
|
method=method,
|
||||||
@ -388,7 +359,7 @@ class PyServeLogManager:
|
|||||||
response_time_ms=round(response_time * 1000, 2),
|
response_time_ms=round(response_time * 1000, 2),
|
||||||
client_ip=client_ip,
|
client_ip=client_ip,
|
||||||
user_agent=user_agent,
|
user_agent=user_agent,
|
||||||
timestamp_format="access"
|
timestamp_format="access",
|
||||||
)
|
)
|
||||||
|
|
||||||
def shutdown(self) -> None:
|
def shutdown(self) -> None:
|
||||||
@ -416,14 +387,7 @@ def get_logger(name: str) -> structlog.stdlib.BoundLogger:
|
|||||||
return log_manager.get_logger(name)
|
return log_manager.get_logger(name)
|
||||||
|
|
||||||
|
|
||||||
def create_access_log(
|
def create_access_log(method: str, path: str, status_code: int, response_time: float, client_ip: str, user_agent: str = "") -> None:
|
||||||
method: str,
|
|
||||||
path: str,
|
|
||||||
status_code: int,
|
|
||||||
response_time: float,
|
|
||||||
client_ip: str,
|
|
||||||
user_agent: str = ""
|
|
||||||
) -> None:
|
|
||||||
log_manager.create_access_log(method, path, status_code, response_time, client_ip, user_agent)
|
log_manager.create_access_log(method, path, status_code, response_time, client_ip, user_agent)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
33
pyserve/path_matcher.py
Normal file
33
pyserve/path_matcher.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
"""
|
||||||
|
Path matcher module - uses Cython implementation if available, falls back to pure Python.
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
from pyserve._path_matcher import (
|
||||||
|
FastMountedPath,
|
||||||
|
FastMountManager,
|
||||||
|
match_and_modify_path,
|
||||||
|
path_matches_prefix,
|
||||||
|
strip_path_prefix,
|
||||||
|
)
|
||||||
|
|
||||||
|
CYTHON_AVAILABLE = True
|
||||||
|
except ImportError:
|
||||||
|
from pyserve._path_matcher_py import (
|
||||||
|
FastMountedPath,
|
||||||
|
FastMountManager,
|
||||||
|
match_and_modify_path,
|
||||||
|
path_matches_prefix,
|
||||||
|
strip_path_prefix,
|
||||||
|
)
|
||||||
|
|
||||||
|
CYTHON_AVAILABLE = False
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"FastMountedPath",
|
||||||
|
"FastMountManager",
|
||||||
|
"path_matches_prefix",
|
||||||
|
"strip_path_prefix",
|
||||||
|
"match_and_modify_path",
|
||||||
|
"CYTHON_AVAILABLE",
|
||||||
|
]
|
||||||
@ -1,11 +1,13 @@
|
|||||||
import re
|
|
||||||
import mimetypes
|
import mimetypes
|
||||||
|
import re
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Dict, Any, Optional, Pattern
|
from typing import Any, Dict, Optional, Pattern
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
from starlette.requests import Request
|
from starlette.requests import Request
|
||||||
from starlette.responses import Response, FileResponse, PlainTextResponse
|
from starlette.responses import FileResponse, PlainTextResponse, Response
|
||||||
|
|
||||||
from .logging_utils import get_logger
|
from .logging_utils import get_logger
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
@ -100,8 +102,7 @@ class RequestHandler:
|
|||||||
text = ""
|
text = ""
|
||||||
|
|
||||||
content_type = config.get("content_type", "text/plain")
|
content_type = config.get("content_type", "text/plain")
|
||||||
return PlainTextResponse(text, status_code=status_code,
|
return PlainTextResponse(text, status_code=status_code, media_type=content_type)
|
||||||
media_type=content_type)
|
|
||||||
|
|
||||||
if "proxy_pass" in config:
|
if "proxy_pass" in config:
|
||||||
return await self._handle_proxy(request, config, route_match.params)
|
return await self._handle_proxy(request, config, route_match.params)
|
||||||
@ -171,8 +172,7 @@ class RequestHandler:
|
|||||||
|
|
||||||
return PlainTextResponse("404 Not Found", status_code=404)
|
return PlainTextResponse("404 Not Found", status_code=404)
|
||||||
|
|
||||||
async def _handle_proxy(self, request: Request, config: Dict[str, Any],
|
async def _handle_proxy(self, request: Request, config: Dict[str, Any], params: Dict[str, str]) -> Response:
|
||||||
params: Dict[str, str]) -> Response:
|
|
||||||
proxy_url = config["proxy_pass"]
|
proxy_url = config["proxy_pass"]
|
||||||
|
|
||||||
for key, value in params.items():
|
for key, value in params.items():
|
||||||
@ -197,9 +197,15 @@ class RequestHandler:
|
|||||||
proxy_headers = dict(request.headers)
|
proxy_headers = dict(request.headers)
|
||||||
|
|
||||||
hop_by_hop_headers = [
|
hop_by_hop_headers = [
|
||||||
"connection", "keep-alive", "proxy-authenticate",
|
"connection",
|
||||||
"proxy-authorization", "te", "trailers", "transfer-encoding",
|
"keep-alive",
|
||||||
"upgrade", "host"
|
"proxy-authenticate",
|
||||||
|
"proxy-authorization",
|
||||||
|
"te",
|
||||||
|
"trailers",
|
||||||
|
"transfer-encoding",
|
||||||
|
"upgrade",
|
||||||
|
"host",
|
||||||
]
|
]
|
||||||
for header in hop_by_hop_headers:
|
for header in hop_by_hop_headers:
|
||||||
proxy_headers.pop(header, None)
|
proxy_headers.pop(header, None)
|
||||||
|
|||||||
@ -1,18 +1,19 @@
|
|||||||
import ssl
|
import ssl
|
||||||
import uvicorn
|
|
||||||
import time
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
import uvicorn
|
||||||
from starlette.applications import Starlette
|
from starlette.applications import Starlette
|
||||||
from starlette.requests import Request
|
from starlette.requests import Request
|
||||||
from starlette.responses import Response, PlainTextResponse
|
from starlette.responses import PlainTextResponse, Response
|
||||||
from starlette.routing import Route
|
from starlette.routing import Route
|
||||||
from starlette.types import ASGIApp, Receive, Scope, Send
|
from starlette.types import ASGIApp, Receive, Scope, Send
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional, Dict, Any
|
|
||||||
|
|
||||||
from .config import Config
|
|
||||||
from .extensions import ExtensionManager, ASGIExtension
|
|
||||||
from .logging_utils import get_logger
|
|
||||||
from . import __version__
|
from . import __version__
|
||||||
|
from .config import Config
|
||||||
|
from .extensions import ASGIExtension, ExtensionManager
|
||||||
|
from .logging_utils import get_logger
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
@ -21,7 +22,7 @@ class PyServeMiddleware:
|
|||||||
def __init__(self, app: ASGIApp, extension_manager: ExtensionManager):
|
def __init__(self, app: ASGIApp, extension_manager: ExtensionManager):
|
||||||
self.app = app
|
self.app = app
|
||||||
self.extension_manager = extension_manager
|
self.extension_manager = extension_manager
|
||||||
self.access_logger = get_logger('pyserve.access')
|
self.access_logger = get_logger("pyserve.access")
|
||||||
|
|
||||||
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
||||||
if scope["type"] != "http":
|
if scope["type"] != "http":
|
||||||
@ -48,14 +49,7 @@ class PyServeMiddleware:
|
|||||||
|
|
||||||
await response(scope, receive, send)
|
await response(scope, receive, send)
|
||||||
|
|
||||||
async def _try_asgi_mount(
|
async def _try_asgi_mount(self, scope: Scope, receive: Receive, send: Send, request: Request, start_time: float) -> bool:
|
||||||
self,
|
|
||||||
scope: Scope,
|
|
||||||
receive: Receive,
|
|
||||||
send: Send,
|
|
||||||
request: Request,
|
|
||||||
start_time: float
|
|
||||||
) -> bool:
|
|
||||||
for extension in self.extension_manager.extensions:
|
for extension in self.extension_manager.extensions:
|
||||||
if isinstance(extension, ASGIExtension):
|
if isinstance(extension, ASGIExtension):
|
||||||
mount = extension.get_asgi_handler(request)
|
mount = extension.get_asgi_handler(request)
|
||||||
@ -65,10 +59,7 @@ class PyServeMiddleware:
|
|||||||
modified_scope["path"] = mount.get_modified_path(request.url.path)
|
modified_scope["path"] = mount.get_modified_path(request.url.path)
|
||||||
modified_scope["root_path"] = scope.get("root_path", "") + mount.path
|
modified_scope["root_path"] = scope.get("root_path", "") + mount.path
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(f"Routing to ASGI mount '{mount.name}': " f"{request.url.path} -> {modified_scope['path']}")
|
||||||
f"Routing to ASGI mount '{mount.name}': "
|
|
||||||
f"{request.url.path} -> {modified_scope['path']}"
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
response_started = False
|
response_started = False
|
||||||
@ -92,15 +83,12 @@ class PyServeMiddleware:
|
|||||||
mount=mount.name,
|
mount=mount.name,
|
||||||
status_code=status_code,
|
status_code=status_code,
|
||||||
process_time_ms=process_time,
|
process_time_ms=process_time,
|
||||||
user_agent=request.headers.get("user-agent", "")
|
user_agent=request.headers.get("user-agent", ""),
|
||||||
)
|
)
|
||||||
return True
|
return True
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error in ASGI mount '{mount.name}': {e}")
|
logger.error(f"Error in ASGI mount '{mount.name}': {e}")
|
||||||
error_response = PlainTextResponse(
|
error_response = PlainTextResponse("500 Internal Server Error", status_code=500)
|
||||||
"500 Internal Server Error",
|
|
||||||
status_code=500
|
|
||||||
)
|
|
||||||
await error_response(scope, receive, send)
|
await error_response(scope, receive, send)
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
@ -122,7 +110,7 @@ class PyServeMiddleware:
|
|||||||
path=path,
|
path=path,
|
||||||
status_code=status_code,
|
status_code=status_code,
|
||||||
process_time_ms=process_time,
|
process_time_ms=process_time,
|
||||||
user_agent=request.headers.get("user-agent", "")
|
user_agent=request.headers.get("user-agent", ""),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -145,27 +133,13 @@ class PyServeServer:
|
|||||||
if ext_config.type == "routing":
|
if ext_config.type == "routing":
|
||||||
config.setdefault("default_proxy_timeout", self.config.server.proxy_timeout)
|
config.setdefault("default_proxy_timeout", self.config.server.proxy_timeout)
|
||||||
|
|
||||||
self.extension_manager.load_extension(
|
self.extension_manager.load_extension(ext_config.type, config)
|
||||||
ext_config.type,
|
|
||||||
config
|
|
||||||
)
|
|
||||||
|
|
||||||
def _create_app(self) -> None:
|
def _create_app(self) -> None:
|
||||||
routes = [
|
routes = [
|
||||||
Route("/health", self._health_check, methods=["GET"]),
|
Route("/health", self._health_check, methods=["GET"]),
|
||||||
Route("/metrics", self._metrics, methods=["GET"]),
|
Route("/metrics", self._metrics, methods=["GET"]),
|
||||||
Route(
|
Route("/{path:path}", self._catch_all, methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]),
|
||||||
"/{path:path}",
|
|
||||||
self._catch_all,
|
|
||||||
methods=[
|
|
||||||
"GET",
|
|
||||||
"POST",
|
|
||||||
"PUT",
|
|
||||||
"DELETE",
|
|
||||||
"PATCH",
|
|
||||||
"OPTIONS"
|
|
||||||
]
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
self.app = Starlette(routes=routes)
|
self.app = Starlette(routes=routes)
|
||||||
@ -178,19 +152,16 @@ class PyServeServer:
|
|||||||
metrics = {}
|
metrics = {}
|
||||||
|
|
||||||
for extension in self.extension_manager.extensions:
|
for extension in self.extension_manager.extensions:
|
||||||
if hasattr(extension, 'get_metrics'):
|
if hasattr(extension, "get_metrics"):
|
||||||
try:
|
try:
|
||||||
ext_metrics = getattr(extension, 'get_metrics')()
|
ext_metrics = getattr(extension, "get_metrics")()
|
||||||
metrics.update(ext_metrics)
|
metrics.update(ext_metrics)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Error getting metrics from extension",
|
logger.error("Error getting metrics from extension", extension=type(extension).__name__, error=str(e))
|
||||||
extension=type(extension).__name__, error=str(e))
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
return Response(
|
|
||||||
json.dumps(metrics, ensure_ascii=False, indent=2),
|
return Response(json.dumps(metrics, ensure_ascii=False, indent=2), media_type="application/json")
|
||||||
media_type="application/json"
|
|
||||||
)
|
|
||||||
|
|
||||||
async def _catch_all(self, request: Request) -> Response:
|
async def _catch_all(self, request: Request) -> Response:
|
||||||
return PlainTextResponse("404 Not Found", status_code=404)
|
return PlainTextResponse("404 Not Found", status_code=404)
|
||||||
@ -209,10 +180,7 @@ class PyServeServer:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||||
context.load_cert_chain(
|
context.load_cert_chain(self.config.ssl.cert_file, self.config.ssl.key_file)
|
||||||
self.config.ssl.cert_file,
|
|
||||||
self.config.ssl.key_file
|
|
||||||
)
|
|
||||||
logger.info("SSL context created successfully")
|
logger.info("SSL context created successfully")
|
||||||
return context
|
return context
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -237,20 +205,17 @@ class PyServeServer:
|
|||||||
}
|
}
|
||||||
|
|
||||||
if ssl_context:
|
if ssl_context:
|
||||||
uvicorn_config.update({
|
uvicorn_config.update(
|
||||||
"ssl_keyfile": self.config.ssl.key_file,
|
{
|
||||||
"ssl_certfile": self.config.ssl.cert_file,
|
"ssl_keyfile": self.config.ssl.key_file,
|
||||||
})
|
"ssl_certfile": self.config.ssl.cert_file,
|
||||||
|
}
|
||||||
|
)
|
||||||
protocol = "https"
|
protocol = "https"
|
||||||
else:
|
else:
|
||||||
protocol = "http"
|
protocol = "http"
|
||||||
|
|
||||||
logger.info(
|
logger.info("Starting PyServe server", protocol=protocol, host=self.config.server.host, port=self.config.server.port)
|
||||||
"Starting PyServe server",
|
|
||||||
protocol=protocol,
|
|
||||||
host=self.config.server.host,
|
|
||||||
port=self.config.server.port
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
assert self.app is not None, "App not initialized"
|
assert self.app is not None, "App not initialized"
|
||||||
@ -306,6 +271,7 @@ class PyServeServer:
|
|||||||
self.extension_manager.cleanup()
|
self.extension_manager.cleanup()
|
||||||
|
|
||||||
from .logging_utils import shutdown_logging
|
from .logging_utils import shutdown_logging
|
||||||
|
|
||||||
shutdown_logging()
|
shutdown_logging()
|
||||||
|
|
||||||
logger.info("Server stopped")
|
logger.info("Server stopped")
|
||||||
@ -317,13 +283,12 @@ class PyServeServer:
|
|||||||
metrics = {"server_status": "running"}
|
metrics = {"server_status": "running"}
|
||||||
|
|
||||||
for extension in self.extension_manager.extensions:
|
for extension in self.extension_manager.extensions:
|
||||||
if hasattr(extension, 'get_metrics'):
|
if hasattr(extension, "get_metrics"):
|
||||||
try:
|
try:
|
||||||
ext_metrics = getattr(extension, 'get_metrics')()
|
ext_metrics = getattr(extension, "get_metrics")()
|
||||||
metrics.update(ext_metrics)
|
metrics.update(ext_metrics)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Error getting metrics from extension",
|
logger.error("Error getting metrics from extension", extension=type(extension).__name__, error=str(e))
|
||||||
extension=type(extension).__name__, error=str(e))
|
|
||||||
|
|
||||||
return metrics
|
return metrics
|
||||||
|
|
||||||
|
|||||||
72
scripts/build_cython.py
Normal file
72
scripts/build_cython.py
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
"""
|
||||||
|
Build script for Cython extensions.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python scripts/build_cython.py build_ext --inplace
|
||||||
|
|
||||||
|
Or via make:
|
||||||
|
make build-cython
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
def build_extensions():
|
||||||
|
try:
|
||||||
|
from Cython.Build import cythonize
|
||||||
|
except ImportError:
|
||||||
|
print("Cython not installed. Skipping Cython build.")
|
||||||
|
print("Install with: pip install cython")
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
from setuptools import Extension
|
||||||
|
from setuptools.dist import Distribution
|
||||||
|
from setuptools.command.build_ext import build_ext
|
||||||
|
except ImportError:
|
||||||
|
print("setuptools not installed. Skipping Cython build.")
|
||||||
|
print("Install with: pip install setuptools")
|
||||||
|
return False
|
||||||
|
|
||||||
|
extensions = [
|
||||||
|
Extension(
|
||||||
|
"pyserve._path_matcher",
|
||||||
|
sources=["pyserve/_path_matcher.pyx"],
|
||||||
|
extra_compile_args=["-O3", "-ffast-math"],
|
||||||
|
define_macros=[("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")],
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
ext_modules = cythonize(
|
||||||
|
extensions,
|
||||||
|
compiler_directives={
|
||||||
|
"language_level": "3",
|
||||||
|
"boundscheck": False,
|
||||||
|
"wraparound": False,
|
||||||
|
"cdivision": True,
|
||||||
|
"embedsignature": True,
|
||||||
|
},
|
||||||
|
annotate=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
dist = Distribution({"ext_modules": ext_modules})
|
||||||
|
dist.package_dir = {"": "."}
|
||||||
|
|
||||||
|
cmd = build_ext(dist)
|
||||||
|
cmd.ensure_finalized()
|
||||||
|
cmd.inplace = True
|
||||||
|
cmd.run()
|
||||||
|
|
||||||
|
print("\nCython extensions built successfully!")
|
||||||
|
print(" - pyserve/_path_matcher" + (".pyd" if sys.platform == "win32" else ".so"))
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
project_root = Path(__file__).parent.parent
|
||||||
|
os.chdir(project_root)
|
||||||
|
|
||||||
|
success = build_extensions()
|
||||||
|
sys.exit(0 if success else 1)
|
||||||
273
tests/test_path_matcher.py
Normal file
273
tests/test_path_matcher.py
Normal file
@ -0,0 +1,273 @@
|
|||||||
|
"""
|
||||||
|
Tests for path_matcher module.
|
||||||
|
|
||||||
|
Run with: pytest tests/test_path_matcher.py -v
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pyserve.path_matcher import (
|
||||||
|
FastMountedPath,
|
||||||
|
FastMountManager,
|
||||||
|
path_matches_prefix,
|
||||||
|
strip_path_prefix,
|
||||||
|
match_and_modify_path,
|
||||||
|
CYTHON_AVAILABLE,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestFastMountedPath:
|
||||||
|
def test_root_mount_matches_everything(self):
|
||||||
|
"""Root mount should match all paths."""
|
||||||
|
mount = FastMountedPath("")
|
||||||
|
|
||||||
|
assert mount.matches("/") is True
|
||||||
|
assert mount.matches("/api") is True
|
||||||
|
assert mount.matches("/api/users") is True
|
||||||
|
assert mount.matches("/anything/at/all") is True
|
||||||
|
|
||||||
|
def test_slash_root_mount_matches_everything(self):
|
||||||
|
"""'/' mount should match all paths."""
|
||||||
|
mount = FastMountedPath("/")
|
||||||
|
|
||||||
|
assert mount.matches("/") is True
|
||||||
|
assert mount.matches("/api") is True
|
||||||
|
assert mount.matches("/api/users") is True
|
||||||
|
|
||||||
|
def test_exact_path_match(self):
|
||||||
|
"""Exact path should match."""
|
||||||
|
mount = FastMountedPath("/api")
|
||||||
|
|
||||||
|
assert mount.matches("/api") is True
|
||||||
|
assert mount.matches("/api/") is True
|
||||||
|
assert mount.matches("/api/users") is True
|
||||||
|
|
||||||
|
def test_no_false_prefix_match(self):
|
||||||
|
"""/api should not match /api-v2."""
|
||||||
|
mount = FastMountedPath("/api")
|
||||||
|
|
||||||
|
assert mount.matches("/api-v2") is False
|
||||||
|
assert mount.matches("/api2") is False
|
||||||
|
assert mount.matches("/apiv2") is False
|
||||||
|
|
||||||
|
def test_shorter_path_no_match(self):
|
||||||
|
"""Request path shorter than mount path should not match."""
|
||||||
|
mount = FastMountedPath("/api/v1")
|
||||||
|
|
||||||
|
assert mount.matches("/api") is False
|
||||||
|
assert mount.matches("/ap") is False
|
||||||
|
assert mount.matches("/") is False
|
||||||
|
|
||||||
|
def test_trailing_slash_normalized(self):
|
||||||
|
"""Trailing slashes should be normalized."""
|
||||||
|
mount1 = FastMountedPath("/api/")
|
||||||
|
mount2 = FastMountedPath("/api")
|
||||||
|
|
||||||
|
assert mount1.path == "/api"
|
||||||
|
assert mount2.path == "/api"
|
||||||
|
assert mount1.matches("/api/users") is True
|
||||||
|
assert mount2.matches("/api/users") is True
|
||||||
|
|
||||||
|
def test_get_modified_path_strips_prefix(self):
|
||||||
|
"""Modified path should have prefix stripped."""
|
||||||
|
mount = FastMountedPath("/api")
|
||||||
|
|
||||||
|
assert mount.get_modified_path("/api") == "/"
|
||||||
|
assert mount.get_modified_path("/api/") == "/"
|
||||||
|
assert mount.get_modified_path("/api/users") == "/users"
|
||||||
|
assert mount.get_modified_path("/api/users/123") == "/users/123"
|
||||||
|
|
||||||
|
def test_get_modified_path_no_strip(self):
|
||||||
|
"""With strip_path=False, path should not be modified."""
|
||||||
|
mount = FastMountedPath("/api", strip_path=False)
|
||||||
|
|
||||||
|
assert mount.get_modified_path("/api/users") == "/api/users"
|
||||||
|
assert mount.get_modified_path("/api") == "/api"
|
||||||
|
|
||||||
|
def test_root_mount_modified_path(self):
|
||||||
|
"""Root mount should return original path."""
|
||||||
|
mount = FastMountedPath("")
|
||||||
|
|
||||||
|
assert mount.get_modified_path("/api/users") == "/api/users"
|
||||||
|
assert mount.get_modified_path("/") == "/"
|
||||||
|
|
||||||
|
def test_name_property(self):
|
||||||
|
"""Name should be set correctly."""
|
||||||
|
mount1 = FastMountedPath("/api")
|
||||||
|
mount2 = FastMountedPath("/api", name="API Mount")
|
||||||
|
|
||||||
|
assert mount1.name == "/api"
|
||||||
|
assert mount2.name == "API Mount"
|
||||||
|
|
||||||
|
def test_repr(self):
|
||||||
|
"""Repr should be informative."""
|
||||||
|
mount = FastMountedPath("/api", name="API")
|
||||||
|
assert "FastMountedPath" in repr(mount)
|
||||||
|
assert "/api" in repr(mount)
|
||||||
|
|
||||||
|
|
||||||
|
class TestFastMountManager:
|
||||||
|
def test_empty_manager(self):
|
||||||
|
"""Empty manager should return None."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
|
||||||
|
assert manager.get_mount("/api") is None
|
||||||
|
assert manager.mount_count == 0
|
||||||
|
|
||||||
|
def test_add_mount(self):
|
||||||
|
"""Adding mounts should work."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
mount = FastMountedPath("/api")
|
||||||
|
|
||||||
|
manager.add_mount(mount)
|
||||||
|
|
||||||
|
assert manager.mount_count == 1
|
||||||
|
assert manager.get_mount("/api/users") is mount
|
||||||
|
|
||||||
|
def test_longest_prefix_matching(self):
|
||||||
|
"""Longer prefixes should match first."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
|
||||||
|
api_mount = FastMountedPath("/api", name="api")
|
||||||
|
api_v1_mount = FastMountedPath("/api/v1", name="api_v1")
|
||||||
|
api_v2_mount = FastMountedPath("/api/v2", name="api_v2")
|
||||||
|
|
||||||
|
manager.add_mount(api_mount)
|
||||||
|
manager.add_mount(api_v2_mount)
|
||||||
|
manager.add_mount(api_v1_mount)
|
||||||
|
|
||||||
|
assert manager.get_mount("/api/v1/users").name == "api_v1"
|
||||||
|
assert manager.get_mount("/api/v2/items").name == "api_v2"
|
||||||
|
assert manager.get_mount("/api/v3/other").name == "api"
|
||||||
|
assert manager.get_mount("/api").name == "api"
|
||||||
|
|
||||||
|
def test_remove_mount(self):
|
||||||
|
"""Removing mounts should work."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
|
||||||
|
manager.add_mount(FastMountedPath("/api"))
|
||||||
|
manager.add_mount(FastMountedPath("/admin"))
|
||||||
|
|
||||||
|
assert manager.mount_count == 2
|
||||||
|
|
||||||
|
result = manager.remove_mount("/api")
|
||||||
|
|
||||||
|
assert result is True
|
||||||
|
assert manager.mount_count == 1
|
||||||
|
assert manager.get_mount("/api/users") is None
|
||||||
|
assert manager.get_mount("/admin/users") is not None
|
||||||
|
|
||||||
|
def test_remove_nonexistent_mount(self):
|
||||||
|
"""Removing nonexistent mount should return False."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
|
||||||
|
result = manager.remove_mount("/api")
|
||||||
|
|
||||||
|
assert result is False
|
||||||
|
|
||||||
|
def test_list_mounts(self):
|
||||||
|
"""list_mounts should return mount info."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
|
||||||
|
manager.add_mount(FastMountedPath("/api", name="API"))
|
||||||
|
manager.add_mount(FastMountedPath("/admin", name="Admin"))
|
||||||
|
|
||||||
|
mounts = manager.list_mounts()
|
||||||
|
|
||||||
|
assert len(mounts) == 2
|
||||||
|
assert all("path" in m and "name" in m and "strip_path" in m for m in mounts)
|
||||||
|
|
||||||
|
def test_mounts_property_returns_copy(self):
|
||||||
|
"""mounts property should return a copy."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
manager.add_mount(FastMountedPath("/api"))
|
||||||
|
|
||||||
|
mounts1 = manager.mounts
|
||||||
|
mounts2 = manager.mounts
|
||||||
|
|
||||||
|
assert mounts1 is not mounts2
|
||||||
|
assert mounts1 == mounts2
|
||||||
|
|
||||||
|
|
||||||
|
class TestUtilityFunctions:
|
||||||
|
"""Tests for standalone utility functions."""
|
||||||
|
|
||||||
|
def test_path_matches_prefix_basic(self):
|
||||||
|
"""Basic prefix matching."""
|
||||||
|
assert path_matches_prefix("/api/users", "/api") is True
|
||||||
|
assert path_matches_prefix("/api", "/api") is True
|
||||||
|
assert path_matches_prefix("/api-v2", "/api") is False
|
||||||
|
assert path_matches_prefix("/ap", "/api") is False
|
||||||
|
|
||||||
|
def test_path_matches_prefix_root(self):
|
||||||
|
"""Root prefix matches everything."""
|
||||||
|
assert path_matches_prefix("/anything", "") is True
|
||||||
|
assert path_matches_prefix("/anything", "/") is True
|
||||||
|
|
||||||
|
def test_strip_path_prefix_basic(self):
|
||||||
|
"""Basic path stripping."""
|
||||||
|
assert strip_path_prefix("/api/users", "/api") == "/users"
|
||||||
|
assert strip_path_prefix("/api", "/api") == "/"
|
||||||
|
assert strip_path_prefix("/api/", "/api") == "/"
|
||||||
|
|
||||||
|
def test_strip_path_prefix_root(self):
|
||||||
|
"""Root prefix doesn't strip anything."""
|
||||||
|
assert strip_path_prefix("/api/users", "") == "/api/users"
|
||||||
|
assert strip_path_prefix("/api/users", "/") == "/api/users"
|
||||||
|
|
||||||
|
def test_match_and_modify_combined(self):
|
||||||
|
"""Combined match and modify operation."""
|
||||||
|
matches, path = match_and_modify_path("/api/users", "/api")
|
||||||
|
assert matches is True
|
||||||
|
assert path == "/users"
|
||||||
|
|
||||||
|
matches, path = match_and_modify_path("/api", "/api")
|
||||||
|
assert matches is True
|
||||||
|
assert path == "/"
|
||||||
|
|
||||||
|
matches, path = match_and_modify_path("/other", "/api")
|
||||||
|
assert matches is False
|
||||||
|
assert path is None
|
||||||
|
|
||||||
|
def test_match_and_modify_no_strip(self):
|
||||||
|
"""Combined operation with strip_path=False."""
|
||||||
|
matches, path = match_and_modify_path("/api/users", "/api", strip_path=False)
|
||||||
|
assert matches is True
|
||||||
|
assert path == "/api/users"
|
||||||
|
|
||||||
|
|
||||||
|
class TestCythonAvailability:
|
||||||
|
def test_cython_available_is_bool(self):
|
||||||
|
"""CYTHON_AVAILABLE should be a boolean."""
|
||||||
|
assert isinstance(CYTHON_AVAILABLE, bool)
|
||||||
|
|
||||||
|
def test_module_works_regardless(self):
|
||||||
|
"""Module should work whether Cython is available or not."""
|
||||||
|
mount = FastMountedPath("/test")
|
||||||
|
assert mount.matches("/test/path") is True
|
||||||
|
|
||||||
|
|
||||||
|
class TestPerformance:
|
||||||
|
def test_many_matches(self):
|
||||||
|
"""Should handle many match operations."""
|
||||||
|
mount = FastMountedPath("/api/v1/users")
|
||||||
|
|
||||||
|
for _ in range(10000):
|
||||||
|
assert mount.matches("/api/v1/users/123/posts") is True
|
||||||
|
assert mount.matches("/other/path") is False
|
||||||
|
|
||||||
|
def test_many_mounts(self):
|
||||||
|
"""Should handle many mounts."""
|
||||||
|
manager = FastMountManager()
|
||||||
|
|
||||||
|
for i in range(100):
|
||||||
|
manager.add_mount(FastMountedPath(f"/api/v{i}"))
|
||||||
|
|
||||||
|
assert manager.mount_count == 100
|
||||||
|
|
||||||
|
mount = manager.get_mount("/api/v50/users")
|
||||||
|
assert mount is not None
|
||||||
|
assert mount.path == "/api/v50"
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
pytest.main([__file__, "-v"])
|
||||||
Loading…
x
Reference in New Issue
Block a user