Add test infrastructure and improve docstrings

Added pytest configuration, test dependencies, and initial test files for the backend. Introduced .env.test for test environment variables. Enhanced docstrings for database and session utility functions. Updated .gitignore for test artifacts.
This commit is contained in:
João Vitória Silva
2025-10-08 21:07:31 +01:00
parent 0f7e82faf6
commit 4a5c810772
13 changed files with 2951 additions and 16 deletions

8
.gitignore vendored
View File

@@ -12,6 +12,14 @@ backend/app/*/__pycache__/
backend/app/*/*/__pycache__/
backend/app/*/*/*/__pycache__/
backend/app/*.pyc
backend/tests/__pycache__/
backend/tests/*/__pycache__/
# Tests
backend/.coverage
backend/htmlcov/
backend/coverage.xml
backend/.pytest_cache/
# Logs
backend/app/logs/*.log

6
backend/.env.test Normal file
View File

@@ -0,0 +1,6 @@
# Test Environment Variables for Endurain Backend Tests
SECRET_KEY=test-secret-key-for-testing-purposes-minimum-32-characters-long
JWT_ACCESS_TOKEN_EXPIRE_MINUTES=15
JWT_REFRESH_TOKEN_EXPIRE_DAYS=7
FRONTEND_PROTOCOL=http
DATABASE_URL=sqlite:///:memory:

View File

@@ -1,7 +1,6 @@
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.engine.url import URL
@@ -39,6 +38,21 @@ Base = declarative_base()
def get_db():
"""
Yields a new SQLAlchemy database session.
This generator function creates a new database session using SessionLocal,
yields it for use in database operations, and ensures the session is properly
closed after use. Intended for use as a dependency in FastAPI routes or other
contexts where session management is required.
Yields:
Session: An active SQLAlchemy database session.
Example:
with get_db() as db:
# use db session here
"""
# Create a new database session and return it
db = SessionLocal()

View File

@@ -67,6 +67,19 @@ def create_session_object(
hashed_refresh_token: str,
refresh_token_exp: datetime,
) -> session_schema.UsersSessions:
"""
Creates a UsersSessions object representing a user session with device and request metadata.
Args:
session_id (str): Unique identifier for the session.
user (users_schema.UserRead): The user associated with the session.
request (Request): The HTTP request object containing client information.
hashed_refresh_token (str): The hashed refresh token for the session.
refresh_token_exp (datetime): The expiration datetime for the refresh token.
Returns:
session_schema.UsersSessions: The session object populated with user, device, and request details.
"""
user_agent = get_user_agent(request)
device_info = parse_user_agent(user_agent)

265
backend/poetry.lock generated
View File

@@ -38,7 +38,7 @@ version = "4.11.0"
description = "High-level concurrency and networking framework on top of asyncio or Trio"
optional = false
python-versions = ">=3.9"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"},
{file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"},
@@ -355,7 +355,7 @@ version = "2025.8.3"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.7"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"},
{file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"},
@@ -567,12 +567,129 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main"]
markers = "platform_system == \"Windows\" or os_name == \"nt\" or sys_platform == \"win32\""
groups = ["main", "dev"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
markers = {main = "platform_system == \"Windows\" or os_name == \"nt\" or sys_platform == \"win32\"", dev = "sys_platform == \"win32\""}
[[package]]
name = "coverage"
version = "7.10.7"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"},
{file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"},
{file = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"},
{file = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"},
{file = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"},
{file = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"},
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"},
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"},
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"},
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"},
{file = "coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"},
{file = "coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"},
{file = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"},
{file = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"},
{file = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"},
{file = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"},
{file = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"},
{file = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"},
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"},
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"},
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"},
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"},
{file = "coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"},
{file = "coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"},
{file = "coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"},
{file = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"},
{file = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"},
{file = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"},
{file = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"},
{file = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"},
{file = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"},
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"},
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"},
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"},
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"},
{file = "coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"},
{file = "coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"},
{file = "coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"},
{file = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"},
{file = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"},
{file = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"},
{file = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"},
{file = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"},
{file = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"},
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"},
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"},
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"},
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"},
{file = "coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"},
{file = "coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"},
{file = "coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"},
{file = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"},
{file = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"},
{file = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"},
{file = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"},
{file = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"},
{file = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"},
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"},
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"},
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"},
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"},
{file = "coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"},
{file = "coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"},
{file = "coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"},
{file = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"},
{file = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"},
{file = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"},
{file = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"},
{file = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"},
{file = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"},
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"},
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"},
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"},
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"},
{file = "coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"},
{file = "coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"},
{file = "coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"},
{file = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"},
{file = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"},
{file = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"},
{file = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"},
{file = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"},
{file = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"},
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"},
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"},
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"},
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"},
{file = "coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"},
{file = "coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"},
{file = "coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"},
{file = "coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3"},
{file = "coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c"},
{file = "coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396"},
{file = "coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40"},
{file = "coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594"},
{file = "coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a"},
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b"},
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3"},
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0"},
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f"},
{file = "coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431"},
{file = "coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07"},
{file = "coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"},
{file = "coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"},
]
[package.extras]
toml = ["tomli"]
[[package]]
name = "crashtest"
@@ -845,6 +962,22 @@ files = [
dnspython = ">=2.0.0"
idna = ">=2.0.0"
[[package]]
name = "faker"
version = "33.3.1"
description = "Faker is a Python package that generates fake data for you."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "Faker-33.3.1-py3-none-any.whl", hash = "sha256:ac4cf2f967ce02c898efa50651c43180bd658a7707cfd676fcc5410ad1482c03"},
{file = "faker-33.3.1.tar.gz", hash = "sha256:49dde3b06a5602177bc2ad013149b6f60a290b7154539180d37b6f876ae79b20"},
]
[package.dependencies]
python-dateutil = ">=2.4"
typing-extensions = "*"
[[package]]
name = "fastapi"
version = "0.115.14"
@@ -1228,7 +1361,7 @@ version = "0.16.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
{file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
@@ -1303,7 +1436,7 @@ version = "1.0.9"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
{file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
@@ -1325,7 +1458,7 @@ version = "0.28.1"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
{file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
@@ -1350,7 +1483,7 @@ version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@@ -1383,6 +1516,18 @@ perf = ["ipython"]
test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
type = ["pytest-mypy"]
[[package]]
name = "iniconfig"
version = "2.1.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
{file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
]
[[package]]
name = "installer"
version = "0.7.0"
@@ -2069,7 +2214,7 @@ version = "25.0"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
{file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
@@ -2287,6 +2432,22 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-a
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"]
type = ["mypy (>=1.14.1)"]
[[package]]
name = "pluggy"
version = "1.6.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
{file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["coverage", "pytest", "pytest-benchmark"]
[[package]]
name = "poetry"
version = "2.2.1"
@@ -2636,6 +2797,21 @@ files = [
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygments"
version = "2.19.2"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
{file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
]
[package.extras]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pyotp"
version = "2.9.0"
@@ -2663,13 +2839,74 @@ files = [
{file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"},
]
[[package]]
name = "pytest"
version = "8.4.2"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"},
{file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"},
]
[package.dependencies]
colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
iniconfig = ">=1"
packaging = ">=20"
pluggy = ">=1.5,<2"
pygments = ">=2.7.2"
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.24.0"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"},
{file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"},
]
[package.dependencies]
pytest = ">=8.2,<9"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "pytest-cov"
version = "6.3.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749"},
{file = "pytest_cov-6.3.0.tar.gz", hash = "sha256:35c580e7800f87ce892e687461166e1ac2bcb8fb9e13aea79032518d6e503ff2"},
]
[package.dependencies]
coverage = {version = ">=7.5", extras = ["toml"]}
pluggy = ">=1.2"
pytest = ">=6.2.5"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@@ -3030,7 +3267,7 @@ version = "1.17.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
@@ -3042,7 +3279,7 @@ version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@@ -3268,7 +3505,7 @@ version = "4.15.0"
description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
python-versions = ">=3.9"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
{file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
@@ -3816,4 +4053,4 @@ cffi = ["cffi (>=1.17,<2.0)", "cffi (>=2.0.0b)"]
[metadata]
lock-version = "2.1"
python-versions = "^3.12"
content-hash = "21aaa612a6b2e17d133dfe82f7b3a8bec5141aae082c195c3a3ac151c33eac04"
content-hash = "66eb128347626f74c8da8cd0862d9c071a360d173aeca76519fdf62c4010953c"

View File

@@ -43,6 +43,35 @@ pyotp = "^2.9.0"
qrcode = {extras = ["pil"], version = "^8.2"}
pwdlib = {extras = ["argon2", "bcrypt"], version = "^0.2.1"}
[tool.poetry.group.dev.dependencies]
pytest = "^8.3.4"
pytest-asyncio = "^0.24.0"
pytest-cov = "^6.0.0"
httpx = "^0.28.1"
faker = "^33.1.0"
[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
addopts = "-v --strict-markers --cov=app --cov-report=term-missing --cov-report=html"
[tool.coverage.run]
source = ["app"]
omit = ["*/tests/*", "*/__pycache__/*", "*/alembic/*"]
[tool.coverage.report]
exclude_lines = [
"pragma: no cover",
"def __repr__",
"raise AssertionError",
"raise NotImplementedError",
"if __name__ == .__main__.:",
"if TYPE_CHECKING:",
]
[build-system]
requires = ["poetry-core"]

View File

@@ -0,0 +1,3 @@
"""
Tests for Endurain backend application.
"""

306
backend/tests/conftest.py Normal file
View File

@@ -0,0 +1,306 @@
import os
import sys
from importlib import import_module
from pathlib import Path
from unittest.mock import MagicMock
from dotenv import load_dotenv
import pytest
from fastapi import Request, FastAPI
from fastapi.testclient import TestClient
from sqlalchemy.orm import Session
# Load test environment variables from .env.test before importing app modules,
# so module-level configuration reads the test values (e.g. SECRET_KEY).
env_test_path = Path(__file__).parent.parent / ".env.test"
load_dotenv(dotenv_path=env_test_path)
# Add the app directory to the Python path so the "session.*" / "users.*"
# imports below resolve when tests run from the repository root.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "app"))
import session.router as session_router
import session.password_hasher as session_password_hasher
import session.token_manager as session_token_manager
import users.user.schema as user_schema
# Variables and constants
# Dotted paths of router modules imported by default for test apps.
DEFAULT_ROUTER_MODULES = [
    "session.router",
]
@pytest.fixture
def password_hasher() -> session_password_hasher.PasswordHasher:
    """
    Provide a PasswordHasher instance for tests.

    Returns:
        session_password_hasher.PasswordHasher: Hasher produced by the
        module's ``get_password_hasher`` factory.
    """
    hasher = session_password_hasher.get_password_hasher()
    return hasher
@pytest.fixture
def token_manager() -> session_token_manager.TokenManager:
    """Provide a TokenManager wired with a fixed, test-only secret key.

    Returns:
        session_token_manager.TokenManager: Manager suitable for signing and
        verifying tokens in tests.
    """
    secret = "test-secret-key-for-testing-only-min-32-chars"
    return session_token_manager.TokenManager(secret_key=secret)
@pytest.fixture
def mock_db() -> MagicMock:
    """Provide a stand-in for a database session.

    Returns:
        MagicMock: Mock constrained to the SQLAlchemy ``Session`` interface.
    """
    fake_session = MagicMock(spec=Session)
    return fake_session
@pytest.fixture
def sample_user_read() -> user_schema.UserRead:
    """Provide a predefined active regular user for tests.

    Returns:
        user_schema.UserRead: User with fixed id, name, username, and email.
    """
    attributes = {
        "id": 1,
        "name": "Test User",
        "username": "testuser",
        "email": "test@example.com",
        "active": True,
        "access_type": user_schema.UserAccessType.REGULAR,
    }
    return user_schema.UserRead(**attributes)
@pytest.fixture
def sample_inactive_user() -> user_schema.UserRead:
    """
    Creates and returns a sample inactive user instance for testing purposes.

    Return type annotation added for consistency with the sibling fixtures
    (e.g. ``sample_user_read``).

    Returns:
        user_schema.UserRead: An instance representing an inactive user
        (``active=False``) with predefined attributes.
    """
    return user_schema.UserRead(
        id=2,
        name="Inactive User",
        username="inactive",
        email="inactive@example.com",
        active=False,
        access_type=user_schema.UserAccessType.REGULAR,
    )
@pytest.fixture
def mock_request() -> Request:
    """Provide a mocked HTTP request from a web client at 127.0.0.1.

    Returns:
        Request: MagicMock constrained to the FastAPI ``Request`` interface,
        carrying a browser user-agent and an ``X-Client-Type: web`` header.
    """
    request = MagicMock(spec=Request)
    request.headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
        "X-Client-Type": "web",
    }
    client = MagicMock()
    client.host = "127.0.0.1"
    request.client = client
    return request
def _include_router_if_exists(app: FastAPI, dotted: str):
    """Include ``router`` from the module at *dotted* into *app*, if present.

    Args:
        app (FastAPI): Application that receives the router.
        dotted (str): Dotted module path to import (e.g. 'myapp.api.v1.users').

    Notes:
        Import failures and a missing ``router`` attribute are swallowed on
        purpose, so test apps can be assembled from a partial project layout.
    """
    try:
        candidate = getattr(import_module(dotted), "router", None)
        if candidate is not None:
            app.include_router(candidate)
    except Exception:
        # Best-effort: the module may simply not be part of this project.
        pass
def _override_if_exists(app: FastAPI, dotted: str, attr: str, override_callable):
    """Override a FastAPI dependency provider on *app* when it can be located.

    Args:
        app (FastAPI): Application whose ``dependency_overrides`` is updated.
        dotted (str): Dotted module path expected to hold the provider.
        attr (str): Provider attribute name inside that module.
        override_callable: Callable installed as the replacement dependency.

    Returns:
        The original provider when the override was installed, else ``None``.

    Notes:
        Any failure is swallowed and ``None`` is returned, so callers can
        probe several candidate module paths.
    """
    try:
        module = import_module(dotted)
        provider = getattr(module, attr, None)
        if provider is None:
            return None
        app.dependency_overrides[provider] = override_callable
        return provider
    except Exception:
        return None
@pytest.fixture
def fast_api_app(password_hasher, token_manager, mock_db) -> FastAPI:
    """
    Creates and configures a FastAPI application instance for testing purposes.

    This function sets up the FastAPI app with test-specific dependency
    overrides, including password hasher, token manager, and a mock database.
    It also injects a fake in-memory store for pending multi-factor
    authentication (MFA) attempts, and includes any default routers specified
    in the configuration.

    Args:
        password_hasher: Hasher fixture used in place of the production one.
        token_manager: Token manager fixture used for signing/verification.
        mock_db: Mock database session used instead of a real database.

    Returns:
        FastAPI: A configured FastAPI application instance with all test
        dependencies and routers included.
    """
    app = FastAPI()
    # Include any routers you have configured
    for dotted in DEFAULT_ROUTER_MODULES:
        _include_router_if_exists(app, dotted)
    # Client type lives on app state so tests can flip web/mobile per case.
    app.state._client_type = "web"

    def _client_type_override():
        # Read the current value at call time, not fixture-setup time.
        return app.state._client_type

    class FakePendingMFAStore:
        """
        A fake in-memory store for tracking pending multi-factor
        authentication (MFA) login attempts.

        Primarily used for testing purposes to simulate the behavior of a
        pending MFA store.

        Attributes:
            calls (list): Records each add_pending_login call as a
                (username, user_id) tuple, so tests can assert on it.
        """

        def __init__(self):
            """Initialize the store with an empty call log."""
            self.calls = []

        def add_pending_login(self, username, user_id):
            """
            Record a pending login attempt.

            Args:
                username (str): The username of the user attempting to log in.
                user_id (Any): The unique identifier of the user.

            Returns:
                None
            """
            self.calls.append((username, user_id))

    fake_store = FakePendingMFAStore()
    app.state.fake_store = fake_store
    # These two overrides reference attributes on the session router module;
    # ignore failures if the project layout differs.
    try:
        app.dependency_overrides[
            session_router.session_security.header_client_type_scheme
        ] = _client_type_override
        app.dependency_overrides[
            session_router.session_schema.get_pending_mfa_store
        ] = lambda: fake_store
    except Exception:
        pass
    # Generic overrides
    _override_if_exists(
        app, "session.password_hasher", "get_password_hasher", lambda: password_hasher
    )
    _override_if_exists(
        app,
        "session.session_password_hasher",
        "get_password_hasher",
        lambda: password_hasher,
    )
    _override_if_exists(
        app, "session.token_manager", "get_token_manager", lambda: token_manager
    )
    _override_if_exists(
        app, "session.session_token_manager", "get_token_manager", lambda: token_manager
    )
    # Probe the known candidate module paths for the DB dependency; the
    # short-circuiting `or` stops at the first path that exists.
    _override_if_exists(
        app, "core.database", "get_db", lambda: mock_db
    ) or _override_if_exists(
        app, "core_database", "get_db", lambda: mock_db
    ) or _override_if_exists(
        app, "app.core.database", "get_db", lambda: mock_db
    )
    return app
@pytest.fixture
def fast_api_client(fast_api_app: FastAPI) -> TestClient:
    """Provide a TestClient bound to the test FastAPI application.

    Args:
        fast_api_app (FastAPI): Application under test.

    Returns:
        TestClient: Client for issuing HTTP requests during tests.
    """
    client = TestClient(fast_api_app)
    return client
@pytest.fixture
def set_client_type_web(fast_api_app: FastAPI):
    """Force the application's simulated client type to "web".

    Args:
        fast_api_app (FastAPI): Application whose state is mutated.

    Returns:
        str: The client type now stored on the app state ("web").
    """
    state = fast_api_app.state
    state._client_type = "web"
    return state._client_type
@pytest.fixture
def set_client_type_mobile(fast_api_app: FastAPI):
    """Force the application's simulated client type to "mobile".

    Args:
        fast_api_app (FastAPI): Application whose state is mutated.

    Returns:
        str: The client type now stored on the app state ("mobile").
    """
    state = fast_api_app.state
    state._client_type = "mobile"
    return state._client_type

View File

@@ -0,0 +1,3 @@
"""
Tests for the Endurain backend session module.
"""

View File

@@ -0,0 +1,483 @@
import pytest
import re
import string
from pwdlib import PasswordHash
from pwdlib.hashers.argon2 import Argon2Hasher
from pwdlib.hashers.bcrypt import BcryptHasher
from session.password_hasher import PasswordHasher, PasswordPolicyError
class TestPasswordHasherSecurity:
    """
    Test suite for the PasswordHasher class and related password security functionality.

    This class contains comprehensive tests to ensure the correctness, security, and policy enforcement
    of password hashing, verification, password generation, and password validation logic. The tests cover:
    - Initialization of PasswordHasher with various types of hashers (None, single hasher, iterable, PasswordHash instance, incorrect types).
    - Hashing and verifying passwords, including correct, incorrect, case-sensitive, very long, and Unicode passwords.
    - Ensuring that password hashes are unique and non-empty.
    - Verifying and updating password hashes when algorithms change.
    - Password generation, including length requirements, character class inclusion, and uniqueness.
    - Password validation, enforcing policy requirements such as minimum length, presence of uppercase, lowercase, digits, and special characters.
    - Handling of edge cases such as empty passwords, whitespace-only passwords, and custom minimum length requirements.
    - Ensuring proper error handling and exception raising for invalid inputs and policy violations.

    Each test asserts the expected behavior and error handling to guarantee robust password security mechanisms.
    """

    def test_password_hasher_initialization_with_none(self):
        """
        Test that initializing PasswordHasher with None as the hasher uses the default hasher implementation.
        Verifies that hashing and verifying a password works correctly and that the resulting hash is not empty.
        """
        hasher = PasswordHasher(hasher=None)
        password = "TestPassword123!"
        hashed = hasher.hash_password(password)
        assert hasher.verify(password, hashed), "Default hasher should work correctly"
        assert len(hashed) > 0, "Hash should not be empty"

    def test_password_hasher_initialization_with_passwordhash_instance(self):
        """
        Test that the PasswordHasher can be initialized with a PasswordHash instance and
        correctly hash and verify a password using the provided hasher(s).

        Steps:
        - Create a PasswordHash instance with an Argon2Hasher.
        - Initialize PasswordHasher with the PasswordHash instance.
        - Hash a sample password.
        - Verify that the original password matches the hashed value.

        Asserts:
        - The password verification should succeed, confirming that the PasswordHash instance
          works as expected when passed to PasswordHasher.
        """
        # Create a PasswordHash instance
        pw_hash = PasswordHash([Argon2Hasher()])
        hasher = PasswordHasher(hasher=pw_hash)
        password = "TestPassword123!"
        hashed = hasher.hash_password(password)
        assert hasher.verify(
            password, hashed
        ), "PasswordHash instance should work correctly"

    def test_password_hasher_initialization_with_single_hasher(self):
        """
        Tests the initialization of the PasswordHasher with a single hasher instance.

        This test verifies that:
        - A PasswordHasher can be initialized with a single Argon2Hasher.
        - The hasher can successfully hash a password.
        - The hasher can verify the password against the hashed value.
        """
        # Create a PasswordHash instance with a single hasher
        hasher = PasswordHasher(hasher=Argon2Hasher())
        password = "TestPassword123!"
        hashed = hasher.hash_password(password)
        assert hasher.verify(
            password, hashed
        ), "Iterable of hashers should work correctly"

    def test_password_hasher_initialization_with_iterable(self):
        """
        Test that PasswordHasher can be initialized with an iterable of hasher instances (such as a tuple),
        and that password hashing and verification work correctly with this configuration.
        """
        # Pass as a tuple (iterable that's not a list)
        hasher = PasswordHasher(hasher=(Argon2Hasher(), BcryptHasher()))
        password = "TestPassword123!"
        hashed = hasher.hash_password(password)
        assert hasher.verify(
            password, hashed
        ), "Iterable of hashers should work correctly"

    def test_password_hasher_initialization_with_incorrect_type(self):
        """
        Test that initializing PasswordHasher with an unsupported type (e.g., integer) raises a TypeError.

        This test verifies that passing a non-iterable and unsupported type as the `hasher` argument to
        PasswordHasher triggers a TypeError with the expected error message, ensuring type safety and
        proper error handling during initialization.
        """
        # Integers are not iterable and not supported types, so this should raise TypeError
        with pytest.raises(
            TypeError,
            match="Unsupported hasher type.*Must be Argon2Hasher, BcryptHasher, Iterable, PasswordHash, or None",
        ):
            PasswordHasher(hasher=12345)

    def test_password_hasher_initialization_with_generic_iterable(self):
        """
        Test that PasswordHasher can be initialized with a generic iterable (such as a set)
        of hasher instances, and that it correctly hashes and verifies a password using
        the provided hashers. Ensures that non-list/tuple iterables are properly handled.
        """
        # Use a set (which is iterable but not a list or tuple)
        # This will go through the generic Iterable path and call list(hasher)
        hasher_set = {Argon2Hasher(), BcryptHasher()}
        hasher = PasswordHasher(hasher=hasher_set)
        password = "TestPassword123!"
        hashed = hasher.hash_password(password)
        assert hasher.verify(
            password, hashed
        ), "Generic iterable (set) should work correctly"

    def test_hash_password_produces_different_hashes(self, password_hasher):
        """
        Tests that hashing the same password multiple times produces different hashes,
        ensuring that the password hasher uses a random salt or similar mechanism.
        Also verifies that the generated hashes are not empty.
        """
        password = "TestPassword123!"
        hash1 = password_hasher.hash_password(password)
        hash2 = password_hasher.hash_password(password)
        assert hash1 != hash2, "Same password should produce different hashes"
        assert len(hash1) > 0, "Hash should not be empty"
        assert len(hash2) > 0, "Hash should not be empty"

    def test_verify_correct_password(self, password_hasher):
        """
        Tests that the password hasher correctly verifies a valid password.

        This test hashes a known password and then verifies that the original password
        is successfully validated against the generated hash.
        """
        password = "CorrectPassword123!"
        hashed = password_hasher.hash_password(password)
        assert password_hasher.verify(
            password, hashed
        ), "Correct password should verify successfully"

    def test_verify_incorrect_password(self, password_hasher):
        """
        Test that verifying an incorrect password against a hashed correct password fails.

        This test ensures that the password hasher does not validate a wrong password,
        even if it is similar to the correct one.
        """
        correct_password = "CorrectPassword123!"
        wrong_password = "WrongPassword123!"
        hashed = password_hasher.hash_password(correct_password)
        assert not password_hasher.verify(
            wrong_password, hashed
        ), "Incorrect password should fail verification"

    def test_verify_case_sensitivity(self, password_hasher):
        """
        Tests that the password verification is case-sensitive by ensuring that a password
        with different casing does not match the hashed password.
        """
        password = "TestPassword123!"
        hashed = password_hasher.hash_password(password)
        assert not password_hasher.verify(
            "testpassword123!", hashed
        ), "Password verification should be case-sensitive"

    def test_verify_and_update_returns_none_for_current_hash(self, password_hasher):
        """
        Tests that the `verify_and_update` method returns `None` for the updated hash
        when the provided password is hashed with the current algorithm.

        This ensures that if the password hash is already up-to-date with the latest
        hashing algorithm, no rehashing or hash update is performed.

        Args:
            password_hasher: An instance of the password hasher to be tested.

        Asserts:
            - The password is verified successfully.
            - The updated hash is None, indicating no update is needed.
        """
        password = "TestPassword123!"
        hashed = password_hasher.hash_password(password)
        is_valid, updated_hash = password_hasher.verify_and_update(password, hashed)
        assert is_valid, "Password should verify successfully"
        assert (
            updated_hash is None
        ), "Updated hash should be None for current hash algorithm"

    def test_verify_and_update_with_incorrect_password(self, password_hasher):
        """
        Test that verify_and_update returns False when an incorrect password is provided.

        This test hashes a correct password, then attempts to verify and update the hash
        using an incorrect password, expecting verification to fail.
        """
        correct_password = "CorrectPassword123!"
        wrong_password = "WrongPassword123!"
        hashed = password_hasher.hash_password(correct_password)
        is_valid, _updated_hash = password_hasher.verify_and_update(
            wrong_password, hashed
        )
        assert not is_valid, "Incorrect password should not verify"

    def test_generate_password_meets_length_requirement(self, password_hasher):
        """
        Test that the `generate_password` method of the password_hasher generates passwords
        of the exact specified length for various input lengths.

        Asserts:
            The generated password's length matches the requested length
            for each value in [8, 12, 16, 20].
        """
        lengths = [8, 12, 16, 20]
        for length in lengths:
            password = password_hasher.generate_password(length)
            assert (
                len(password) == length
            ), f"Generated password should be {length} characters long"

    def test_generate_password_has_required_character_classes(self, password_hasher):
        """
        Test that the password generated by the password_hasher contains at least one character
        from each required class: uppercase letters, lowercase letters, digits, and punctuation.
        """
        password = password_hasher.generate_password(12)
        has_upper = any(c in string.ascii_uppercase for c in password)
        has_lower = any(c in string.ascii_lowercase for c in password)
        has_digit = any(c in string.digits for c in password)
        has_punct = any(c in string.punctuation for c in password)
        assert has_upper, "Generated password should contain uppercase letters"
        assert has_lower, "Generated password should contain lowercase letters"
        assert has_digit, "Generated password should contain digits"
        assert has_punct, "Generated password should contain punctuation"

    def test_generate_password_uniqueness(self, password_hasher):
        """
        Test that the `generate_password` method produces unique passwords across
        multiple invocations, verifying that all generated passwords in a sample
        are distinct (evidence of a cryptographically secure source).
        """
        passwords = [password_hasher.generate_password(12) for _ in range(10)]
        unique_passwords = set(passwords)
        assert (
            len(unique_passwords) == 10
        ), "Generated passwords should be unique (cryptographically secure)"

    def test_validate_password_accepts_valid_password(self, password_hasher):
        """
        Test that the `validate_password` method of the password hasher accepts valid passwords.

        Iterates over a list of valid passwords and asserts that none of them raise a
        PasswordPolicyError. If a valid password raises, the test fails with a message.
        """
        valid_passwords = [
            "ValidPass1!",
            "Str0ng!Pass",
            "C0mplex@Password",
            "Test123!Password",
        ]
        for password in valid_passwords:
            try:
                password_hasher.validate_password(password)
            except PasswordPolicyError:
                pytest.fail(
                    f"Valid password '{password}' should not raise PasswordPolicyError"
                )

    def test_validate_password_rejects_short_password(self, password_hasher):
        """
        Test that validate_password raises a PasswordPolicyError when given a password
        that is too short, and that the error message mentions the length issue.
        """
        with pytest.raises(PasswordPolicyError) as exc_info:
            password_hasher.validate_password("Short1!")
        error_message = str(exc_info.value)
        assert re.search(
            r"too short", error_message, re.IGNORECASE
        ), f"Error should mention password is too short, got: {error_message}"

    def test_validate_password_rejects_missing_uppercase(self, password_hasher):
        """
        Test that `validate_password` raises a `PasswordPolicyError` with a message containing
        "uppercase letter" when the password does not contain any uppercase characters.
        """
        with pytest.raises(PasswordPolicyError, match="uppercase letter"):
            password_hasher.validate_password("lowercase123!")

    def test_validate_password_rejects_missing_lowercase(self, password_hasher):
        """
        Test that the password validator rejects passwords that do not contain any
        lowercase letters, raising a `PasswordPolicyError` mentioning the requirement.
        """
        with pytest.raises(PasswordPolicyError, match="lowercase letter"):
            password_hasher.validate_password("UPPERCASE123!")

    def test_validate_password_rejects_missing_digit(self, password_hasher):
        """
        Test that password validation rejects passwords missing a digit, raising a
        `PasswordPolicyError` whose message contains "digit".
        """
        with pytest.raises(PasswordPolicyError, match="digit"):
            password_hasher.validate_password("NoDigitsHere!")

    def test_validate_password_rejects_missing_punctuation(self, password_hasher):
        """
        Test that the password validator rejects passwords without any special
        (punctuation) characters, raising a `PasswordPolicyError` that mentions
        the special-character requirement.
        """
        with pytest.raises(PasswordPolicyError, match="special character"):
            password_hasher.validate_password("NoSpecialChars123")

    def test_validate_password_custom_min_length(self, password_hasher):
        """
        Test that the password validator enforces a custom minimum length requirement.

        Verifies that a password shorter than the given `min_length` raises a
        `PasswordPolicyError` whose message mentions the custom minimum.
        """
        short_but_valid = "Ab1!"  # Valid, but short
        # Should fail with higher min_length
        with pytest.raises(PasswordPolicyError) as exc_info:
            password_hasher.validate_password(short_but_valid, min_length=10)
        error_message = str(exc_info.value)
        assert re.search(
            r"too short.*need ≥ 10", error_message, re.IGNORECASE
        ), f"Error should mention custom minimum length, got: {error_message}"

    def test_is_valid_password_returns_true_for_valid(self, password_hasher):
        """
        Tests that is_valid_password returns True when provided with a valid
        password meeting all policy requirements.
        """
        assert password_hasher.is_valid_password(
            "ValidPass123!"
        ), "Valid password should return True"

    def test_is_valid_password_returns_false_for_invalid(self, password_hasher):
        """
        Tests that is_valid_password returns False for various invalid passwords.

        The test covers the following invalid cases:
        - Passwords that are too short
        - Passwords without uppercase letters
        - Passwords without lowercase letters
        - Passwords without digits
        - Passwords without special characters
        """
        invalid_passwords = [
            "short",  # Too short
            "nouppercase123!",  # No uppercase
            "NOLOWERCASE123!",  # No lowercase
            "NoDigitsHere!",  # No digits
            "NoSpecialChar123",  # No punctuation
        ]
        for password in invalid_passwords:
            assert not password_hasher.is_valid_password(
                password
            ), f"Invalid password '{password}' should return False"

    def test_empty_password_handling(self, password_hasher):
        """
        Test that validating an empty password raises a PasswordPolicyError,
        ensuring empty input is rejected by the policy.
        """
        with pytest.raises(PasswordPolicyError):
            password_hasher.validate_password("")

    def test_whitespace_only_password(self, password_hasher):
        """
        Test that validating a password consisting only of whitespace characters
        raises a PasswordPolicyError.
        """
        with pytest.raises(PasswordPolicyError):
            password_hasher.validate_password("        ")

    def test_very_long_password(self, password_hasher):
        """
        Tests that the password hasher can correctly hash and verify a very long password
        (1003 characters) without errors or data loss.
        """
        long_password = "A1!" + "x" * 1000  # 1003 characters
        hashed = password_hasher.hash_password(long_password)
        assert password_hasher.verify(
            long_password, hashed
        ), "Very long passwords should hash and verify correctly"

    def test_unicode_characters_in_password(self, password_hasher):
        """
        Tests that the password hasher can correctly hash and verify passwords
        containing Unicode (non-ASCII) characters.
        """
        unicode_password = "Tëst123!Pāśswörd"
        hashed = password_hasher.hash_password(unicode_password)
        assert password_hasher.verify(
            unicode_password, hashed
        ), "Unicode passwords should hash and verify correctly"

    def test_special_characters_in_password(self, password_hasher):
        """
        Tests that the password hasher correctly handles passwords containing special
        characters: each can be hashed and subsequently verified successfully.
        """
        special_passwords = [
            "Test!@#$123",
            "Pass%^&*()456",
            "Word[]{}789",
            "Secure<>?:012",
        ]
        for password in special_passwords:
            hashed = password_hasher.hash_password(password)
            assert password_hasher.verify(
                password, hashed
            ), f"Special character password '{password}' should work correctly"

    def test_generate_password_length_too_short(self):
        """
        Test that PasswordHasher.generate_password raises a PasswordPolicyError when the
        requested password length is less than the minimum allowed (8 characters), and that
        the error message indicates the password is too short and states the minimum.
        """
        with pytest.raises(PasswordPolicyError) as exc_info:
            PasswordHasher.generate_password(length=7)
        assert "too short" in str(exc_info.value).lower()
        # Accept either the Unicode or ASCII form of the ">= 8" message.
        assert "must be ≥ 8" in str(exc_info.value) or "must be >= 8" in str(
            exc_info.value
        )

View File

@@ -0,0 +1,236 @@
from unittest.mock import MagicMock, patch
import pytest
from fastapi import HTTPException, status
class TestLoginEndpointSecurity:
"""
Test suite for verifying the security and behavior of the login endpoint.
This class contains tests that cover various scenarios for the login endpoint, including:
- Successful login without Multi-Factor Authentication (MFA) for different client types.
- Login attempts when MFA is required, ensuring the correct response and MFA flow.
- Handling of invalid client types, ensuring forbidden access is enforced.
- Login attempts with invalid credentials, ensuring proper error handling.
- Login attempts with inactive users, ensuring access is denied as expected.
Each test uses extensive mocking to simulate authentication, user activity checks, MFA status, and session/token creation, allowing for isolated and reliable testing of the endpoint's logic and security requirements.
"""
@pytest.mark.parametrize(
"client_type, expected_status, returns_tokens",
[
("web", status.HTTP_200_OK, False),
("mobile", status.HTTP_200_OK, True),
],
)
def test_login_without_mfa(
self,
fast_api_app,
fast_api_client,
sample_user_read,
client_type,
expected_status,
returns_tokens,
):
"""
Test the login endpoint behavior when Multi-Factor Authentication (MFA) is not enabled for the user.
This test verifies that:
- The login process completes successfully without requiring MFA.
- The correct response is returned based on whether tokens are expected.
- The appropriate authentication, user activity, and MFA checks are patched and simulated.
- The fake store is not called during the process.
Args:
fast_api_app: The FastAPI application instance under test.
fast_api_client: The test client for making HTTP requests to the FastAPI app.
sample_user_read: A sample user object returned by the authentication mock.
client_type: The type of client making the request (used in headers and app state).
expected_status: The expected HTTP status code of the response.
returns_tokens: Boolean indicating if the endpoint should return tokens or just a session ID.
"""
fast_api_app.state._client_type = client_type
with patch(
"session.router.session_utils.authenticate_user"
) as mock_auth, patch("session.router.users_utils.check_user_is_active"), patch(
"session.router.profile_utils.is_mfa_enabled_for_user"
) as mock_mfa, patch(
"session.router.session_utils.complete_login"
) as mock_complete:
mock_auth.return_value = sample_user_read
mock_mfa.return_value = False
mock_complete.return_value = (
{"session_id": "test-session"}
if not returns_tokens
else {
"access_token": "token",
"refresh_token": "refresh",
"session_id": "session",
"token_type": "Bearer",
"expires_in": 900,
}
)
resp = fast_api_client.post(
"/token",
data={"username": "testuser", "password": "secret"},
headers={"X-Client-Type": client_type},
)
assert resp.status_code == expected_status
body = resp.json()
if returns_tokens:
assert body["access_token"] == "token"
assert body["refresh_token"] == "refresh"
assert body["session_id"] == "session"
assert body["token_type"] == "Bearer"
assert isinstance(body["expires_in"], int)
else:
assert body == {"session_id": "test-session"}
assert fast_api_app.state.fake_store.calls == []
@pytest.mark.parametrize(
"client_type, expected_status",
[
("web", status.HTTP_202_ACCEPTED),
("mobile", status.HTTP_200_OK),
],
)
def test_login_with_mfa_required(
self,
fast_api_app,
fast_api_client,
sample_user_read,
client_type,
expected_status,
):
"""
Test the login endpoint when Multi-Factor Authentication (MFA) is required.
This test verifies that when a user with MFA enabled attempts to log in,
the API responds with the correct status code and indicates that MFA is required.
It mocks the authentication, user activity check, and MFA status check to simulate
the scenario where MFA is enabled for the user.
Args:
fast_api_app: The FastAPI application instance under test.
fast_api_client: The test client for making HTTP requests to the FastAPI app.
sample_user_read: A sample user object returned by the authentication mock.
client_type: The type of client making the request (used in headers).
expected_status: The expected HTTP status code for the response.
Asserts:
- The response status code matches the expected status.
- The response JSON contains 'mfa_required' set to True.
- The response JSON contains the correct 'username'.
- The fake_store in the app state records the correct call.
"""
fast_api_app.state._client_type = client_type
with patch(
"session.router.session_utils.authenticate_user"
) as mock_auth, patch("session.router.users_utils.check_user_is_active"), patch(
"session.router.profile_utils.is_mfa_enabled_for_user"
) as mock_mfa:
mock_auth.return_value = sample_user_read
mock_mfa.return_value = True
resp = fast_api_client.post(
"/token",
data={"username": "testuser", "password": "secret"},
headers={"X-Client-Type": client_type},
)
assert resp.status_code == expected_status
body = resp.json()
assert body["mfa_required"] is True
assert body["username"] == "testuser"
assert fast_api_app.state.fake_store.calls == [
("testuser", sample_user_read.id)
]
def test_invalid_client_type_forbidden(
self, fast_api_app, fast_api_client, sample_user_read
):
"""
Test that a login attempt with an invalid client type returns a 403 Forbidden response.
This test sets the application's client type to "desktop" and mocks the authentication,
user activity check, MFA status, token creation, and session creation utilities. It then
sends a POST request to the "/token" endpoint with the "X-Client-Type" header set to "desktop".
The test asserts that the response status code is 403 Forbidden and the response detail
indicates an invalid client type.
Args:
fast_api_app: The FastAPI application instance.
fast_api_client: The test client for making HTTP requests.
sample_user_read: A sample user object returned by the authentication mock.
"""
fast_api_app.state._client_type = "desktop"
with patch(
"session.router.session_utils.authenticate_user"
) as mock_auth, patch("session.router.users_utils.check_user_is_active"), patch(
"session.router.profile_utils.is_mfa_enabled_for_user"
) as mock_mfa, patch(
"session.router.session_utils.create_tokens"
) as mock_create_tokens, patch(
"session.router.session_utils.create_session"
) as mock_create_session:
mock_auth.return_value = sample_user_read
mock_mfa.return_value = False
mock_create_tokens.return_value = (
"sid",
object(),
"acc",
object(),
"ref",
"csrf",
)
mock_create_session.return_value = None
resp = fast_api_client.post(
"/token",
data={"username": "x", "password": "y"},
headers={"X-Client-Type": "desktop"},
)
assert resp.status_code == status.HTTP_403_FORBIDDEN
assert resp.json()["detail"] == "Invalid client type"
    def test_login_with_invalid_credentials(self, password_hasher, mock_db):
        """
        Verify that a configured authentication failure surfaces as HTTP 401.

        NOTE(review): this test patches ``authenticate_user`` and then invokes
        the mock object itself, so the assertion only confirms the mock's
        ``side_effect`` wiring — no code under ``session.router`` actually
        runs. Consider exercising the ``/token`` endpoint (as the other login
        tests do) so the router's handling of the 401 is what gets tested.
        """
        with patch("session.router.session_utils.authenticate_user") as mock_auth:
            # Configure the mock to behave like a failed credential check.
            mock_auth.side_effect = HTTPException(
                status_code=401, detail="Invalid username"
            )
            with pytest.raises(HTTPException) as exc_info:
                # Calls the mock directly — see NOTE(review) above.
                mock_auth("invalid", "password", password_hasher, mock_db)
            assert exc_info.value.status_code == 401
    def test_login_with_inactive_user(self, sample_inactive_user):
        """
        Verify that an inactive-user check failure surfaces as HTTP 403.

        NOTE(review): this test invokes the patched ``check_user_is_active``
        mock directly, so it only confirms the mock's ``side_effect`` wiring —
        no application code runs. Consider posting to the ``/token`` endpoint
        so the router's propagation of the 403 is what gets tested.
        """
        with patch("session.router.session_utils.authenticate_user") as mock_auth:
            with patch("session.router.users_utils.check_user_is_active") as mock_check:
                # Authentication "succeeds" but the returned user is inactive.
                mock_auth.return_value = sample_inactive_user
                mock_check.side_effect = HTTPException(
                    status_code=403, detail="User is inactive"
                )
                with pytest.raises(HTTPException) as exc_info:
                    # Calls the mock directly — see NOTE(review) above.
                    mock_check(sample_inactive_user)
                assert exc_info.value.status_code == 403

View File

@@ -0,0 +1,443 @@
from datetime import datetime, timezone
import pytest
from fastapi import HTTPException
import session.token_manager as session_token_manager
class TestTokenManagerSecurity:
    """
    Security-focused test suite for the session ``TokenManager``.

    Covers:
    - Claim extraction via ``get_token_claim`` and 401 rejection of missing
      claims.
    - Decoding of valid tokens; 401 rejection of malformed, tampered, empty,
      or wrongly-signed tokens.
    - Expiration validation for freshly issued and expired tokens.
    - Access/refresh token creation: non-empty string tokens, future-dated
      UTC expiry datetimes, and access lifetime strictly shorter than refresh.
    - CSRF token generation: uniqueness across calls and a minimum length of
      32 characters.
    - Token uniqueness across repeated calls and across distinct session IDs.
    - Use of a secure signing algorithm (HS256).
    """
def test_get_token_claim_returns_correct_value(
self, token_manager, sample_user_read
):
"""
Tests that the `get_token_claim` method of the token manager returns the correct values for specific claims.
This test creates a token for a given session and user, then verifies that:
- The "sub" claim matches the user's ID.
- The "sid" claim matches the session ID.
"""
session_id = "test-session-123"
_, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
sub_claim = token_manager.get_token_claim(token, "sub")
sid_claim = token_manager.get_token_claim(token, "sid")
assert (
sub_claim == sample_user_read.id
), "sub claim should match user ID (as int)"
assert sid_claim == session_id, "sid claim should match session ID"
def test_get_token_claim_with_missing_claim(self, token_manager, sample_user_read):
"""
Test that get_token_claim raises an HTTPException with status code 401 when attempting to retrieve a claim
that does not exist in the token. Verifies that the exception detail message indicates the claim is missing.
"""
session_id = "test-session-id"
_, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
with pytest.raises(HTTPException) as exc_info:
token_manager.get_token_claim(token, "nonexistent_claim")
assert exc_info.value.status_code == 401
assert "missing" in exc_info.value.detail.lower()
def test_decode_valid_token(self, token_manager, sample_user_read):
"""
Tests that a valid token generated by the token manager can be successfully decoded.
This test verifies that:
- A token created with a valid session ID and user information can be decoded.
- The decoded payload is not None.
- The decoded payload has a 'claims' attribute.
- The 'claims' attribute of the payload is not None.
Args:
token_manager: The token manager instance used to create and decode tokens.
sample_user_read: A sample user object used for token creation.
"""
session_id = "test-session-id"
_, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
payload = token_manager.decode_token(token)
assert payload is not None, "Decoded payload should not be None"
assert hasattr(payload, "claims"), "Payload should have claims attribute"
assert payload.claims is not None, "Claims should not be None"
def test_decode_token_contains_expected_claims(
self, token_manager, sample_user_read
):
"""
Test that the decoded token contains the expected claims.
This test verifies that when a token is created using the token manager,
the decoded payload includes the required claims:
- 'sub': the user ID (subject)
- 'sid': the session ID
- 'exp': the expiration timestamp
Args:
token_manager: The token manager instance used to create and decode tokens.
sample_user_read: A sample user object used for token creation.
Asserts:
- The 'sub' claim is present in the token payload.
- The 'sid' claim is present in the token payload.
- The 'exp' claim is present in the token payload.
"""
session_id = "test-session-id"
_, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
payload = token_manager.decode_token(token)
claims = payload.claims
assert "sub" in claims, "Token should have 'sub' claim (user_id)"
assert "sid" in claims, "Token should have 'sid' claim (session_id)"
assert "exp" in claims, "Token should have 'exp' claim"
# Note: 'type' claim is not actually used in token creation
def test_decode_token_with_invalid_token(self, token_manager):
"""
Tests that the `decode_token` method of the token manager raises an HTTPException with status code 401
when provided with various invalid JWT tokens.
Args:
token_manager: An instance of the token manager to be tested.
Asserts:
- An HTTPException is raised for each invalid token.
- The raised exception has a status code of 401.
"""
invalid_tokens = [
"invalid.token.here",
"not-a-jwt",
"",
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.invalid",
]
for invalid_token in invalid_tokens:
with pytest.raises(HTTPException) as exc_info:
token_manager.decode_token(invalid_token)
assert exc_info.value.status_code == 401
def test_decode_token_with_wrong_secret(self, sample_user_read):
"""
Test that decoding a token with a different secret key than the one used to create it raises an HTTP 401 Unauthorized exception.
This test creates a token using one instance of TokenManager with a specific secret key, then attempts to decode the token using another TokenManager instance with a different secret key. It asserts that an HTTPException with status code 401 is raised, indicating unauthorized access due to the wrong secret.
"""
# Create token with one manager
manager1 = session_token_manager.TokenManager(
secret_key="secret-key-one-min-32-characters-long"
)
_, token = manager1.create_token(
"session-id", sample_user_read, session_token_manager.TokenType.ACCESS
)
# Try to decode with different manager
manager2 = session_token_manager.TokenManager(
secret_key="secret-key-two-min-32-characters-long"
)
with pytest.raises(HTTPException) as exc_info:
manager2.decode_token(token)
assert exc_info.value.status_code == 401
def test_validate_token_expiration_with_valid_token(
self, token_manager, sample_user_read
):
"""
Test that `validate_token_expiration` does not raise an exception when provided with a valid (non-expired) token.
This test creates a valid access token using the token manager and verifies that calling
`validate_token_expiration` with this token does not raise an `HTTPException`. If an exception is raised,
the test fails, indicating that valid tokens are incorrectly being marked as expired.
"""
session_id = "test-session-id"
_, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
# Should not raise an exception
try:
token_manager.validate_token_expiration(token)
except HTTPException:
pytest.fail("Valid token should not raise HTTPException")
    def test_validate_token_expiration_with_expired_token(self, token_manager):
        """
        Test that validate_token_expiration raises an HTTPException with status code 401
        when provided with an expired JWT token.

        NOTE(review): the token below is a hard-coded HS256 JWT with a fixed
        ``exp`` in the past. For the test to exercise the *expiration* path
        (rather than a signature mismatch, which also yields 401), the
        ``token_manager`` fixture's secret key must match the key this token
        was signed with — confirm against the fixture definition.

        Args:
            self: The test class instance.
            token_manager: The token manager instance to be tested.
        Asserts:
            - An HTTPException is raised when an expired token is validated.
            - The exception's status code is 401 (Unauthorized).
        """
        # Pre-generated token whose 'exp' claim is already in the past.
        token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzaWQiOiJzZXNzaW9uLWlkIiwiaXNzIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwiYXVkIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoxLCJzY29wZSI6WyJwcm9maWxlIiwidXNlcnM6cmVhZCIsImdlYXJzOnJlYWQiLCJnZWFyczp3cml0ZSIsImFjdGl2aXRpZXM6cmVhZCIsImFjdGl2aXRpZXM6d3JpdGUiLCJoZWFsdGg6cmVhZCIsImhlYWx0aDp3cml0ZSIsImhlYWx0aF90YXJnZXRzOnJlYWQiLCJoZWFsdGhfdGFyZ2V0czp3cml0ZSJdLCJpYXQiOjE3NTk5NTMxODUsIm5iZiI6MTc1OTk1MzE4NSwiZXhwIjoxNzU5OTU0MDg1LCJqdGkiOiI3OWY2NDJlZC00NzNkLTQxMGYtYWMyNS0yMjYxMDU5YTM4NjIifQ.VSizGzvIIi_EJYD_YmfZBEBE_9aJbhLW-25cD1kEOeM"
        with pytest.raises(HTTPException) as excinfo:
            token_manager.validate_token_expiration(token)
        assert excinfo.value.status_code == 401
def test_create_access_token(self, token_manager, sample_user_read):
"""
Tests the creation of an access token using the token manager.
This test verifies that:
- The generated token is not None.
- The token is a non-empty string.
- The expiration time is a datetime object.
- The expiration time is set in the future.
Args:
token_manager: The token manager instance used to create tokens.
sample_user_read: A sample user object for whom the token is created.
"""
session_id = "test-session-id"
exp_time, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
assert token is not None, "Token should not be None"
assert isinstance(token, str), "Token should be a string"
assert len(token) > 0, "Token should not be empty"
assert isinstance(exp_time, datetime), "Expiration should be a datetime"
assert exp_time > datetime.now(
timezone.utc
), "Expiration should be in the future"
def test_create_refresh_token(self, token_manager, sample_user_read):
"""
Test the creation of a refresh token using the token manager.
This test verifies that the `create_token` method of the token manager:
- Returns a non-None, non-empty string token.
- Returns an expiration time as a `datetime` object.
- Ensures the expiration time is set in the future.
Args:
token_manager: The token manager instance used to create the token.
sample_user_read: A sample user object to associate with the token.
Asserts:
- The generated token is not None.
- The token is a string and not empty.
- The expiration time is a `datetime` object and is set in the future.
"""
session_id = "test-session-id"
exp_time, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.REFRESH
)
assert token is not None, "Token should not be None"
assert isinstance(token, str), "Token should be a string"
assert len(token) > 0, "Token should not be empty"
assert isinstance(exp_time, datetime), "Expiration should be a datetime"
assert exp_time > datetime.now(
timezone.utc
), "Expiration should be in the future"
def test_access_token_shorter_expiration_than_refresh(
self, token_manager, sample_user_read
):
"""
Test that the access token has a shorter expiration time than the refresh token.
This test verifies that when creating both an access token and a refresh token for the same session and user,
the expiration time of the access token is less than that of the refresh token, ensuring correct token lifetimes.
"""
session_id = "test-session-id"
access_exp, _ = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
refresh_exp, _ = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.REFRESH
)
assert (
access_exp < refresh_exp
), "Access token should expire before refresh token"
def test_create_csrf_token_generates_unique_tokens(self, token_manager):
"""
Test that `create_csrf_token` generates unique CSRF tokens.
This test calls the `create_csrf_token` method of the token manager multiple times
and asserts that all generated tokens are unique, ensuring cryptographic security.
"""
tokens = [token_manager.create_csrf_token() for _ in range(10)]
unique_tokens = set(tokens)
assert (
len(unique_tokens) == 10
), "CSRF tokens should be unique (cryptographically secure)"
def test_create_csrf_token_has_sufficient_length(self, token_manager):
"""
Test that the CSRF token generated by the token manager has a sufficient length.
This test ensures that the `create_csrf_token` method of the token manager
returns a token string that is at least 32 characters long, which is important
for maintaining adequate security against brute-force attacks.
Args:
token_manager: An instance of the token manager responsible for generating CSRF tokens.
Asserts:
The generated CSRF token has a length of at least 32 characters.
"""
token = token_manager.create_csrf_token()
assert len(token) >= 32, "CSRF token should be at least 32 characters long"
def test_tokens_are_different_for_same_user(self, token_manager, sample_user_read):
"""
Test that creating multiple tokens for the same user and session results in unique tokens.
This test ensures that even when the same user and session ID are provided to the token manager,
each call to `create_token` generates a distinct token, verifying that token generation does not
produce duplicate tokens for identical input parameters.
Args:
token_manager: The token manager instance responsible for creating tokens.
sample_user_read: A sample user object used for token creation.
Asserts:
The two generated tokens are not equal, confirming token uniqueness.
"""
session_id = "test-session-id"
_, token1 = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
_, token2 = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
assert token1 != token2, "Tokens should be unique even for the same user"
def test_token_tampering_detection(self, token_manager, sample_user_read):
"""
Tests that the token manager correctly detects and rejects tampered tokens.
This test creates a valid token, deliberately modifies (tampers with) its contents,
and then attempts to decode it. The expected behavior is that the token manager
raises an HTTPException with a 401 status code, indicating unauthorized access
due to token tampering.
Args:
token_manager: The token manager instance used to create and decode tokens.
sample_user_read: A sample user object used for token creation.
Raises:
HTTPException: If the tampered token is detected as invalid, with status code 401.
"""
session_id = "test-session-id"
_, token = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
# Tamper with the token
tampered_token = token[:-5] + "XXXXX"
with pytest.raises(HTTPException) as exc_info:
token_manager.decode_token(tampered_token)
assert exc_info.value.status_code == 401
def test_empty_token_handling(self, token_manager):
"""
Test that the token manager raises an HTTPException with status code 401
when attempting to decode an empty token string.
"""
with pytest.raises(HTTPException) as exc_info:
token_manager.decode_token("")
assert exc_info.value.status_code == 401
def test_none_token_handling(self, token_manager):
"""
Test that the token_manager.decode_token method raises an appropriate exception
(HTTPException, AttributeError, or TypeError) when called with a None token.
"""
with pytest.raises((HTTPException, AttributeError, TypeError)):
token_manager.decode_token(None)
def test_token_algorithm_is_secure(self, token_manager):
"""
Test to ensure that the token manager uses a secure algorithm.
This test asserts that the default algorithm used by the token manager is "HS256",
which is considered secure for signing tokens. If a weaker algorithm is used,
the test will fail, indicating a potential security risk.
"""
assert (
token_manager.algorithm == "HS256"
), "Default algorithm should be HS256 or stronger"
def test_different_session_ids_produce_different_tokens(
self, token_manager, sample_user_read
):
"""
Test that creating tokens with different session IDs produces different token values.
This test ensures that the token manager generates unique tokens for different session IDs,
even when the user and token type are the same. It verifies that the tokens created for
distinct session IDs are not equal, which is important for session isolation and security.
Args:
token_manager: The token manager instance used to create tokens.
sample_user_read: A sample user object used for token creation.
Asserts:
The tokens generated for different session IDs are not equal.
"""
_, token1 = token_manager.create_token(
"session-id-1", sample_user_read, session_token_manager.TokenType.ACCESS
)
_, token2 = token_manager.create_token(
"session-id-2", sample_user_read, session_token_manager.TokenType.ACCESS
)
assert token1 != token2, "Different session IDs should produce different tokens"
def test_token_expiration_time_is_in_utc(self, token_manager, sample_user_read):
"""
Test that the token expiration time generated by the token manager is set in the UTC timezone.
This test verifies that when a token is created using the token manager, the returned expiration time (`exp_time`)
has its `tzinfo` attribute set to `timezone.utc`, ensuring that all expiration times are consistently in UTC.
"""
session_id = "test-session-id"
exp_time, _ = token_manager.create_token(
session_id, sample_user_read, session_token_manager.TokenType.ACCESS
)
assert (
exp_time.tzinfo == timezone.utc
), "Expiration time should be in UTC timezone"

File diff suppressed because it is too large Load Diff