Commit c54582f

Author: Mateusz

Add tuple unpacking support to GeneratedToken and improve thread safety

1 parent 5592461 · commit c54582f

File tree: 5 files changed, +103 −98 lines

.gitignore

Lines changed: 2 additions & 1 deletion

@@ -117,4 +117,5 @@ var/*.log
 var/*.md
 var/*.json
 var/*.jsonl
-var/*.py
+var/*.py
+pyright_output.json

dev/pyright_output.json

Lines changed: 13 additions & 13 deletions

@@ -1,13 +1,13 @@
-{
-    "version": "1.1.407",
-    "time": "1767034635449",
-    "generalDiagnostics": [],
-    "summary": {
-        "filesAnalyzed": 1011,
-        "errorCount": 0,
-        "warningCount": 0,
-        "informationCount": 0,
-        "timeInSec": 41.452
-    }
-}
+{
+    "version": "1.1.407",
+    "time": "1767041279293",
+    "generalDiagnostics": [],
+    "summary": {
+        "filesAnalyzed": 1011,
+        "errorCount": 0,
+        "warningCount": 0,
+        "informationCount": 0,
+        "timeInSec": 18.36
+    }
+}

src/core/auth/sso/token_service.py

Lines changed: 72 additions & 67 deletions

@@ -1,35 +1,40 @@
-"""
-Token service for SSO authentication.
-
-This module provides secure token generation, hashing, and verification
-using Argon2id with 2025-recommended security parameters.
-"""
-
-import base64
-import secrets
-
-import pydantic
-from argon2 import PasswordHasher
-from argon2.exceptions import InvalidHashError, VerificationError, VerifyMismatchError
-
-from src.core.auth.sso.exceptions import TokenError
-
-
-class GeneratedToken(pydantic.BaseModel):
-    """Result of token generation.
-
-    Contains both the plaintext token (for immediate use) and its hash
-    (for secure storage and verification).
-
-    Attributes:
-        plaintext: Base64url-encoded token (43+ characters) with 256-bit entropy
-        hash: Argon2id hash of the token for secure storage
-    """
-
-    plaintext: str
-    hash: str
-
-    model_config = {"frozen": True}
+"""
+Token service for SSO authentication.
+
+This module provides secure token generation, hashing, and verification
+using Argon2id with 2025-recommended security parameters.
+"""
+
+import base64
+import secrets
+
+import pydantic
+from argon2 import PasswordHasher
+from argon2.exceptions import InvalidHashError, VerificationError, VerifyMismatchError
+
+from src.core.auth.sso.exceptions import TokenError
+
+
+class GeneratedToken(pydantic.BaseModel):
+    """Result of token generation.
+
+    Contains both the plaintext token (for immediate use) and its hash
+    (for secure storage and verification).
+
+    Attributes:
+        plaintext: Base64url-encoded token (43+ characters) with 256-bit entropy
+        hash: Argon2id hash of the token for secure storage
+    """
+
+    plaintext: str
+    hash: str
+
+    model_config = {"frozen": True}
+
+    def __iter__(self):  # type: ignore[override]
+        """Allow tuple unpacking for backward compatibility."""
+        yield self.plaintext
+        yield self.hash
 
 
 class TokenService:

@@ -99,41 +104,41 @@ def __init__(
             salt_len=16,  # Always 16 bytes salt
         )
 
-    def generate_token(self) -> GeneratedToken:
-        """
-        Generate a new agent token with 256-bit entropy.
-
-        The token is generated using cryptographically secure random bytes
-        and encoded as base64url for Bearer token compatibility.
-
-        Returns:
-            GeneratedToken: Object containing both plaintext token and its hash
-                - plaintext: Base64url-encoded token (43+ characters)
-                - hash: Argon2id hash of token
-
-        Raises:
-            TokenError: If token generation or hashing fails
-        """
-        try:
-            # Generate 256 bits (32 bytes) of cryptographically secure random data
-            token_bytes = secrets.token_bytes(32)
-
-            # Encode as base64url (URL-safe, no padding)
-            plaintext_token = (
-                base64.urlsafe_b64encode(token_bytes).decode("ascii").rstrip("=")
-            )
-
-            # Hash the token using Argon2id
-            token_hash = self.hash_token(plaintext_token)
-
-            return GeneratedToken(plaintext=plaintext_token, hash=token_hash)
-
-        except Exception as e:
-            raise TokenError(
-                "Failed to generate token",
-                details={"error": str(e)},
-                original_error=e,
-            ) from e
+    def generate_token(self) -> GeneratedToken:
+        """
+        Generate a new agent token with 256-bit entropy.
+
+        The token is generated using cryptographically secure random bytes
+        and encoded as base64url for Bearer token compatibility.
+
+        Returns:
+            GeneratedToken: Object containing both plaintext token and its hash
+                - plaintext: Base64url-encoded token (43+ characters)
+                - hash: Argon2id hash of token
+
+        Raises:
+            TokenError: If token generation or hashing fails
+        """
+        try:
+            # Generate 256 bits (32 bytes) of cryptographically secure random data
+            token_bytes = secrets.token_bytes(32)
+
+            # Encode as base64url (URL-safe, no padding)
+            plaintext_token = (
+                base64.urlsafe_b64encode(token_bytes).decode("ascii").rstrip("=")
+            )
+
+            # Hash the token using Argon2id
+            token_hash = self.hash_token(plaintext_token)
+
+            return GeneratedToken(plaintext=plaintext_token, hash=token_hash)
+
+        except Exception as e:
+            raise TokenError(
+                "Failed to generate token",
+                details={"error": str(e)},
+                original_error=e,
+            ) from e
 
     def hash_token(self, token: str) -> str:
         """

src/core/ports/streaming_processors.py

Lines changed: 1 addition & 1 deletion

@@ -196,7 +196,7 @@ def __init__(self, max_cached_sessions: int = 256) -> None:
         self._max_cached_sessions = max_cached_sessions
         self._session_order: list[str] = []  # Track LRU order
         self._logger = logging.getLogger(__name__)
-        self._lock = threading.Lock()
+        self._lock = threading.RLock()
 
     async def process(self, content: StreamingContent) -> StreamingContent:
         """Process streaming content and check for tool call loops.

tests/unit/test_token_service.py

Lines changed: 15 additions & 16 deletions

@@ -6,20 +6,19 @@
 
 import pytest
 from src.core.auth.sso.exceptions import TokenError
-from src.core.auth.sso.token_service import TokenService
+from src.core.auth.sso.token_service import GeneratedToken, TokenService
 
 
 class TestTokenService:
     """Unit tests for TokenService."""
 
     def test_generate_token_returns_tuple(self) -> None:
-        """Test that generate_token returns a tuple of (token, hash)."""
+        """Test that generate_token returns a GeneratedToken that can be unpacked as a tuple."""
         service = TokenService(memory_cost=8192, time_cost=1, parallelism=1)
         result = service.generate_token()
 
-        assert isinstance(result, tuple)
-        assert len(result) == 2
-
+        assert isinstance(result, GeneratedToken)
+        # Verify it can be unpacked as a tuple for backward compatibility
         plaintext_token, token_hash = result
         assert isinstance(plaintext_token, str)
         assert isinstance(token_hash, str)

@@ -98,17 +97,17 @@ def test_hash_token_produces_different_hashes(self) -> None:
         assert service.verify_token(plaintext_token, hash2) is True
         assert service.verify_token(plaintext_token, hash3) is True
 
-    def test_generated_tokens_are_unique(self) -> None:
-        """Test that multiple generated tokens are unique."""
-        service = TokenService(memory_cost=8, time_cost=1, parallelism=1)
-
-        tokens = set()
-        for _ in range(20):
-            plaintext_token, _ = service.generate_token()
-            tokens.add(plaintext_token)
-
-        # All tokens should be unique
-        assert len(tokens) == 20
+    def test_generated_tokens_are_unique(self) -> None:
+        """Test that multiple generated tokens are unique."""
+        service = TokenService(memory_cost=8, time_cost=1, parallelism=1)
+
+        tokens = set()
+        for _ in range(20):
+            plaintext_token, _ = service.generate_token()
+            tokens.add(plaintext_token)
+
+        # All tokens should be unique
+        assert len(tokens) == 20
 
     def test_token_hash_does_not_contain_plaintext(self) -> None:
         """Test that token hash does not contain the plaintext token."""
