Skip to content

Commit

Permalink
Merge pull request #1173 from lsst-sqre/tickets/DM-47789-queue
Browse files Browse the repository at this point in the history
DM-47789: Move scopes validation into a Pydantic type
  • Loading branch information
rra authored Nov 27, 2024
2 parents 6e9bf9f + 8538d51 commit 79da42c
Show file tree
Hide file tree
Showing 5 changed files with 46 additions and 48 deletions.
1 change: 1 addition & 0 deletions docs/documenteer.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ nitpick_ignore = [
["py:class", "pydantic.functional_serializers.PlainSerializer"],
["py:class", "pydantic.functional_validators.AfterValidator"],
["py:class", "pydantic.functional_validators.BeforeValidator"],
["py:class", "pydantic.functional_validators.PlainValidator"],
["py:class", "pydantic.main.BaseModel"],
["py:class", "pydantic.networks.UrlConstraints"],
["py:class", "pydantic.types.SecretStr"],
Expand Down
13 changes: 4 additions & 9 deletions src/gafaelfawr/models/history.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,13 @@
from datetime import datetime # noqa: F401: needed for docs
from typing import Any, Generic, Self, TypeVar

from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, Field
from safir.database import DatetimeIdCursor, PaginatedList, PaginationCursor
from safir.datetime import current_datetime
from sqlalchemy.orm import InstrumentedAttribute

from ..pydantic import IpAddress, Timestamp
from ..pydantic import IpAddress, Scopes, Timestamp
from ..schema import TokenChangeHistory
from ..util import normalize_scopes
from .enums import AdminChange, TokenChange, TokenType

# Not used directly but needed to prevent documentation build errors because
Expand Down Expand Up @@ -115,7 +114,7 @@ class TokenChangeHistoryEntry(BaseModel):
examples=["1NOV_8aPwhCWj6rM-p1XgQ"],
)

scopes: list[str] = Field(
scopes: Scopes = Field(
..., title="Scopes of the token", examples=[["read:all"]]
)

Expand Down Expand Up @@ -158,7 +157,7 @@ class TokenChangeHistoryEntry(BaseModel):
examples=["old name"],
)

old_scopes: list[str] | None = Field(
old_scopes: Scopes | None = Field(
None,
title="Previous scopes of the token",
description=(
Expand Down Expand Up @@ -204,10 +203,6 @@ class TokenChangeHistoryEntry(BaseModel):
examples=[1614985631],
)

_normalize_scopes = field_validator("scopes", "old_scopes", mode="before")(
normalize_scopes
)

def model_dump_reduced(self) -> dict[str, Any]:
"""Convert to a dictionary while suppressing some fields.
Expand Down
20 changes: 8 additions & 12 deletions src/gafaelfawr/models/token.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@

from ..constants import USERNAME_REGEX
from ..exceptions import InvalidTokenError
from ..pydantic import Timestamp
from ..util import normalize_scopes, random_128_bits
from ..pydantic import Scopes, Timestamp
from ..util import random_128_bits
from .enums import TokenType
from .userinfo import Group

Expand Down Expand Up @@ -141,7 +141,7 @@ class TokenBase(BaseModel):
max_length=64,
)

scopes: list[str] = Field(
scopes: Scopes = Field(
...,
title="Token scopes",
description="Scopes of the token",
Expand All @@ -162,10 +162,6 @@ class TokenBase(BaseModel):
examples=[1616986130],
)

_normalize_scopes = field_validator("scopes", mode="before")(
normalize_scopes
)


class TokenInfo(TokenBase):
"""Information about a token.
Expand Down Expand Up @@ -383,8 +379,8 @@ class AdminTokenRequest(BaseModel):
validate_default=True,
)

scopes: list[str] = Field(
default_factory=list,
scopes: Scopes = Field(
[],
title="Token scopes",
examples=[["read:all"]],
)
Expand Down Expand Up @@ -489,8 +485,8 @@ class UserTokenRequest(BaseModel):
max_length=64,
)

scopes: list[str] = Field(
default_factory=list,
scopes: Scopes = Field(
[],
title="Token scope",
examples=[["read:all"]],
)
Expand Down Expand Up @@ -518,7 +514,7 @@ class UserTokenModifyRequest(BaseModel):
max_length=64,
)

scopes: list[str] | None = Field(
scopes: Scopes | None = Field(
None, title="Token scopes", examples=[["read:all"]]
)

Expand Down
34 changes: 33 additions & 1 deletion src/gafaelfawr/pydantic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,16 @@

from __future__ import annotations

from collections.abc import Iterable
from ipaddress import IPv4Address, IPv6Address
from typing import Annotated, TypeAlias

from pydantic import BeforeValidator, PlainSerializer
from pydantic import BeforeValidator, PlainSerializer, PlainValidator
from safir.pydantic import UtcDatetime

__all__ = [
"IpAddress",
"Scopes",
"Timestamp",
]

Expand Down Expand Up @@ -45,6 +47,36 @@ def _normalize_ip_address(v: str | IPv4Address | IPv6Address) -> str:
"""


def _normalize_scopes(v: str | Iterable[str]) -> list[str]:
"""Pydantic validator for scope fields.
Scopes are stored in the database as a comma-delimited, sorted list.
Convert to the list representation we want to use in Python, ensuring the
scopes remain sorted.
Parameters
----------
v
Field representing token scopes.
Returns
-------
set of str
Scopes as a set.
"""
if isinstance(v, str):
return [] if not v else sorted(v.split(","))
else:
return sorted(v)


# Annotated list type whose validation is delegated entirely to
# _normalize_scopes: it accepts the comma-delimited string form stored in
# the database or any iterable of scope names, and always yields a sorted
# list of strings.
# NOTE(review): PlainValidator replaces Pydantic's built-in list[str]
# validation entirely, so _normalize_scopes is the sole validator here.
Scopes: TypeAlias = Annotated[list[str], PlainValidator(_normalize_scopes)]
"""Type for a list of scopes.
The scopes will be forced to sorted order by validation.
"""


Timestamp: TypeAlias = Annotated[
UtcDatetime,
PlainSerializer(lambda t: int(t.timestamp()), return_type=int),
Expand Down
26 changes: 0 additions & 26 deletions src/gafaelfawr/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
"base64_to_number",
"group_name_for_github_team",
"is_bot_user",
"normalize_scopes",
"number_to_base64",
"random_128_bits",
]
Expand Down Expand Up @@ -104,31 +103,6 @@ def group_name_for_github_team(organization: str, team: str) -> str:
return group_name


def normalize_scopes(v: str | list[str] | None) -> list[str] | None:
    """Pydantic validator for scope fields.

    Scopes are stored in the database as a comma-delimited, sorted list.
    Convert to the list representation we want to use in Python, preserving
    `None`.

    Parameters
    ----------
    v
        The field representing token scopes.

    Returns
    -------
    list of str or None
        The scopes as a list.
    """
    # Only the string form needs conversion; both None and an existing
    # list pass through unchanged.
    if isinstance(v, str):
        return v.split(",") if v else []
    return v


def number_to_base64(data: int) -> bytes:
"""Convert an integer to base64-encoded bytes in big endian order.
Expand Down

0 comments on commit 79da42c

Please sign in to comment.