diff --git a/CHANGES b/CHANGES index 844acbf..4c9eff0 100644 --- a/CHANGES +++ b/CHANGES @@ -20,3 +20,34 @@ Version 0.1 ----------- First public release. + +Version 1.0.0 +------------- +- Added comprehensive type hints to all modules +- Added `dict_to_message()`, `messages_to_list()`, and `parse_list()` utility functions +- Added `ReadOnlyProtoSerializer` class for read-only serializers +- Added `handle_exception()` method to Service class for customizable exception handling +- Added `StreamingResponseMixin` with `stream_queryset()` and `check_streaming_context()` utilities +- Added `grpc_exception_raised` signal for global exception handling +- Added `enable_signal_logging()` and `disable_signal_logging()` functions +- Added `SignalTester` context manager for testing signals +- Added new settings: `MAX_MESSAGE_LENGTH`, `PAGINATION_PAGE_SIZE`, `PAGINATION_MAX_PAGE_SIZE`, `EXCEPTION_HANDLER`, `DEFAULT_AUTHENTICATION_CLASSES`, `DEFAULT_PERMISSION_CLASSES` +- Added `get()`, `as_dict()`, and `__repr__()` methods to GRPCSettings +- Added `override_grpc_settings()` function for testing +- Added `set_code()`, `set_details()`, `is_active()`, and `time_remaining()` methods to FakeContext +- Added support for `PositiveBigIntegerField`, `FileField`, `ImageField`, `BinaryField`, and `JSONField` in proto generator +- Improved all docstrings with detailed descriptions and usage examples +- Improved error messages with f-strings and actionable guidance +- Improved exception handling with proper exception types and chaining +- Improved signal handler management with dispatch UIDs +- Improved proto type mapping to use `uint32`/`uint64` for positive integers +- Improved FakeRpcError with better `__str__()` and `__repr__()` methods +- Improved FakeServer error messages to show available methods +- Improved test multi-callables to accept full gRPC parameters +- Replaced `%` formatting with f-strings throughout +- Replaced assertions with `ImproperlyConfigured` exceptions in 
serializers +- Enhanced protobuf utility functions with explicit parameters +- Enhanced code organization with extracted helper functions +- Enhanced settings with more comprehensive defaults +- Fixed potential signal handler duplication issues +- Fixed queryset evaluation caching protection diff --git a/django_grpc_framework/__init__.py b/django_grpc_framework/__init__.py index fc79d63..5becc17 100644 --- a/django_grpc_framework/__init__.py +++ b/django_grpc_framework/__init__.py @@ -1 +1 @@ -__version__ = '0.2.1' +__version__ = "1.0.0" diff --git a/django_grpc_framework/generics.py b/django_grpc_framework/generics.py index f3ed527..1b029e1 100644 --- a/django_grpc_framework/generics.py +++ b/django_grpc_framework/generics.py @@ -1,180 +1,331 @@ -from django.db.models.query import QuerySet -from django.shortcuts import get_object_or_404 +""" +Generic service classes for Django gRPC Framework. + +Provides base classes for building gRPC services that interact with Django models, +similar to Django REST Framework's generic views and viewsets. +""" + +from typing import Any, Optional, Type + from django.core.exceptions import ValidationError +from django.db.models import Model, QuerySet from django.http import Http404 +from django.shortcuts import get_object_or_404 import grpc -from django_grpc_framework.utils import model_meta from django_grpc_framework import mixins, services +from django_grpc_framework.utils import model_meta class GenericService(services.Service): """ Base class for all other generic services. + + Provides common functionality for working with Django models and querysets + in a gRPC context, including: + - Queryset management + - Serializer integration + - Object lookup and filtering + - Context handling + + Subclasses should either set the `queryset` and `serializer_class` attributes + or override the corresponding getter methods. """ - # Either set this attribute or override ``get_queryset()``. 
- queryset = None - # Either set this attribute or override ``get_serializer_class()``. - serializer_class = None - # Set this if you want to use object lookups other than id - lookup_field = None - lookup_request_field = None - def get_queryset(self): + # Either set this attribute or override ``get_queryset()`` + queryset: Optional[QuerySet] = None + + # Either set this attribute or override ``get_serializer_class()`` + serializer_class: Optional[Type] = None + + # Set this if you want to use object lookups other than primary key + lookup_field: Optional[str] = None + lookup_request_field: Optional[str] = None + + def get_queryset(self) -> QuerySet: """ Get the list of items for this service. - This must be an iterable, and may be a queryset. - Defaults to using ``self.queryset``. - If you are overriding a handler method, it is important that you call - ``get_queryset()`` instead of accessing the ``queryset`` attribute as - ``queryset`` will get evaluated only once. + This must be an iterable, and may be a queryset. Defaults to using + ``self.queryset``. The queryset is re-evaluated on each request to + ensure fresh data. - Override this to provide dynamic behavior, for example:: + Returns: + QuerySet for this service - def get_queryset(self): - if self.action == 'ListSpecialUser': - return SpecialUser.objects.all() - return super().get_queryset() + Raises: + AssertionError: If neither queryset attribute nor this method is properly set + + Note: + If you are overriding a handler method, it is important that you call + ``get_queryset()`` instead of accessing the ``queryset`` attribute directly, + as ``queryset`` will get evaluated only once. 
+ + Example: + Override this to provide dynamic behavior:: + + def get_queryset(self): + if self.action == 'ListSpecialUser': + return SpecialUser.objects.all() + return super().get_queryset() """ assert self.queryset is not None, ( - "'%s' should either include a ``queryset`` attribute, " - "or override the ``get_queryset()`` method." - % self.__class__.__name__ + f"'{self.__class__.__name__}' should either include a `queryset` " + f"attribute, or override the `get_queryset()` method." ) + queryset = self.queryset if isinstance(queryset, QuerySet): - # Ensure queryset is re-evaluated on each request. + # Ensure queryset is re-evaluated on each request queryset = queryset.all() return queryset - def get_serializer_class(self): + def get_serializer_class(self) -> Type: """ - Return the class to use for the serializer. Defaults to using - `self.serializer_class`. + Return the class to use for the serializer. + + Defaults to using `self.serializer_class`. + + Returns: + Serializer class + + Raises: + AssertionError: If neither serializer_class attribute nor this + method is properly set """ assert self.serializer_class is not None, ( - "'%s' should either include a `serializer_class` attribute, " - "or override the `get_serializer_class()` method." - % self.__class__.__name__ + f"'{self.__class__.__name__}' should either include a " + f"`serializer_class` attribute, or override the " + f"`get_serializer_class()` method." ) return self.serializer_class - def get_object(self): + def get_object(self) -> Model: """ - Returns an object instance that should be used for detail services. - Defaults to using the lookup_field parameter to filter the base - queryset. + Return the object instance for detail services. + + Uses the lookup_field parameter to filter the base queryset. Defaults + to using the model's primary key field if lookup_field is not set. 
+ + Returns: + Model instance + + Raises: + grpc.RpcError: With NOT_FOUND status if object doesn't exist + + Note: + This method filters the queryset using filter_queryset() before + performing the lookup, allowing for additional filtering logic. """ queryset = self.filter_queryset(self.get_queryset()) - lookup_field = ( - self.lookup_field - or model_meta.get_model_pk(queryset.model).name - ) + + # Determine which field to use for lookup + lookup_field = self.lookup_field or model_meta.get_model_pk(queryset.model).name lookup_request_field = self.lookup_request_field or lookup_field + + # Validate that the request has the required field assert hasattr(self.request, lookup_request_field), ( - 'Expected service %s to be called with request that has a field ' - 'named "%s". Fix your request protocol definition, or set the ' - '`.lookup_field` attribute on the service correctly.' % - (self.__class__.__name__, lookup_request_field) + f"Expected service {self.__class__.__name__} to be called with a " + f"request that has a field named '{lookup_request_field}'. Fix your " + f"request protocol definition, or set the `.lookup_field` attribute " + f"on the service correctly." ) + + # Get the lookup value from the request lookup_value = getattr(self.request, lookup_request_field) filter_kwargs = {lookup_field: lookup_value} + + # Attempt to retrieve the object try: return get_object_or_404(queryset, **filter_kwargs) - except (TypeError, ValueError, ValidationError, Http404): - self.context.abort(grpc.StatusCode.NOT_FOUND, ( - '%s: %s not found!' 
% - (queryset.model.__name__, lookup_value) - )) + except (TypeError, ValueError, ValidationError, Http404) as exc: + model_name = queryset.model.__name__ + error_msg = f"{model_name} with {lookup_field}='{lookup_value}' not found" + self.context.abort(grpc.StatusCode.NOT_FOUND, error_msg) - def get_serializer(self, *args, **kwargs): + def get_serializer(self, *args, **kwargs) -> Any: """ - Return the serializer instance that should be used for validating and - deserializing input, and for serializing output. + Return the serializer instance for validation and serialization. + + The serializer is used for validating and deserializing input, and for + serializing output. + + Args: + *args: Positional arguments passed to serializer + **kwargs: Keyword arguments passed to serializer + + Returns: + Serializer instance """ serializer_class = self.get_serializer_class() - kwargs.setdefault('context', self.get_serializer_context()) + kwargs.setdefault("context", self.get_serializer_context()) return serializer_class(*args, **kwargs) - def get_serializer_context(self): + def get_serializer_context(self) -> dict[str, Any]: """ - Extra context provided to the serializer class. Defaults to including - ``grpc_request``, ``grpc_context``, and ``service`` keys. + Return extra context provided to the serializer class. + + Defaults to including ``grpc_request``, ``grpc_context``, and ``service`` keys. + + Returns: + Dictionary of context data """ return { - 'grpc_request': self.request, - 'grpc_context': self.context, - 'service': self, + "grpc_request": self.request, + "grpc_context": self.context, + "service": self, } - def filter_queryset(self, queryset): - """Given a queryset, filter it, returning a new queryset.""" + def filter_queryset(self, queryset: QuerySet) -> QuerySet: + """ + Filter the queryset, returning a new queryset. + + Override this method to implement custom filtering logic. 
+ + Args: + queryset: The base queryset to filter + + Returns: + Filtered queryset + + Example: + def filter_queryset(self, queryset): + # Apply custom filtering + return queryset.filter(is_active=True) + """ return queryset -class CreateService(mixins.CreateModelMixin, - GenericService): +class CreateService(mixins.CreateModelMixin, GenericService): """ - Concrete service for creating a model instance that provides a ``Create()`` - handler. + Concrete service for creating a model instance. + + Provides a ``Create()`` handler that accepts a protobuf message, + validates it, and creates a new model instance. + + Example: + class UserCreateService(CreateService): + queryset = User.objects.all() + serializer_class = UserProtoSerializer """ + pass -class ListService(mixins.ListModelMixin, - GenericService): +class ListService(mixins.ListModelMixin, GenericService): """ - Concrete service for listing a queryset that provides a ``List()`` handler. + Concrete service for listing a queryset. + + Provides a ``List()`` handler that streams model instances back to + the client. + + Example: + class UserListService(ListService): + queryset = User.objects.all() + serializer_class = UserProtoSerializer + + def filter_queryset(self, queryset): + # Optional: add filtering + return queryset.filter(is_active=True) """ + pass -class RetrieveService(mixins.RetrieveModelMixin, - GenericService): +class RetrieveService(mixins.RetrieveModelMixin, GenericService): """ - Concrete service for retrieving a model instance that provides a - ``Retrieve()`` handler. + Concrete service for retrieving a model instance. + + Provides a ``Retrieve()`` handler that fetches a single instance + by its identifier (typically primary key). 
+ + Example: + class UserRetrieveService(RetrieveService): + queryset = User.objects.all() + serializer_class = UserProtoSerializer + lookup_field = 'id' """ + pass -class DestroyService(mixins.DestroyModelMixin, - GenericService): +class DestroyService(mixins.DestroyModelMixin, GenericService): """ - Concrete service for deleting a model instance that provides a ``Destroy()`` - handler. + Concrete service for deleting a model instance. + + Provides a ``Destroy()`` handler that deletes an instance by its + identifier. + + Example: + class UserDestroyService(DestroyService): + queryset = User.objects.all() + serializer_class = UserProtoSerializer """ + pass -class UpdateService(mixins.UpdateModelMixin, - GenericService): +class UpdateService(mixins.UpdateModelMixin, GenericService): """ - Concrete service for updating a model instance that provides a - ``Update()`` handler. + Concrete service for updating a model instance. + + Provides an ``Update()`` handler that modifies an existing instance + with new data from the protobuf message. + + Example: + class UserUpdateService(UpdateService): + queryset = User.objects.all() + serializer_class = UserProtoSerializer """ + pass -class ReadOnlyModelService(mixins.RetrieveModelMixin, - mixins.ListModelMixin, - GenericService): +class ReadOnlyModelService( + mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericService +): """ - Concrete service that provides default ``List()`` and ``Retrieve()`` - handlers. + Concrete service that provides read-only access to a model. + + Provides default ``List()`` and ``Retrieve()`` handlers for viewing + model instances without modification capabilities. 
+ + Example: + class UserReadOnlyService(ReadOnlyModelService): + queryset = User.objects.all() + serializer_class = UserProtoSerializer """ + pass -class ModelService(mixins.CreateModelMixin, - mixins.RetrieveModelMixin, - mixins.UpdateModelMixin, - mixins.DestroyModelMixin, - mixins.ListModelMixin, - GenericService): +class ModelService( + mixins.CreateModelMixin, + mixins.RetrieveModelMixin, + mixins.UpdateModelMixin, + mixins.DestroyModelMixin, + mixins.ListModelMixin, + GenericService, +): """ - Concrete service that provides default ``Create()``, ``Retrieve()``, - ``Update()``, ``Destroy()`` and ``List()`` handlers. + Concrete service that provides full CRUD operations for a model. + + Provides default ``Create()``, ``Retrieve()``, ``Update()``, ``Destroy()`` + and ``List()`` handlers. + + This is the most common service class and provides all standard operations + for a model, similar to Django REST Framework's ModelViewSet. + + Example: + class UserService(ModelService): + queryset = User.objects.all() + serializer_class = UserProtoSerializer + + def get_queryset(self): + # Optional: customize queryset + if self.action == 'List': + return User.objects.filter(is_active=True) + return super().get_queryset() """ + pass diff --git a/django_grpc_framework/proto_serializers.py b/django_grpc_framework/proto_serializers.py index b1dd51d..0202d24 100644 --- a/django_grpc_framework/proto_serializers.py +++ b/django_grpc_framework/proto_serializers.py @@ -1,100 +1,341 @@ +""" +Protocol Buffer serializers for Django gRPC Framework. + +Provides serializer classes that bridge Django REST Framework's serialization +with Protocol Buffer messages, enabling seamless conversion between Django models, +Python dictionaries, and protobuf messages. 
+""" + +from typing import Any, Dict, List, Optional, Type + +from django.core.exceptions import ImproperlyConfigured +from google.protobuf.message import Message +from rest_framework.exceptions import ValidationError from rest_framework.serializers import ( - BaseSerializer, Serializer, ListSerializer, ModelSerializer, + BaseSerializer, + ListSerializer, + ModelSerializer, + Serializer, LIST_SERIALIZER_KWARGS, ) from rest_framework.settings import api_settings -from rest_framework.exceptions import ValidationError + from django_grpc_framework.protobuf.json_format import ( - message_to_dict, parse_dict + message_to_dict, + parse_dict, ) class BaseProtoSerializer(BaseSerializer): + """ + Base class for Protocol Buffer serializers. + + Extends Django REST Framework's BaseSerializer to add protobuf message + conversion capabilities. Handles bidirectional conversion between protobuf + messages and Python dictionaries. + + Attributes: + initial_message: The original protobuf message if initialized with one + """ + def __init__(self, *args, **kwargs): - message = kwargs.pop('message', None) + """ + Initialize the serializer. + + Args: + *args: Positional arguments passed to parent + message: Optional protobuf message to deserialize + **kwargs: Keyword arguments passed to parent + """ + message = kwargs.pop("message", None) if message is not None: self.initial_message = message - kwargs['data'] = self.message_to_data(message) + kwargs["data"] = self.message_to_data(message) super().__init__(*args, **kwargs) - def message_to_data(self, message): - """Protobuf message -> Dict of python primitive datatypes.""" - raise NotImplementedError('`message_to_data()` must be implemented.') + def message_to_data(self, message: Message) -> Dict[str, Any]: + """ + Convert a protobuf message to a dictionary of Python primitives. 
+ + Args: + message: Protocol Buffer message instance + + Returns: + Dictionary representation of the message + + Raises: + NotImplementedError: Must be implemented by subclasses + """ + raise NotImplementedError( + f"{self.__class__.__name__}.message_to_data() must be implemented." + ) - def data_to_message(self, data): - """Protobuf message <- Dict of python primitive datatypes.""" - raise NotImplementedError('`data_to_message()` must be implemented.') + def data_to_message(self, data: Dict[str, Any]) -> Message: + """ + Convert a dictionary of Python primitives to a protobuf message. + + Args: + data: Dictionary of primitive Python types + + Returns: + Protocol Buffer message instance + + Raises: + NotImplementedError: Must be implemented by subclasses + """ + raise NotImplementedError( + f"{self.__class__.__name__}.data_to_message() must be implemented." + ) @property - def message(self): - if not hasattr(self, '_message'): + def message(self) -> Message: + """ + Get the protobuf message representation of the serialized data. + + Returns: + Protocol Buffer message created from serializer data + + Note: + The message is cached after first access. To regenerate, delete + the _message attribute. + """ + if not hasattr(self, "_message"): self._message = self.data_to_message(self.data) return self._message @classmethod def many_init(cls, *args, **kwargs): - allow_empty = kwargs.pop('allow_empty', None) + """ + Initialize a serializer for handling multiple instances. + + This method is called when the serializer is instantiated with + many=True. 
+ + Args: + *args: Positional arguments + allow_empty: Whether to allow empty lists + **kwargs: Keyword arguments + + Returns: + ListProtoSerializer instance + """ + allow_empty = kwargs.pop("allow_empty", None) child_serializer = cls(*args, **kwargs) - list_kwargs = { - 'child': child_serializer, - } + + list_kwargs = {"child": child_serializer} + if allow_empty is not None: - list_kwargs['allow_empty'] = allow_empty - list_kwargs.update({ - key: value for key, value in kwargs.items() - if key in LIST_SERIALIZER_KWARGS - }) - meta = getattr(cls, 'Meta', None) - list_serializer_class = getattr(meta, 'list_serializer_class', ListProtoSerializer) + list_kwargs["allow_empty"] = allow_empty + + # Include only LIST_SERIALIZER_KWARGS from kwargs + list_kwargs.update( + { + key: value + for key, value in kwargs.items() + if key in LIST_SERIALIZER_KWARGS + } + ) + + # Get list serializer class from Meta or use default + meta = getattr(cls, "Meta", None) + list_serializer_class = getattr( + meta, "list_serializer_class", ListProtoSerializer + ) + return list_serializer_class(*args, **list_kwargs) class ProtoSerializer(BaseProtoSerializer, Serializer): - def message_to_data(self, message): - """Protobuf message -> Dict of python primitive datatypes. + """ + Standard Protocol Buffer serializer with explicit field definitions. + + Similar to DRF's Serializer, requires explicit field definitions. Handles + conversion between protobuf messages and validated Python data. + + Example: + class UserProtoSerializer(ProtoSerializer): + id = serializers.IntegerField(read_only=True) + name = serializers.CharField(max_length=100) + email = serializers.EmailField() + + class Meta: + proto_class = user_pb2.User + """ + + def message_to_data(self, message: Message) -> Dict[str, Any]: + """ + Convert a protobuf message to a dictionary. + + Uses the utility function to convert protobuf to dict with sensible + defaults for API usage. 
+ + Args: + message: Protocol Buffer message instance + + Returns: + Dictionary representation of the message """ return message_to_dict(message) - def data_to_message(self, data): - """Protobuf message <- Dict of python primitive datatypes.""" - assert hasattr(self, 'Meta'), ( - 'Class {serializer_class} missing "Meta" attribute'.format( - serializer_class=self.__class__.__name__ + def data_to_message(self, data: Dict[str, Any]) -> Message: + """ + Convert validated data to a protobuf message. + + Args: + data: Dictionary of validated Python primitives + + Returns: + Protocol Buffer message instance + + Raises: + ImproperlyConfigured: If Meta class or proto_class is missing + """ + if not hasattr(self, "Meta"): + raise ImproperlyConfigured( + f'{self.__class__.__name__} is missing a "Meta" class. ' + f'Define a Meta class with a "proto_class" attribute.' ) - ) - assert hasattr(self.Meta, 'proto_class'), ( - 'Class {serializer_class} missing "Meta.proto_class" attribute'.format( - serializer_class=self.__class__.__name__ + + if not hasattr(self.Meta, "proto_class"): + raise ImproperlyConfigured( + f'{self.__class__.__name__}.Meta is missing a "proto_class" ' + f"attribute. Set Meta.proto_class to your protobuf message class." ) - ) + return parse_dict(data, self.Meta.proto_class()) class ListProtoSerializer(BaseProtoSerializer, ListSerializer): - def message_to_data(self, message): + """ + Serializer for handling lists of Protocol Buffer messages. + + Automatically used when instantiating a ProtoSerializer with many=True. + Handles conversion of message lists to/from data lists. + + Attributes: + child: The child serializer instance for individual items + """ + + def message_to_data(self, message: List[Message]) -> List[Dict[str, Any]]: """ - List of protobuf messages -> List of dicts of python primitive datatypes. + Convert a list of protobuf messages to a list of dictionaries. 
+ + Args: + message: List of Protocol Buffer message instances + + Returns: + List of dictionary representations + + Raises: + ValidationError: If input is not a list """ if not isinstance(message, list): - error_message = self.error_messages['not_a_list'].format( + error_message = self.error_messages["not_a_list"].format( input_type=type(message).__name__ ) - raise ValidationError({ - api_settings.NON_FIELD_ERRORS_KEY: [error_message] - }, code='not_a_list') - ret = [] - for item in message: - ret.append(self.child.message_to_data(item)) - return ret + raise ValidationError( + {api_settings.NON_FIELD_ERRORS_KEY: [error_message]}, code="not_a_list" + ) - def data_to_message(self, data): + return [self.child.message_to_data(item) for item in message] + + def data_to_message(self, data: List[Dict[str, Any]]) -> List[Message]: """ - List of protobuf messages <- List of dicts of python primitive datatypes. + Convert a list of dictionaries to a list of protobuf messages. + + Args: + data: List of dictionaries containing validated data + + Returns: + List of Protocol Buffer message instances """ - return [ - self.child.data_to_message(item) for item in data - ] + return [self.child.data_to_message(item) for item in data] class ModelProtoSerializer(ProtoSerializer, ModelSerializer): - pass \ No newline at end of file + """ + Protocol Buffer serializer that automatically generates fields from a model. + + Combines the automatic field generation of ModelSerializer with protobuf + message conversion. This is the most commonly used serializer for gRPC + services backed by Django models. 
+ + Example: + class UserProtoSerializer(ModelProtoSerializer): + class Meta: + model = User + proto_class = user_pb2.User + fields = ['id', 'name', 'email', 'created_at'] + read_only_fields = ['id', 'created_at'] + + Example (all fields): + class UserProtoSerializer(ModelProtoSerializer): + class Meta: + model = User + proto_class = user_pb2.User + fields = '__all__' + + Example (with validation): + class UserProtoSerializer(ModelProtoSerializer): + class Meta: + model = User + proto_class = user_pb2.User + fields = ['id', 'name', 'email'] + + def validate_email(self, value): + if User.objects.filter(email=value).exists(): + raise ValidationError('Email already exists') + return value + """ + + pass + + +class ReadOnlyProtoSerializer(ModelProtoSerializer): + """ + Read-only Protocol Buffer serializer for models. + + Convenience class that makes all fields read-only by default. Useful for + response-only serializers where no writes are expected. + + Example: + class UserReadOnlyProtoSerializer(ReadOnlyProtoSerializer): + class Meta: + model = User + proto_class = user_pb2.User + fields = ['id', 'name', 'email', 'created_at'] + """ + + def get_fields(self): + """ + Get all fields and mark them as read-only. + + Returns: + Dictionary of field instances, all marked as read_only + """ + fields = super().get_fields() + for field in fields.values(): + field.read_only = True + return fields + + def create(self, validated_data): + """ + Disabled for read-only serializers. + + Raises: + NotImplementedError: Always, as this serializer is read-only + """ + raise NotImplementedError( + f"{self.__class__.__name__} is read-only and does not support create()" + ) + + def update(self, instance, validated_data): + """ + Disabled for read-only serializers. 
+ + Raises: + NotImplementedError: Always, as this serializer is read-only + """ + raise NotImplementedError( + f"{self.__class__.__name__} is read-only and does not support update()" + ) diff --git a/django_grpc_framework/protobuf/generators.py b/django_grpc_framework/protobuf/generators.py index 676ccfa..560073c 100644 --- a/django_grpc_framework/protobuf/generators.py +++ b/django_grpc_framework/protobuf/generators.py @@ -1,5 +1,6 @@ import io from collections import OrderedDict +from typing import Optional, Dict, List from django.db import models from rest_framework.utils import model_meta @@ -7,178 +8,268 @@ class ModelProtoGenerator: + """ + Generates Protocol Buffer (.proto) definitions from Django models. + + Args: + model: Django model class to generate proto from + field_names: Optional list of field names to include (defaults to all fields) + package: Optional package name (defaults to lowercase model name) + """ + type_mapping = { - # Numeric - models.AutoField: 'int32', - models.SmallIntegerField: 'int32', - models.IntegerField: 'int32', - models.BigIntegerField: 'int64', - models.PositiveSmallIntegerField: 'int32', - models.PositiveIntegerField: 'int32', - models.FloatField: 'float', - models.DecimalField: 'string', + # Numeric types + models.AutoField: "int32", + models.SmallIntegerField: "int32", + models.IntegerField: "int32", + models.BigIntegerField: "int64", + models.PositiveSmallIntegerField: "uint32", + models.PositiveIntegerField: "uint32", + models.PositiveBigIntegerField: "uint64", + models.FloatField: "float", + models.DecimalField: "string", # Decimal as string to preserve precision # Boolean - models.BooleanField: 'bool', - models.NullBooleanField: 'bool', - # Date and time - models.DateField: 'string', - models.TimeField: 'string', - models.DateTimeField: 'string', - models.DurationField: 'string', - # String - models.CharField: 'string', - models.TextField: 'string', - models.EmailField: 'string', - models.SlugField: 'string', - 
models.URLField: 'string', - models.UUIDField: 'string', - models.GenericIPAddressField: 'string', - models.FilePathField: 'string', - # Default - models.Field: 'string', + models.BooleanField: "bool", + models.NullBooleanField: "bool", + # Date and time (represented as strings for compatibility) + models.DateField: "string", + models.TimeField: "string", + models.DateTimeField: "string", + models.DurationField: "string", + # String types + models.CharField: "string", + models.TextField: "string", + models.EmailField: "string", + models.SlugField: "string", + models.URLField: "string", + models.UUIDField: "string", + models.GenericIPAddressField: "string", + models.FilePathField: "string", + models.FileField: "string", + models.ImageField: "string", + # Binary + models.BinaryField: "bytes", + # JSON (if available) + models.JSONField: "string", + # Default fallback + models.Field: "string", } - def __init__(self, model, field_names=None, package=None): + def __init__( + self, + model: type[models.Model], + field_names: Optional[List[str]] = None, + package: Optional[str] = None, + ): + if not issubclass(model, models.Model): + raise TypeError(f"{model} must be a Django model class") + self.model = model self.field_names = field_names - if not package: - package = model.__name__.lower() - self.package = package + self.package = package or model.__name__.lower() self.type_mapping = ClassLookupDict(self.type_mapping) - # Retrieve metadata about fields & relationships on the model class. 
+ + # Retrieve metadata about fields & relationships on the model class self.field_info = model_meta.get_field_info(model) self._writer = _CodeWriter() - def get_proto(self): - self._writer.write_line('syntax = "proto3";') - self._writer.write_line('') - self._writer.write_line('package %s;' % self.package) - self._writer.write_line('') - self._writer.write_line('import "google/protobuf/empty.proto";') - self._writer.write_line('') + def get_proto(self) -> str: + """Generate and return the complete .proto file content.""" + self._write_header() self._generate_service() - self._writer.write_line('') - self._generate_message() + self._writer.write_line("") + self._generate_messages() return self._writer.get_code() - def _generate_service(self): - self._writer.write_line('service %sController {' % self.model.__name__) + def _write_header(self) -> None: + """Write the proto file header with syntax, package, and imports.""" + self._writer.write_line('syntax = "proto3";') + self._writer.write_line("") + self._writer.write_line(f"package {self.package};") + self._writer.write_line("") + self._writer.write_line('import "google/protobuf/empty.proto";') + self._writer.write_line("") + + def _generate_service(self) -> None: + """Generate the gRPC service definition with CRUD operations.""" + model_name = self.model.__name__ + self._writer.write_line(f"service {model_name}Controller {{") + with self._writer.indent(): + # List - streams multiple instances self._writer.write_line( - 'rpc List(%sListRequest) returns (stream %s) {}' % - (self.model.__name__, self.model.__name__) + f"rpc List({model_name}ListRequest) returns (stream {model_name}) {{}}" ) + # Create - returns created instance self._writer.write_line( - 'rpc Create(%s) returns (%s) {}' % - (self.model.__name__, self.model.__name__) + f"rpc Create({model_name}) returns ({model_name}) {{}}" ) + # Retrieve - returns single instance self._writer.write_line( - 'rpc Retrieve(%sRetrieveRequest) returns (%s) {}' % - 
(self.model.__name__, self.model.__name__) + f"rpc Retrieve({model_name}RetrieveRequest) returns ({model_name}) {{}}" ) + # Update - returns updated instance self._writer.write_line( - 'rpc Update(%s) returns (%s) {}' % - (self.model.__name__, self.model.__name__) + f"rpc Update({model_name}) returns ({model_name}) {{}}" ) + # Destroy - returns empty response self._writer.write_line( - 'rpc Destroy(%s) returns (google.protobuf.Empty) {}' % - self.model.__name__ + f"rpc Destroy({model_name}) returns (google.protobuf.Empty) {{}}" ) - self._writer.write_line('}') - def _generate_message(self): - self._writer.write_line('message %s {' % self.model.__name__) + self._writer.write_line("}") + + def _generate_messages(self) -> None: + """Generate all message definitions for the model.""" + self._generate_main_message() + self._writer.write_line("") + self._generate_list_request_message() + self._writer.write_line("") + self._generate_retrieve_request_message() + + def _generate_main_message(self) -> None: + """Generate the main message representing the model.""" + model_name = self.model.__name__ + self._writer.write_line(f"message {model_name} {{") + with self._writer.indent(): - number = 0 - for field_name, proto_type in self.get_fields().items(): - number += 1 - self._writer.write_line( - '%s %s = %s;' % - (proto_type, field_name, number) - ) - self._writer.write_line('}') - self._writer.write_line('') - self._writer.write_line('message %sListRequest {' % self.model.__name__) - self._writer.write_line('}') - self._writer.write_line('') - self._writer.write_line('message %sRetrieveRequest {' % self.model.__name__) + for number, (field_name, proto_type) in enumerate( + self.get_fields().items(), start=1 + ): + self._writer.write_line(f"{proto_type} {field_name} = {number};") + + self._writer.write_line("}") + + def _generate_list_request_message(self) -> None: + """Generate the List request message (empty for now, can be extended with filters).""" + model_name = 
self.model.__name__ + self._writer.write_line(f"message {model_name}ListRequest {{") + with self._writer.indent(): + self._writer.write_line("// Add filtering/pagination fields here if needed") + self._writer.write_line("}") + + def _generate_retrieve_request_message(self) -> None: + """Generate the Retrieve request message with primary key field.""" + model_name = self.model.__name__ + self._writer.write_line(f"message {model_name}RetrieveRequest {{") + with self._writer.indent(): pk_field_name = self.field_info.pk.name pk_proto_type = self.build_proto_type( pk_field_name, self.field_info, self.model ) - self._writer.write_line( - '%s %s = 1;' % - (pk_proto_type, pk_field_name) - ) - self._writer.write_line('}') + self._writer.write_line(f"{pk_proto_type} {pk_field_name} = 1;") + + self._writer.write_line("}") - def get_fields(self): + def get_fields(self) -> OrderedDict: """ - Return the dict of field names -> proto types. + Return an ordered dict of field names to proto types. + + Raises: + ValueError: If the model is abstract """ if model_meta.is_abstract_model(self.model): - raise ValueError('Cannot generate proto for abstract model.') + raise ValueError( + f"Cannot generate proto for abstract model {self.model.__name__}" + ) + fields = OrderedDict() for field_name in self.get_field_names(): - if field_name in fields: - continue - fields[field_name] = self.build_proto_type( - field_name, self.field_info, self.model - ) + if field_name not in fields: # Avoid duplicates + fields[field_name] = self.build_proto_type( + field_name, self.field_info, self.model + ) return fields - def get_field_names(self): - field_names = self.field_names - if not field_names: - field_names = ( - [self.field_info.pk.name] - + list(self.field_info.fields) - + list(self.field_info.forward_relations) - ) - return field_names + def get_field_names(self) -> List[str]: + """ + Get the list of field names to include in the proto. 
+ + Returns list in order: primary key, regular fields, forward relations. + """ + if self.field_names: + return self.field_names + + return ( + [self.field_info.pk.name] + + list(self.field_info.fields) + + list(self.field_info.forward_relations) + ) + + def build_proto_type( + self, field_name: str, field_info, model_class: type[models.Model] + ) -> str: + """ + Build the proto type string for a given field. + + Args: + field_name: Name of the field + field_info: Field metadata from model_meta.get_field_info + model_class: The model class containing the field - def build_proto_type(self, field_name, field_info, model_class): + Returns: + Proto type string (e.g., 'string', 'int32', 'repeated int64') + + Raises: + ValueError: If field_name is not valid for the model + """ if field_name in field_info.fields_and_pk: model_field = field_info.fields_and_pk[field_name] return self._build_standard_proto_type(model_field) + elif field_name in field_info.relations: relation_info = field_info.relations[field_name] return self._build_relational_proto_type(relation_info) + else: raise ValueError( - 'Field name `%s` is not valid for model `%s`.' 
% - (field_name, model_class.__name__) + f"Field name `{field_name}` is not valid for model `{model_class.__name__}`" ) - def _build_standard_proto_type(self, model_field): - if model_field.one_to_one and model_field.primary_key: + def _build_standard_proto_type(self, model_field) -> str: + """Build proto type for standard Django model fields.""" + # Handle OneToOne primary key fields + if ( + hasattr(model_field, "one_to_one") + and model_field.one_to_one + and model_field.primary_key + ): info = model_meta.get_field_info(model_field.related_model) - return self.build_proto_type( - info.pk.name, info, model_field.related_model - ) - else: - return self.type_mapping[model_field] + return self.build_proto_type(info.pk.name, info, model_field.related_model) - def _build_relational_proto_type(self, relation_info): + return self.type_mapping[model_field] + + def _build_relational_proto_type(self, relation_info) -> str: + """Build proto type for relational fields (ForeignKey, ManyToMany, etc.).""" info = model_meta.get_field_info(relation_info.related_model) - to_field = info.pk.name - if relation_info.to_field and not relation_info.reverse: - to_field = relation_info.to_field - proto_type = self.build_proto_type( - to_field, info, relation_info.related_model + + # Determine which field to use (default to pk) + to_field = ( + relation_info.to_field + if (relation_info.to_field and not relation_info.reverse) + else info.pk.name ) + + proto_type = self.build_proto_type(to_field, info, relation_info.related_model) + + # Add 'repeated' prefix for to-many relationships if relation_info.to_many: - proto_type = 'repeated ' + proto_type + proto_type = f"repeated {proto_type}" + return proto_type class _CodeWriter: + """Helper class to write indented code with proper formatting.""" + def __init__(self): self.buffer = io.StringIO() self._indent = 0 + self._indent_str = " " # 4 spaces def indent(self): + """Context manager for indented code blocks.""" return self def 
__enter__(self): @@ -188,10 +279,11 @@ def __enter__(self): def __exit__(self, *args): self._indent -= 1 - def write_line(self, line): - for i in range(self._indent): - self.buffer.write(" ") - print(line, file=self.buffer) + def write_line(self, line: str) -> None: + """Write a line with proper indentation.""" + indent = self._indent_str * self._indent + self.buffer.write(f"{indent}{line}\n") - def get_code(self): - return self.buffer.getvalue() \ No newline at end of file + def get_code(self) -> str: + """Get the complete generated code.""" + return self.buffer.getvalue() diff --git a/django_grpc_framework/protobuf/json_format.py b/django_grpc_framework/protobuf/json_format.py index b8ddfe5..5ac6566 100644 --- a/django_grpc_framework/protobuf/json_format.py +++ b/django_grpc_framework/protobuf/json_format.py @@ -1,12 +1,207 @@ +""" +Protocol Buffer utility functions for converting between messages and dictionaries. + +Provides convenient wrappers around google.protobuf.json_format with sensible defaults +for common use cases in API development. +""" + +from typing import Any, Dict, Type, TypeVar + from google.protobuf.json_format import MessageToDict, ParseDict +from google.protobuf.message import Message + + +# Type variable for protobuf messages +T = TypeVar("T", bound=Message) + + +def message_to_dict( + message: Message, + *, + including_default_value_fields: bool = True, + preserving_proto_field_name: bool = True, + use_integers_for_enums: bool = False, + descriptor_pool: Any = None, + float_precision: int | None = None, + **kwargs, +) -> Dict[str, Any]: + """ + Convert a Protocol Buffer message to a Python dictionary. + + This is a convenience wrapper around MessageToDict with sensible defaults + for API development. All fields are included by default, and proto field + names are preserved (snake_case) instead of converting to camelCase. 
+ + Args: + message: The Protocol Buffer message to convert + including_default_value_fields: Include fields with default values (default: True) + preserving_proto_field_name: Keep snake_case field names (default: True) + use_integers_for_enums: Use enum integers instead of names (default: False) + descriptor_pool: A descriptor pool for resolving types + float_precision: Number of digits after decimal point for floats (default: None) + **kwargs: Additional arguments passed to MessageToDict + + Returns: + Dictionary representation of the message + + Example: + >>> from my_proto import User + >>> user = User(id=1, name="Alice", email="alice@example.com") + >>> message_to_dict(user) + {'id': 1, 'name': 'Alice', 'email': 'alice@example.com'} + """ + # Build kwargs dict with defaults + options = { + "including_default_value_fields": including_default_value_fields, + "preserving_proto_field_name": preserving_proto_field_name, + "use_integers_for_enums": use_integers_for_enums, + } + + if descriptor_pool is not None: + options["descriptor_pool"] = descriptor_pool + if float_precision is not None: + options["float_precision"] = float_precision + + # Merge with any additional kwargs + options.update(kwargs) + + return MessageToDict(message, **options) + + +def parse_dict( + js_dict: Dict[str, Any], + message: T, + *, + ignore_unknown_fields: bool = True, + descriptor_pool: Any = None, + max_recursion_depth: int = 100, + **kwargs, +) -> T: + """ + Parse a Python dictionary into a Protocol Buffer message. + + This is a convenience wrapper around ParseDict with sensible defaults + for API development. Unknown fields are ignored by default to allow + for graceful handling of extra fields in JSON payloads. 
+ + Args: + js_dict: Dictionary to parse (typically from JSON) + message: Protocol Buffer message instance to populate + ignore_unknown_fields: Ignore fields not in the message schema (default: True) + descriptor_pool: A descriptor pool for resolving types + max_recursion_depth: Maximum recursion depth for nested messages (default: 100) + **kwargs: Additional arguments passed to ParseDict + + Returns: + The populated message instance (same object as `message` parameter) + + Raises: + ParseError: If the dictionary cannot be parsed into the message + + Example: + >>> from my_proto import User + >>> data = {'id': 1, 'name': 'Alice', 'email': 'alice@example.com'} + >>> user = User() + >>> parse_dict(data, user) + >>> print(user.name) + Alice + """ + # Build kwargs dict with defaults + options = { + "ignore_unknown_fields": ignore_unknown_fields, + "max_recursion_depth": max_recursion_depth, + } + + if descriptor_pool is not None: + options["descriptor_pool"] = descriptor_pool + + # Merge with any additional kwargs + options.update(kwargs) + + return ParseDict(js_dict, message, **options) + + +def dict_to_message( + js_dict: Dict[str, Any], + message_class: Type[T], + *, + ignore_unknown_fields: bool = True, + descriptor_pool: Any = None, + max_recursion_depth: int = 100, + **kwargs, +) -> T: + """ + Create and populate a new Protocol Buffer message from a dictionary. + + This is a convenience function that creates a new message instance + and populates it in one step. 
+ + Args: + js_dict: Dictionary to parse (typically from JSON) + message_class: Protocol Buffer message class to instantiate + ignore_unknown_fields: Ignore fields not in the message schema (default: True) + descriptor_pool: A descriptor pool for resolving types + max_recursion_depth: Maximum recursion depth for nested messages (default: 100) + **kwargs: Additional arguments passed to ParseDict + + Returns: + A new populated message instance + + Example: + >>> from my_proto import User + >>> data = {'id': 1, 'name': 'Alice', 'email': 'alice@example.com'} + >>> user = dict_to_message(data, User) + >>> print(user.name) + Alice + """ + message = message_class() + return parse_dict( + js_dict, + message, + ignore_unknown_fields=ignore_unknown_fields, + descriptor_pool=descriptor_pool, + max_recursion_depth=max_recursion_depth, + **kwargs, + ) + + +def messages_to_list(messages: list[Message], **kwargs) -> list[Dict[str, Any]]: + """ + Convert a list of Protocol Buffer messages to a list of dictionaries. + + Args: + messages: List of Protocol Buffer messages + **kwargs: Arguments passed to message_to_dict + + Returns: + List of dictionary representations + + Example: + >>> users = [User(id=1, name="Alice"), User(id=2, name="Bob")] + >>> messages_to_list(users) + [{'id': 1, 'name': 'Alice'}, {'id': 2, 'name': 'Bob'}] + """ + return [message_to_dict(msg, **kwargs) for msg in messages] + +def parse_list( + js_list: list[Dict[str, Any]], message_class: Type[T], **kwargs +) -> list[T]: + """ + Parse a list of dictionaries into Protocol Buffer messages. 
-def message_to_dict(message, **kwargs): - kwargs.setdefault('including_default_value_fields', True) - kwargs.setdefault('preserving_proto_field_name', True) - return MessageToDict(message, **kwargs) + Args: + js_list: List of dictionaries to parse + message_class: Protocol Buffer message class to instantiate + **kwargs: Arguments passed to dict_to_message + Returns: + List of populated message instances -def parse_dict(js_dict, message, **kwargs): - kwargs.setdefault('ignore_unknown_fields', True) - return ParseDict(js_dict, message, **kwargs) + Example: + >>> data = [{'id': 1, 'name': 'Alice'}, {'id': 2, 'name': 'Bob'}] + >>> users = parse_list(data, User) + >>> print(users[0].name) + Alice + """ + return [dict_to_message(item, message_class, **kwargs) for item in js_list] diff --git a/django_grpc_framework/services.py b/django_grpc_framework/services.py index 4e341cf..e345e32 100644 --- a/django_grpc_framework/services.py +++ b/django_grpc_framework/services.py @@ -1,63 +1,325 @@ +""" +Base service class for Django gRPC Framework. + +Provides the core Service class that acts as a bridge between gRPC servicers +and Django-style request handling, similar to Django REST Framework's views. +""" + +import logging from functools import update_wrapper +from typing import Any, Callable, Dict, Optional, Type import grpc from django.db.models.query import QuerySet +from google.protobuf.message import Message from django_grpc_framework.signals import grpc_request_started, grpc_request_finished +logger = logging.getLogger(__name__) + + class Service: + """ + Base class for gRPC services in Django gRPC Framework. + + This class provides the foundation for building gRPC services that integrate + with Django models and follows patterns similar to Django REST Framework's + views and viewsets. + + The service acts as a controller that handles gRPC requests, manages context, + and delegates to handler methods (like List, Create, Retrieve, etc.). 
+ + Attributes: + request: The current gRPC request message + context: The current gRPC context + action: The name of the current action being executed + + Example: + class UserService(Service): + def List(self, request, context): + users = User.objects.all() + for user in users: + yield user_pb2.User(id=user.id, name=user.name) + """ + + # These will be set by the servicer wrapper + request: Optional[Message] = None + context: Optional[grpc.ServicerContext] = None + action: Optional[str] = None + def __init__(self, **kwargs): + """ + Initialize the service with arbitrary keyword arguments. + + Any keyword arguments passed will be set as attributes on the instance. + This allows for dependency injection and configuration at instantiation. + + Args: + **kwargs: Arbitrary keyword arguments to set as instance attributes + """ for key, value in kwargs.items(): setattr(self, key, value) @classmethod - def as_servicer(cls, **initkwargs): + def as_servicer(cls, **initkwargs) -> Any: """ - Returns a gRPC servicer instance:: + Create a gRPC servicer instance from this service class. + + This class method returns a servicer object that can be registered with + a gRPC server. The servicer dynamically creates handler methods for each + RPC defined in the service class. + + Args: + **initkwargs: Keyword arguments passed to service __init__ for each request - servicer = PostService.as_servicer() - add_PostControllerServicer_to_server(servicer, server) + Returns: + A servicer instance ready to be registered with a gRPC server + + Raises: + TypeError: If initkwargs contains keys not present as class attributes + + Example: + servicer = UserService.as_servicer() + add_UserControllerServicer_to_server(servicer, server) + + Example with configuration: + servicer = UserService.as_servicer( + queryset=User.objects.filter(is_active=True) + ) """ - for key in initkwargs: - if not hasattr(cls, key): - raise TypeError( - "%s() received an invalid keyword %r. 
as_servicer only " - "accepts arguments that are already attributes of the " - "class." % (cls.__name__, key) - ) - if isinstance(getattr(cls, 'queryset', None), QuerySet): - def force_evaluation(): - raise RuntimeError( - 'Do not evaluate the `.queryset` attribute directly, ' - 'as the result will be cached and reused between requests.' - ' Use `.all()` or call `.get_queryset()` instead.' + # Validate that all initkwargs correspond to existing class attributes + _validate_initkwargs(cls, initkwargs) + + # Protect against queryset evaluation caching + _protect_queryset_evaluation(cls) + + # Create the servicer class that wraps our service + servicer = _create_servicer_wrapper(cls, initkwargs) + + return servicer + + def handle_exception(self, exc: Exception) -> None: + """ + Handle exceptions that occur during request processing. + + Override this method to customize exception handling. By default, + logs the exception and sets appropriate gRPC status codes. + + Args: + exc: The exception that occurred + """ + logger.exception(f"Exception in {self.__class__.__name__}.{self.action}: {exc}") + + if not self.context._state.client: # Check if context is still active + return + + # Set appropriate status code based on exception type + if isinstance(exc, NotImplementedError): + self.context.set_code(grpc.StatusCode.UNIMPLEMENTED) + self.context.set_details("Method not implemented") + elif isinstance(exc, PermissionError): + self.context.set_code(grpc.StatusCode.PERMISSION_DENIED) + self.context.set_details("Permission denied") + elif isinstance(exc, ValueError): + self.context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + self.context.set_details(str(exc)) + else: + self.context.set_code(grpc.StatusCode.INTERNAL) + self.context.set_details("Internal server error") + + +def _validate_initkwargs(cls: Type[Service], initkwargs: Dict[str, Any]) -> None: + """ + Validate that initkwargs only contains existing class attributes. 
+ + Args: + cls: The service class + initkwargs: Keyword arguments to validate + + Raises: + TypeError: If any key in initkwargs is not a class attribute + """ + for key in initkwargs: + if not hasattr(cls, key): + raise TypeError( + f"{cls.__name__}() received an invalid keyword '{key}'. " + f"as_servicer() only accepts arguments that are already " + f"attributes of the class." + ) + + +def _protect_queryset_evaluation(cls: Type[Service]) -> None: + """ + Prevent queryset caching by raising an error if accessed directly. + + This ensures that querysets are re-evaluated for each request rather + than being cached at the class level. + + Args: + cls: The service class to protect + """ + queryset = getattr(cls, "queryset", None) + + if isinstance(queryset, QuerySet): + + def force_evaluation(): + raise RuntimeError( + "Do not evaluate the `.queryset` attribute directly, as the " + "result will be cached and reused between requests. Use " + "`.all()` or call `.get_queryset()` instead." + ) + + cls.queryset._fetch_all = force_evaluation + + +def _create_servicer_wrapper(cls: Type[Service], initkwargs: Dict[str, Any]) -> Any: + """ + Create a servicer wrapper class that delegates to the service. + + The wrapper dynamically creates handler methods for each action defined + on the service class. Each handler creates a fresh service instance, + sends signals, and delegates to the appropriate method. + + Args: + cls: The service class to wrap + initkwargs: Keyword arguments to pass to service __init__ + + Returns: + An instance of the servicer wrapper class + """ + + class Servicer: + """ + Dynamic servicer that wraps a Django gRPC Framework service. + + This class intercepts attribute access to dynamically create handler + methods for gRPC actions. Each handler creates a fresh service instance, + manages request lifecycle, and sends appropriate signals. 
+ """ + + def __getattr__(self, action: str) -> Callable: + """ + Dynamically create a handler method for the requested action. + + Args: + action: The name of the action/RPC method + + Returns: + A handler function for the gRPC call + """ + # If the service doesn't implement this action, return not_implemented + if not hasattr(cls, action): + return not_implemented + + def handler(request: Message, context: grpc.ServicerContext) -> Any: + """ + Handle a gRPC request by delegating to the service. + + Args: + request: The protobuf request message + context: The gRPC service context + + Returns: + The response from the service method + """ + # Send request started signal + grpc_request_started.send( + sender=handler, request=request, context=context ) - cls.queryset._fetch_all = force_evaluation - - class Servicer: - def __getattr__(self, action): - if not hasattr(cls, action): - return not_implemented - - def handler(request, context): - grpc_request_started.send(sender=handler, request=request, context=context) - try: - self = cls(**initkwargs) - self.request = request - self.context = context - self.action = action - return getattr(self, action)(request, context) - finally: - grpc_request_finished.send(sender=handler) - update_wrapper(handler, getattr(cls, action)) - return handler - update_wrapper(Servicer, cls, updated=()) - return Servicer() - - -def not_implemented(request, context): - """Method not implemented""" + + try: + # Create a fresh service instance for this request + service = cls(**initkwargs) + service.request = request + service.context = context + service.action = action + + # Delegate to the service method + return getattr(service, action)(request, context) + + except Exception as exc: + # Let the service handle the exception + if "service" in locals(): + service.handle_exception(exc) + raise + + finally: + # Always send request finished signal + grpc_request_finished.send(sender=handler) + + # Preserve metadata from the original method + 
update_wrapper(handler, getattr(cls, action)) + return handler + + # Preserve metadata from the service class + update_wrapper(Servicer, cls, updated=()) + + return Servicer() + + +def not_implemented(request: Message, context: grpc.ServicerContext) -> None: + """ + Default handler for unimplemented gRPC methods. + + This function is called when a gRPC method is invoked but not implemented + in the service class. It sets the appropriate status code and raises an + exception. + + Args: + request: The protobuf request message (unused) + context: The gRPC service context + + Raises: + NotImplementedError: Always, to indicate the method is not implemented + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +class StreamingResponseMixin: + """ + Mixin for services that return streaming responses. + + Provides utility methods for working with streaming gRPC responses, + such as sending multiple messages over time. + + Example: + class UserListService(StreamingResponseMixin, Service): + def List(self, request, context): + for user in self.get_queryset(): + yield self.serialize_message(user) + """ + + def check_streaming_context(self) -> bool: + """ + Check whether the gRPC context has gone inactive (e.g. the client + cancelled the call), meaning streaming should stop. + + Returns: + True if the context exists and is no longer active, False otherwise + """ + return not self.context.is_active() if self.context else False + + def stream_queryset( + self, queryset: QuerySet, serializer_class: Type, chunk_size: int = 100 + ): + """ + Stream a queryset efficiently in chunks.
+ + Args: + queryset: Django queryset to stream + serializer_class: Serializer class for converting objects + chunk_size: Number of objects to fetch at once + + Yields: + Protobuf messages representing queryset objects + """ + iterator = queryset.iterator(chunk_size=chunk_size) + + for obj in iterator: + if self.check_streaming_context(): + break + + serializer = serializer_class(obj) + yield serializer.message diff --git a/django_grpc_framework/settings.py b/django_grpc_framework/settings.py index 2f4394e..6043daf 100644 --- a/django_grpc_framework/settings.py +++ b/django_grpc_framework/settings.py @@ -1,96 +1,195 @@ """ -Settings for gRPC framework are all namespaced in the GRPC_FRAMEWORK setting. -For example your project's `settings.py` file might look like this: +Settings for Django gRPC Framework. -GRPC_FRAMEWORK = { - 'ROOT_HANDLERS_HOOK': 'path.to.my.custom_grpc_handlers', -} +All settings are namespaced in the GRPC_FRAMEWORK setting. For example, +your project's `settings.py` file might look like this: + + GRPC_FRAMEWORK = { + 'ROOT_HANDLERS_HOOK': 'path.to.my.custom_grpc_handlers', + 'SERVER_INTERCEPTORS': [ + 'myapp.interceptors.AuthInterceptor', + 'myapp.interceptors.LoggingInterceptor', + ], + 'PAGINATION_PAGE_SIZE': 100, + } -This module provides the `grpc_setting` object, that is used to access -gRPC framework settings, checking for user settings first, then falling -back to the defaults. +This module provides the `grpc_settings` object, used to access gRPC framework +settings, checking for user settings first, then falling back to the defaults. 
+ +Example usage: + from django_grpc_framework.settings import grpc_settings + print(grpc_settings.ROOT_HANDLERS_HOOK) """ + +from typing import Any, Dict, List, Optional, Set, Union + from django.conf import settings from django.test.signals import setting_changed from django.utils.module_loading import import_string -DEFAULTS = { +# Default settings for Django gRPC Framework +DEFAULTS: Dict[str, Any] = { # Root grpc handlers hook configuration - 'ROOT_HANDLERS_HOOK': None, - - # gRPC server configuration - 'SERVER_INTERCEPTORS': None, + # If None, defaults to .grpc_handlers + "ROOT_HANDLERS_HOOK": None, + # gRPC server interceptors (list of import strings or classes) + "SERVER_INTERCEPTORS": None, + # Maximum message size for gRPC requests/responses (in bytes) + "MAX_MESSAGE_LENGTH": 4 * 1024 * 1024, # 4MB + # Default pagination settings + "PAGINATION_PAGE_SIZE": 100, + "PAGINATION_MAX_PAGE_SIZE": 1000, + # Exception handler for gRPC services + "EXCEPTION_HANDLER": "django_grpc_framework.exceptions.exception_handler", + # Default authentication classes + "DEFAULT_AUTHENTICATION_CLASSES": [], + # Default permission classes + "DEFAULT_PERMISSION_CLASSES": [], } - -# List of settings that may be in string import notation. -IMPORT_STRINGS = [ - 'ROOT_HANDLERS_HOOK', - 'SERVER_INTERCEPTORS', +# Settings that may be in string import notation and should be imported +IMPORT_STRINGS: List[str] = [ + "ROOT_HANDLERS_HOOK", + "SERVER_INTERCEPTORS", + "EXCEPTION_HANDLER", + "DEFAULT_AUTHENTICATION_CLASSES", + "DEFAULT_PERMISSION_CLASSES", ] -def perform_import(val, setting_name): +def perform_import(val: Any, setting_name: str) -> Any: """ - If the given setting is a string import notation, - then perform the necessary import or imports. + Import the given setting if it's a string import notation. + + Handles None values, string imports, and lists/tuples of string imports. + For ROOT_HANDLERS_HOOK, if None, attempts to import from ROOT_URLCONF. 
+ + Args: + val: The setting value to potentially import + setting_name: The name of the setting (for error messages) + + Returns: + The imported class/function or the original value + + Raises: + ImportError: If the import string cannot be resolved """ if val is None: - # We need the ROOT_URLCONF so we do this lazily - if setting_name == 'ROOT_HANDLERS_HOOK': + # Special handling for ROOT_HANDLERS_HOOK - lazy import from ROOT_URLCONF + if setting_name == "ROOT_HANDLERS_HOOK": return import_from_string( - '%s.grpc_handlers' % settings.ROOT_URLCONF, + f"{settings.ROOT_URLCONF}.grpc_handlers", setting_name, ) return None + elif isinstance(val, str): return import_from_string(val, setting_name) + elif isinstance(val, (list, tuple)): return [import_from_string(item, setting_name) for item in val] + return val -def import_from_string(val, setting_name): +def import_from_string(val: str, setting_name: str) -> Any: """ - Attempt to import a class from a string representation. + Import a class or function from a string representation. + + Args: + val: The import string (e.g., 'myapp.handlers.MyHandler') + setting_name: The name of the setting (for error messages) + + Returns: + The imported class or function + + Raises: + ImportError: If the import fails with detailed error information """ try: return import_string(val) except ImportError as e: raise ImportError( - "Could not import '%s' for GRPC setting '%s'. %s: %s." % - (val, setting_name, e.__class__.__name__, e) - ) + f"Could not import '{val}' for GRPC_FRAMEWORK setting " + f"'{setting_name}'. {e.__class__.__name__}: {e}" + ) from e class GRPCSettings: """ - A settings object that allows gRPC Framework settings to be accessed as - properties. For example: + Settings object for Django gRPC Framework. + + Allows gRPC Framework settings to be accessed as properties. Settings are + resolved in this order: + 1. User settings from Django settings.GRPC_FRAMEWORK + 2. 
Default values from DEFAULTS + + Settings with string import paths are automatically resolved and return + the imported class/function rather than the string literal. + + The settings object caches resolved values for performance. Call reload() + to clear the cache when settings change. + Example: from django_grpc_framework.settings import grpc_settings - print(grpc_settings.ROOT_HANDLERS_HOOK) - Any setting with string import paths will be automatically resolved - and return the class, rather than the string literal. + # Access a setting + page_size = grpc_settings.PAGINATION_PAGE_SIZE + + # Get interceptors (automatically imported) + interceptors = grpc_settings.SERVER_INTERCEPTORS """ - def __init__(self, user_settings=None, defaults=None, import_strings=None): + + def __init__( + self, + user_settings: Optional[Dict[str, Any]] = None, + defaults: Optional[Dict[str, Any]] = None, + import_strings: Optional[List[str]] = None, + ): + """ + Initialize the settings object. + + Args: + user_settings: Optional user-defined settings dict + defaults: Optional defaults dict (uses DEFAULTS if not provided) + import_strings: Optional list of settings that should be imported + """ if user_settings: self._user_settings = user_settings self.defaults = defaults or DEFAULTS self.import_strings = import_strings or IMPORT_STRINGS - self._cached_attrs = set() + self._cached_attrs: Set[str] = set() @property - def user_settings(self): - if not hasattr(self, '_user_settings'): - self._user_settings = getattr(settings, 'GRPC_FRAMEWORK', {}) + def user_settings(self) -> Dict[str, Any]: + """ + Get user-defined settings from Django settings.GRPC_FRAMEWORK. + + Lazy-loaded and cached after first access. 
+ + Returns: + Dictionary of user-defined settings + """ + if not hasattr(self, "_user_settings"): + self._user_settings = getattr(settings, "GRPC_FRAMEWORK", {}) return self._user_settings - def __getattr__(self, attr): + def __getattr__(self, attr: str) -> Any: + """ + Retrieve a setting value. + + Args: + attr: The setting name to retrieve + + Returns: + The setting value (imported if necessary) + + Raises: + AttributeError: If the setting name is not valid + """ if attr not in self.defaults: - raise AttributeError("Invalid gRPC setting: '%s'" % attr) + raise AttributeError(f"Invalid GRPC_FRAMEWORK setting: '{attr}'") try: # Check if present in user settings @@ -99,30 +198,113 @@ def __getattr__(self, attr): # Fall back to defaults val = self.defaults[attr] - # Coerce import strings into classes + # Coerce import strings into classes/functions if attr in self.import_strings: val = perform_import(val, attr) - # Cache the result + # Cache the result for performance self._cached_attrs.add(attr) setattr(self, attr, val) return val - def reload(self): + def __repr__(self) -> str: + """ + String representation showing cached settings. + + Returns: + String representation of the settings object + """ + cached = ", ".join(sorted(self._cached_attrs)) or "none" + return f"<GRPCSettings cached=[{cached}]>" + + def reload(self) -> None: + """ + Clear all cached settings. + + Call this method when Django settings change to force re-evaluation + of all settings on next access. + """ for attr in self._cached_attrs: delattr(self, attr) self._cached_attrs.clear() - if hasattr(self, '_user_settings'): - delattr(self, '_user_settings') + + if hasattr(self, "_user_settings"): + delattr(self, "_user_settings") + + def get(self, key: str, default: Any = None) -> Any: + """ + Get a setting value with a fallback default. + + Similar to dict.get(), but respects the settings hierarchy. 
+ + Args: + key: The setting name + default: Value to return if setting doesn't exist + + Returns: + The setting value or default + """ + try: + return getattr(self, key) + except AttributeError: + return default + + def as_dict(self) -> Dict[str, Any]: + """ + Get all settings as a dictionary. + + Useful for debugging or serialization. Note that this will trigger + import of all string-based settings. + + Returns: + Dictionary of all settings with their current values + """ + return {key: getattr(self, key) for key in self.defaults.keys()} +# Global settings instance grpc_settings = GRPCSettings(None, DEFAULTS, IMPORT_STRINGS) -def reload_grpc_settings(*args, **kwargs): - setting = kwargs['setting'] - if setting == 'GRPC_FRAMEWORK' or setting == 'ROOT_URLCONF': +def reload_grpc_settings(*args, **kwargs) -> None: + """ + Signal handler to reload settings when Django settings change. + + Connected to Django's setting_changed signal. Reloads the settings + cache when GRPC_FRAMEWORK or ROOT_URLCONF settings change. + + Args: + *args: Positional arguments from the signal + **kwargs: Keyword arguments from the signal (must include 'setting') + """ + setting = kwargs.get("setting") + if setting in ("GRPC_FRAMEWORK", "ROOT_URLCONF"): grpc_settings.reload() +# Connect signal handler for automatic reloading setting_changed.connect(reload_grpc_settings) + + +# Convenience function for testing +def override_grpc_settings(**kwargs) -> "GRPCSettings": + """ + Create a temporary settings object with overridden values. + + Useful for testing without modifying the global settings object. 
+ + Args: + **kwargs: Settings to override + + Returns: + New GRPCSettings instance with overridden values + + Example: + test_settings = override_grpc_settings( + PAGINATION_PAGE_SIZE=50, + MAX_MESSAGE_LENGTH=1024 + ) + print(test_settings.PAGINATION_PAGE_SIZE) # 50 + """ + merged_settings = {**grpc_settings.user_settings, **kwargs} + return GRPCSettings(merged_settings, DEFAULTS, IMPORT_STRINGS) diff --git a/django_grpc_framework/signals.py b/django_grpc_framework/signals.py index 3b402c2..abe36f1 100644 --- a/django_grpc_framework/signals.py +++ b/django_grpc_framework/signals.py @@ -1,12 +1,306 @@ -from django.dispatch import Signal -from django.db import reset_queries, close_old_connections +""" +Signal definitions for Django gRPC Framework. +Provides signals that are sent at various points during gRPC request processing, +similar to Django's request_started and request_finished signals. These signals +enable middleware-like functionality and lifecycle hooks for gRPC services. +Available signals: + - grpc_request_started: Sent when a gRPC request begins processing + - grpc_request_finished: Sent when a gRPC request completes processing + - grpc_exception_raised: Sent when an exception occurs during processing + +Database connection management is automatically handled, similar to WSGI handlers, +to prevent connection leaks and ensure query counting works correctly in debug mode. 
+ +Example usage: + from django_grpc_framework.signals import grpc_request_started + + def log_request(sender, request, context, **kwargs): + print(f"gRPC request started: {request}") + + grpc_request_started.connect(log_request) +""" + +import logging +from typing import Any, Optional + +from django.core.signals import Signal +from django.db import close_old_connections, reset_queries + +from google.protobuf.message import Message +import grpc + + +logger = logging.getLogger(__name__) + + +# Signal sent when a gRPC request begins processing grpc_request_started = Signal() +""" +Signal sent at the start of gRPC request processing. + +This signal is sent before the request handler is invoked. It's similar to +Django's request_started signal but for gRPC requests. + +Provides: + sender (callable): The handler function processing the request + request (Message): The protobuf request message + context (grpc.ServicerContext): The gRPC context + +Example: + from django_grpc_framework.signals import grpc_request_started + + def on_request_start(sender, request, context, **kwargs): + print(f"Request started: {request.__class__.__name__}") + context.set_compression(grpc.Compression.Gzip) + + grpc_request_started.connect(on_request_start) +""" + +# Signal sent when a gRPC request finishes processing grpc_request_finished = Signal() +""" +Signal sent at the end of gRPC request processing. + +This signal is sent after the request handler completes, regardless of +whether it succeeded or failed. It's always sent in a finally block. 
+ +Provides: + sender (callable): The handler function that processed the request + +Example: + from django_grpc_framework.signals import grpc_request_finished + + def on_request_finish(sender, **kwargs): + print("Request finished, cleaning up resources") + + grpc_request_finished.connect(on_request_finish) +""" + +# Signal sent when an exception occurs during request processing +grpc_exception_raised = Signal() +""" +Signal sent when an exception is raised during gRPC request processing. + +This signal allows you to handle or log exceptions globally across all +gRPC services. + +Provides: + sender (callable): The handler function where the exception occurred + exception (Exception): The exception that was raised + request (Message): The protobuf request message (if available) + context (grpc.ServicerContext): The gRPC context (if available) + +Example: + from django_grpc_framework.signals import grpc_exception_raised + import sentry_sdk + + def on_exception(sender, exception, **kwargs): + sentry_sdk.capture_exception(exception) + logger.error(f"gRPC exception: {exception}") + + grpc_exception_raised.connect(on_exception) +""" + + +def _handle_database_connections() -> None: + """ + Set up database connection management for gRPC requests. + + Manages database connections similarly to WSGI handlers to prevent + connection leaks and ensure proper query tracking in debug mode. + + Connection management: + - Request start: Reset query logs and close old connections + - Request finish: Close old connections + + This ensures that: + 1. Each request starts with a clean connection state + 2. Query counts are accurate in DEBUG mode + 3. Stale connections are cleaned up + 4. 
Connection pool is managed properly + """ + # Reset query logging when a request starts (DEBUG mode) + grpc_request_started.connect(reset_queries, dispatch_uid="grpc_reset_queries") + + # Close old connections at request start + grpc_request_started.connect( + close_old_connections, dispatch_uid="grpc_close_old_connections_start" + ) + + # Close old connections at request finish + grpc_request_finished.connect( + close_old_connections, dispatch_uid="grpc_close_old_connections_finish" + ) + + +# Automatically set up database connection management +_handle_database_connections() + + +# Logging handlers for debugging +def _log_request_started( + sender: Any, + request: Optional[Message] = None, + context: Optional[grpc.ServicerContext] = None, + **kwargs, +) -> None: + """ + Log when a gRPC request starts (if debug logging is enabled). + + Args: + sender: The handler function + request: The protobuf request message + context: The gRPC context + **kwargs: Additional keyword arguments + """ + if logger.isEnabledFor(logging.DEBUG): + request_class = request.__class__.__name__ if request else "Unknown" + logger.debug(f"gRPC request started: {request_class}") + + +def _log_request_finished(sender: Any, **kwargs) -> None: + """ + Log when a gRPC request finishes (if debug logging is enabled). + + Args: + sender: The handler function + **kwargs: Additional keyword arguments + """ + if logger.isEnabledFor(logging.DEBUG): + handler_name = getattr(sender, "__name__", "Unknown") + logger.debug(f"gRPC request finished: {handler_name}") + + +def _log_exception_raised( + sender: Any, exception: Optional[Exception] = None, **kwargs +) -> None: + """ + Log when an exception is raised during gRPC request processing. 
+ + Args: + sender: The handler function + exception: The exception that was raised + **kwargs: Additional keyword arguments + """ + if exception: + handler_name = getattr(sender, "__name__", "Unknown") + logger.error( + f"Exception in gRPC handler {handler_name}: {exception}", exc_info=exception + ) + + +def enable_signal_logging() -> None: + """ + Enable logging for all gRPC signals. + + Useful for debugging gRPC request lifecycle. Logs will only appear + when the Django logging level is set to DEBUG or lower. + + Call this function in your Django settings or startup code to enable + signal logging across your application. + + Example: + # In settings.py or apps.py ready() method + from django_grpc_framework.signals import enable_signal_logging + enable_signal_logging() + """ + grpc_request_started.connect( + _log_request_started, dispatch_uid="grpc_log_request_started" + ) + grpc_request_finished.connect( + _log_request_finished, dispatch_uid="grpc_log_request_finished" + ) + grpc_exception_raised.connect( + _log_exception_raised, dispatch_uid="grpc_log_exception_raised" + ) + + +def disable_signal_logging() -> None: + """ + Disable logging for all gRPC signals. + + Removes the logging signal handlers that were added by enable_signal_logging(). + + Example: + from django_grpc_framework.signals import disable_signal_logging + disable_signal_logging() + """ + grpc_request_started.disconnect(dispatch_uid="grpc_log_request_started") + grpc_request_finished.disconnect(dispatch_uid="grpc_log_request_finished") + grpc_exception_raised.disconnect(dispatch_uid="grpc_log_exception_raised") + + +# Context manager for testing +class SignalTester: + """ + Context manager for testing signal behavior. + + Allows you to verify that signals are sent correctly during tests + by capturing signal calls. 
+ + Example: + from django_grpc_framework.signals import SignalTester, grpc_request_started + + with SignalTester(grpc_request_started) as tester: + # Make gRPC call + service.List(request, context) + + # Verify signal was sent + assert tester.call_count == 1 + assert tester.calls[0]['request'] == request + """ + + def __init__(self, signal: Signal): + """ + Initialize the signal tester. + + Args: + signal: The signal to monitor + """ + self.signal = signal + self.calls: list[dict] = [] + self.call_count: int = 0 + + def __enter__(self): + """Start monitoring the signal.""" + self.signal.connect(self._capture_call, weak=False) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Stop monitoring the signal.""" + self.signal.disconnect(self._capture_call) + return False + + def _capture_call(self, sender, **kwargs) -> None: + """ + Capture a signal call. + + Args: + sender: The signal sender + **kwargs: Signal arguments + """ + self.call_count += 1 + self.calls.append({"sender": sender, **kwargs}) + + def reset(self) -> None: + """Reset the captured calls.""" + self.calls.clear() + self.call_count = 0 + + def assert_called_once(self) -> None: + """Assert that the signal was called exactly once.""" + assert self.call_count == 1, ( + f"Expected signal to be called once, but was called {self.call_count} times" + ) + def assert_called(self) -> None: + """Assert that the signal was called at least once.""" + assert self.call_count > 0, "Expected signal to be called, but it was not" -# db connection state managed similarly to the wsgi handler -grpc_request_started.connect(reset_queries) -grpc_request_started.connect(close_old_connections) -grpc_request_finished.connect(close_old_connections) \ No newline at end of file + def assert_not_called(self) -> None: + """Assert that the signal was not called.""" + assert self.call_count == 0, ( + f"Expected signal not to be called, but was called {self.call_count} times" + ) diff --git 
a/django_grpc_framework/test.py b/django_grpc_framework/test.py index 8e09b5b..e44a37e 100644 --- a/django_grpc_framework/test.py +++ b/django_grpc_framework/test.py @@ -1,8 +1,35 @@ +""" +Test utilities for Django gRPC Framework. + +Provides test case classes and utilities for testing gRPC services without +requiring a real gRPC server. Similar to Django's test client, these utilities +allow you to test gRPC services in-process. + +Test case classes: + - RPCSimpleTestCase: For simple tests without database + - RPCTransactionTestCase: For tests with database transactions + - RPCTestCase: Standard test case with database fixtures + +Example usage: + from django_grpc_framework.test import RPCTestCase + from myapp.proto import user_pb2 + + class UserServiceTest(RPCTestCase): + def test_list_users(self): + request = user_pb2.UserListRequest() + response = self.channel.unary_stream( + '/user.UserController/List' + )(request) + users = list(response) + self.assertEqual(len(users), 2) +""" + from contextlib import contextmanager +from typing import Any, Iterator, List, Optional, Tuple +from django.db import close_old_connections from django.test import testcases import grpc -from django.db import close_old_connections from django_grpc_framework.settings import grpc_settings from django_grpc_framework.signals import grpc_request_started, grpc_request_finished @@ -10,141 +37,572 @@ @contextmanager def _disable_close_old_connections(): + """ + Temporarily disable database connection closing during tests. + + Database connection signals are disconnected during test execution to + prevent premature connection closing that can interfere with test + transactions and fixtures. 
+ + Yields: + None + """ + # Use dispatch_uid for safe disconnect try: - grpc_request_started.disconnect(close_old_connections) - grpc_request_finished.disconnect(close_old_connections) + grpc_request_started.disconnect( + close_old_connections, dispatch_uid="grpc_close_old_connections_start" + ) + grpc_request_finished.disconnect( + close_old_connections, dispatch_uid="grpc_close_old_connections_finish" + ) yield finally: - grpc_request_started.connect(close_old_connections) - grpc_request_finished.connect(close_old_connections) + grpc_request_started.connect( + close_old_connections, dispatch_uid="grpc_close_old_connections_start" + ) + grpc_request_finished.connect( + close_old_connections, dispatch_uid="grpc_close_old_connections_finish" + ) class Channel: + """ + Fake gRPC channel for testing services in-process. + + Simulates a gRPC channel without requiring network connections or a + running server. Handlers are invoked directly with fake contexts. + + This class mimics the interface of grpc.Channel and can be used as a + drop-in replacement for testing. + + Example: + channel = Channel() + stub = UserControllerStub(channel) + response = stub.List(UserListRequest()) + """ + def __init__(self): - server = FakeServer() - grpc_settings.ROOT_HANDLERS_HOOK(server) - self.server = server + """ + Initialize the fake channel. + + Creates a fake server and registers all gRPC handlers from the + ROOT_HANDLERS_HOOK setting. + """ + self.server = FakeServer() + grpc_settings.ROOT_HANDLERS_HOOK(self.server) def __enter__(self): + """Context manager entry.""" return self - def __exit__(self, exc_tp, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): + """Context manager exit.""" pass - def unary_unary(self, method, *args, **kwargs): + def unary_unary( + self, + method: str, + request_serializer: Optional[Any] = None, + response_deserializer: Optional[Any] = None, + ) -> "UnaryUnary": + """ + Create a unary-unary callable for the given method. 
+ + Args: + method: Full RPC method name (e.g., '/user.UserController/Retrieve') + request_serializer: Optional serializer (unused in fake channel) + response_deserializer: Optional deserializer (unused in fake channel) + + Returns: + UnaryUnary callable that can be invoked with a request + """ return UnaryUnary(self, method) - def unary_stream(self, method, *args, **kwargs): + def unary_stream( + self, + method: str, + request_serializer: Optional[Any] = None, + response_deserializer: Optional[Any] = None, + ) -> "UnaryStream": + """ + Create a unary-stream callable for the given method. + + Args: + method: Full RPC method name (e.g., '/user.UserController/List') + request_serializer: Optional serializer (unused in fake channel) + response_deserializer: Optional deserializer (unused in fake channel) + + Returns: + UnaryStream callable that can be invoked with a request + """ return UnaryStream(self, method) - def stream_unary(self, method, *args, **kwargs): + def stream_unary( + self, + method: str, + request_serializer: Optional[Any] = None, + response_deserializer: Optional[Any] = None, + ) -> "StreamUnary": + """ + Create a stream-unary callable for the given method. + + Args: + method: Full RPC method name + request_serializer: Optional serializer (unused in fake channel) + response_deserializer: Optional deserializer (unused in fake channel) + + Returns: + StreamUnary callable that can be invoked with a request iterator + """ return StreamUnary(self, method) - def stream_stream(self, method, *args, **kwargs): + def stream_stream( + self, + method: str, + request_serializer: Optional[Any] = None, + response_deserializer: Optional[Any] = None, + ) -> "StreamStream": + """ + Create a stream-stream callable for the given method. 
+ + Args: + method: Full RPC method name + request_serializer: Optional serializer (unused in fake channel) + response_deserializer: Optional deserializer (unused in fake channel) + + Returns: + StreamStream callable that can be invoked with a request iterator + """ return StreamStream(self, method) class _MultiCallable: - def __init__(self, channel, method_full_rpc_name): + """ + Base class for fake multi-callable objects. + + Provides common functionality for all RPC pattern types (unary-unary, + unary-stream, etc.). + """ + + def __init__(self, channel: Channel, method_full_rpc_name: str): + """ + Initialize the multi-callable. + + Args: + channel: The fake channel instance + method_full_rpc_name: Full RPC method name + + Raises: + KeyError: If the method is not registered on the server + """ self._handler = channel.server._find_method_handler(method_full_rpc_name) def with_call(self, *args, **kwargs): - raise NotImplementedError + """ + Not implemented for fake channel. + + Raises: + NotImplementedError: Always + """ + raise NotImplementedError( + "with_call() is not supported in test channel. " + "Use the standard __call__() method instead." + ) def future(self, *args, **kwargs): - raise NotImplementedError + """ + Not implemented for fake channel. + + Raises: + NotImplementedError: Always + """ + raise NotImplementedError( + "future() is not supported in test channel. " + "Use the standard __call__() method instead." + ) class UnaryUnary(_MultiCallable, grpc.UnaryUnaryMultiCallable): - def __call__(self, request, timeout=None, metadata=None, *args, **kwargs): + """ + Fake unary-unary multi-callable for testing. + + Handles single request -> single response RPC pattern. 
+ """ + + def __call__( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[List[Tuple[str, str]]] = None, + credentials: Optional[Any] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + """ + Invoke the RPC method. + + Args: + request: The request message + timeout: Request timeout (unused in fake channel) + metadata: Request metadata + credentials: Call credentials (unused in fake channel) + wait_for_ready: Wait for ready flag (unused in fake channel) + compression: Compression method (unused in fake channel) + + Returns: + The response message + """ with _disable_close_old_connections(): context = FakeContext() - context._invocation_metadata.extend(metadata or []) + if metadata: + context._invocation_metadata.extend(metadata) return self._handler.unary_unary(request, context) class UnaryStream(_MultiCallable, grpc.UnaryStreamMultiCallable): - def __call__(self, request, timeout=None, metadata=None, *args, **kwargs): + """ + Fake unary-stream multi-callable for testing. + + Handles single request -> streaming response RPC pattern. + """ + + def __call__( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[List[Tuple[str, str]]] = None, + credentials: Optional[Any] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Iterator[Any]: + """ + Invoke the RPC method. 
+ + Args: + request: The request message + timeout: Request timeout (unused in fake channel) + metadata: Request metadata + credentials: Call credentials (unused in fake channel) + wait_for_ready: Wait for ready flag (unused in fake channel) + compression: Compression method (unused in fake channel) + + Returns: + Iterator of response messages + """ with _disable_close_old_connections(): context = FakeContext() - context._invocation_metadata.extend(metadata or []) + if metadata: + context._invocation_metadata.extend(metadata) return self._handler.unary_stream(request, context) class StreamUnary(_MultiCallable, grpc.StreamUnaryMultiCallable): - def __call__(self, request_iterator, timeout=None, metadata=None, *args, **kwargs): + """ + Fake stream-unary multi-callable for testing. + + Handles streaming request -> single response RPC pattern. + """ + + def __call__( + self, + request_iterator: Iterator[Any], + timeout: Optional[float] = None, + metadata: Optional[List[Tuple[str, str]]] = None, + credentials: Optional[Any] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + """ + Invoke the RPC method. + + Args: + request_iterator: Iterator of request messages + timeout: Request timeout (unused in fake channel) + metadata: Request metadata + credentials: Call credentials (unused in fake channel) + wait_for_ready: Wait for ready flag (unused in fake channel) + compression: Compression method (unused in fake channel) + + Returns: + The response message + """ with _disable_close_old_connections(): context = FakeContext() - context._invocation_metadata.extend(metadata or []) + if metadata: + context._invocation_metadata.extend(metadata) return self._handler.stream_unary(request_iterator, context) class StreamStream(_MultiCallable, grpc.StreamStreamMultiCallable): - def __call__(self, request_iterator, timeout=None, metadata=None, *args, **kwargs): + """ + Fake stream-stream multi-callable for testing. 
+ + Handles streaming request -> streaming response RPC pattern. + """ + + def __call__( + self, + request_iterator: Iterator[Any], + timeout: Optional[float] = None, + metadata: Optional[List[Tuple[str, str]]] = None, + credentials: Optional[Any] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Iterator[Any]: + """ + Invoke the RPC method. + + Args: + request_iterator: Iterator of request messages + timeout: Request timeout (unused in fake channel) + metadata: Request metadata + credentials: Call credentials (unused in fake channel) + wait_for_ready: Wait for ready flag (unused in fake channel) + compression: Compression method (unused in fake channel) + + Returns: + Iterator of response messages + """ with _disable_close_old_connections(): context = FakeContext() - context._invocation_metadata.extend(metadata or []) + if metadata: + context._invocation_metadata.extend(metadata) return self._handler.stream_stream(request_iterator, context) class FakeRpcError(grpc.RpcError): - def __init__(self, code, details): + """ + Fake gRPC error for testing error handling. + + Simulates a gRPC RpcError that would be raised by a real gRPC channel. + + Example: + try: + service.Retrieve(request, context) + except FakeRpcError as e: + self.assertEqual(e.code(), grpc.StatusCode.NOT_FOUND) + self.assertIn('not found', e.details()) + """ + + def __init__(self, code: grpc.StatusCode, details: str): + """ + Initialize the fake RPC error. + + Args: + code: gRPC status code + details: Error details string + """ + super().__init__() self._code = code self._details = details - def code(self): + def code(self) -> grpc.StatusCode: + """ + Get the status code. + + Returns: + gRPC status code + """ return self._code - def details(self): + def details(self) -> str: + """ + Get the error details. 
+ + Returns: + Error details string + """ return self._details - def __repr__(self): - return '<FakeRpcError code=%s, details=%s>' % (self._code, self._details) + def __repr__(self) -> str: + """String representation of the error.""" + return f'<FakeRpcError code={self._code}, details={self._details}>' + + def __str__(self) -> str: + """Human-readable string representation.""" + return f"{self._code.name}: {self._details}" class FakeServer: + """ + Fake gRPC server for testing. + + Simulates a gRPC server by storing method handlers without actually + binding to a network port. + """ + def __init__(self): - self.rpc_method_handlers = {} + """Initialize the fake server with an empty handler registry.""" + self.rpc_method_handlers: dict[str, Any] = {} - def add_generic_rpc_handlers(self, generic_rpc_handlers): + def add_generic_rpc_handlers(self, generic_rpc_handlers: List[Any]) -> None: + """ + Register generic RPC handlers. + + Args: + generic_rpc_handlers: List of generic RPC handlers to register + """ from grpc._server import _validate_generic_rpc_handlers + _validate_generic_rpc_handlers(generic_rpc_handlers) self.rpc_method_handlers.update(generic_rpc_handlers[0]._method_handlers) - def _find_method_handler(self, method_full_rpc_name): - return self.rpc_method_handlers[method_full_rpc_name] + def _find_method_handler(self, method_full_rpc_name: str) -> Any: + """ + Find a method handler by its full RPC name. + + Args: + method_full_rpc_name: Full RPC method name + + Returns: + The method handler + + Raises: + KeyError: If method handler is not found + """ + try: + return self.rpc_method_handlers[method_full_rpc_name] + except KeyError: + raise KeyError( + f"Method '{method_full_rpc_name}' not found. " + f"Available methods: {list(self.rpc_method_handlers.keys())}" + ) class FakeContext: - def __init__(self): - self._invocation_metadata = [] + """ + Fake gRPC context for testing. + + Simulates a gRPC ServicerContext with minimal functionality needed + for testing. 
+ + Attributes: + _invocation_metadata: List of metadata tuples + _code: Response status code + _details: Response details string + """ - def abort(self, code, details): + def __init__(self): + """Initialize the fake context.""" + self._invocation_metadata: List[Tuple[str, str]] = [] + self._code: Optional[grpc.StatusCode] = None + self._details: Optional[str] = None + self._state = type("State", (), {"client": True})() + + def abort(self, code: grpc.StatusCode, details: str) -> None: + """ + Abort the RPC with an error. + + Args: + code: gRPC status code + details: Error details + + Raises: + FakeRpcError: Always, with the provided code and details + """ + self._code = code + self._details = details raise FakeRpcError(code, details) - def invocation_metadata(self): + def invocation_metadata(self) -> List[Tuple[str, str]]: + """ + Get the invocation metadata. + + Returns: + List of metadata tuples + """ return self._invocation_metadata + def set_code(self, code: grpc.StatusCode) -> None: + """Set the response status code.""" + self._code = code + + def set_details(self, details: str) -> None: + """Set the response details.""" + self._details = details + + def is_active(self) -> bool: + """Check if the context is active.""" + return True + + def time_remaining(self) -> Optional[float]: + """Get time remaining (always None for fake context).""" + return None + class RPCSimpleTestCase(testcases.SimpleTestCase): + """ + Test case for gRPC services without database support. + + Use this for testing services that don't require database access. + Similar to Django's SimpleTestCase. 
+ + Example: + class CalculatorServiceTest(RPCSimpleTestCase): + def test_add(self): + request = calculator_pb2.AddRequest(a=2, b=3) + response = self.channel.unary_unary( + '/calculator.Calculator/Add' + )(request) + self.assertEqual(response.result, 5) + """ + channel_class = Channel def setUp(self): + """Set up the test by creating a fake channel.""" super().setUp() self.channel = self.channel_class() class RPCTransactionTestCase(testcases.TransactionTestCase): + """ + Test case for gRPC services with transaction support. + + Use this when you need to test transaction behavior or when tests + need to check database state after transaction commits. + + Example: + class UserServiceTransactionTest(RPCTransactionTestCase): + def test_user_creation_transaction(self): + request = user_pb2.User(name='Alice') + response = self.channel.unary_unary( + '/user.UserController/Create' + )(request) + self.assertEqual(User.objects.count(), 1) + """ + channel_class = Channel def setUp(self): + """Set up the test by creating a fake channel.""" super().setUp() self.channel = self.channel_class() class RPCTestCase(testcases.TestCase): + """ + Standard test case for gRPC services with database fixtures. + + Most common test case class. Supports database fixtures and runs + each test in a transaction that's rolled back afterwards. 
+ + Example: + class UserServiceTest(RPCTestCase): + fixtures = ['users.json'] + + def test_list_users(self): + request = user_pb2.UserListRequest() + response = self.channel.unary_stream( + '/user.UserController/List' + )(request) + users = list(response) + self.assertGreater(len(users), 0) + """ + channel_class = Channel def setUp(self): + """Set up the test by creating a fake channel.""" super().setUp() self.channel = self.channel_class() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ae79c6b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,12 @@ +[project] +name = "django-grpc-framework" +version = "1.0.0" +description = "gRPC for Django." +readme = "README.rst" +requires-python = ">=3.10,<4.0" +dependencies = [ + "django>=4.0.0", + "djangorestframework>=3.16.1", + "grpcio>=1.76.0", + "grpcio-tools>=1.76.0", +] diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..9fe0b62 --- /dev/null +++ b/uv.lock @@ -0,0 +1,235 @@ +version = 1 +revision = 3 +requires-python = ">=3.10, <4.0" + +[[package]] +name = "asgiref" +version = "3.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/08/4dfec9b90758a59acc6be32ac82e98d1fbfc321cb5cfa410436dbacf821c/asgiref-3.10.0.tar.gz", hash = "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e", size = 37483, upload-time = "2025-10-05T09:15:06.557Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/9c/fc2331f538fbf7eedba64b2052e99ccf9ba9d6888e2f41441ee28847004b/asgiref-3.10.0-py3-none-any.whl", hash = "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", size = 24050, upload-time = "2025-10-05T09:15:05.11Z" }, +] + +[[package]] +name = "django" +version = "5.2.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asgiref" }, + { name = "sqlparse" 
}, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/96/bd84e2bb997994de8bcda47ae4560991084e86536541d7214393880f01a8/django-5.2.7.tar.gz", hash = "sha256:e0f6f12e2551b1716a95a63a1366ca91bbcd7be059862c1b18f989b1da356cdd", size = 10865812, upload-time = "2025-10-01T14:22:12.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/ef/81f3372b5dd35d8d354321155d1a38894b2b766f576d0abffac4d8ae78d9/django-5.2.7-py3-none-any.whl", hash = "sha256:59a13a6515f787dec9d97a0438cd2efac78c8aca1c80025244b0fe507fe0754b", size = 8307145, upload-time = "2025-10-01T14:22:49.476Z" }, +] + +[[package]] +name = "django-grpc-framework" +version = "1.0.0" +source = { virtual = "." } +dependencies = [ + { name = "django" }, + { name = "djangorestframework" }, + { name = "grpcio" }, + { name = "grpcio-tools" }, +] + +[package.metadata] +requires-dist = [ + { name = "django", specifier = ">=4.0.0" }, + { name = "djangorestframework", specifier = ">=3.16.1" }, + { name = "grpcio", specifier = ">=1.76.0" }, + { name = "grpcio-tools", specifier = ">=1.76.0" }, +] + +[[package]] +name = "djangorestframework" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "django" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/95/5376fe618646fde6899b3cdc85fd959716bb67542e273a76a80d9f326f27/djangorestframework-3.16.1.tar.gz", hash = "sha256:166809528b1aced0a17dc66c24492af18049f2c9420dbd0be29422029cfc3ff7", size = 1089735, upload-time = "2025-08-06T17:50:53.251Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/ce/bf8b9d3f415be4ac5588545b5fcdbbb841977db1c1d923f7568eeabe1689/djangorestframework-3.16.1-py3-none-any.whl", hash = "sha256:33a59f47fb9c85ede792cbf88bde71893bcda0667bc573f784649521f1102cec", size = 1080442, upload-time = "2025-08-06T17:50:50.667Z" }, +] + +[[package]] +name = "grpcio" +version = "1.76.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/17/ff4795dc9a34b6aee6ec379f1b66438a3789cd1315aac0cbab60d92f74b3/grpcio-1.76.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:65a20de41e85648e00305c1bb09a3598f840422e522277641145a32d42dcefcc", size = 5840037, upload-time = "2025-10-21T16:20:25.069Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ff/35f9b96e3fa2f12e1dcd58a4513a2e2294a001d64dec81677361b7040c9a/grpcio-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:40ad3afe81676fd9ec6d9d406eda00933f218038433980aa19d401490e46ecde", size = 11836482, upload-time = "2025-10-21T16:20:30.113Z" }, + { url = "https://files.pythonhosted.org/packages/3e/1c/8374990f9545e99462caacea5413ed783014b3b66ace49e35c533f07507b/grpcio-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:035d90bc79eaa4bed83f524331d55e35820725c9fbb00ffa1904d5550ed7ede3", size = 6407178, upload-time = "2025-10-21T16:20:32.733Z" }, + { url = "https://files.pythonhosted.org/packages/1e/77/36fd7d7c75a6c12542c90a6d647a27935a1ecaad03e0ffdb7c42db6b04d2/grpcio-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4215d3a102bd95e2e11b5395c78562967959824156af11fa93d18fdd18050990", size = 7075684, upload-time = "2025-10-21T16:20:35.435Z" }, + { url = "https://files.pythonhosted.org/packages/38/f7/e3cdb252492278e004722306c5a8935eae91e64ea11f0af3437a7de2e2b7/grpcio-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49ce47231818806067aea3324d4bf13825b658ad662d3b25fada0bdad9b8a6af", size = 6611133, upload-time = 
"2025-10-21T16:20:37.541Z" }, + { url = "https://files.pythonhosted.org/packages/7e/20/340db7af162ccd20a0893b5f3c4a5d676af7b71105517e62279b5b61d95a/grpcio-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8cc3309d8e08fd79089e13ed4819d0af72aa935dd8f435a195fd152796752ff2", size = 7195507, upload-time = "2025-10-21T16:20:39.643Z" }, + { url = "https://files.pythonhosted.org/packages/10/f0/b2160addc1487bd8fa4810857a27132fb4ce35c1b330c2f3ac45d697b106/grpcio-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:971fd5a1d6e62e00d945423a567e42eb1fa678ba89072832185ca836a94daaa6", size = 8160651, upload-time = "2025-10-21T16:20:42.492Z" }, + { url = "https://files.pythonhosted.org/packages/2c/2c/ac6f98aa113c6ef111b3f347854e99ebb7fb9d8f7bb3af1491d438f62af4/grpcio-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d9adda641db7207e800a7f089068f6f645959f2df27e870ee81d44701dd9db3", size = 7620568, upload-time = "2025-10-21T16:20:45.995Z" }, + { url = "https://files.pythonhosted.org/packages/90/84/7852f7e087285e3ac17a2703bc4129fafee52d77c6c82af97d905566857e/grpcio-1.76.0-cp310-cp310-win32.whl", hash = "sha256:063065249d9e7e0782d03d2bca50787f53bd0fb89a67de9a7b521c4a01f1989b", size = 3998879, upload-time = "2025-10-21T16:20:48.592Z" }, + { url = "https://files.pythonhosted.org/packages/10/30/d3d2adcbb6dd3ff59d6ac3df6ef830e02b437fb5c90990429fd180e52f30/grpcio-1.76.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6ae758eb08088d36812dd5d9af7a9859c05b1e0f714470ea243694b49278e7b", size = 4706892, upload-time = "2025-10-21T16:20:50.697Z" }, + { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" }, + { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" }, + { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" }, + { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" }, + { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" }, + { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" }, + { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" }, + { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" }, + { url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" }, + { url = "https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, upload-time = "2025-10-21T16:21:46.244Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" }, + { url = "https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" }, + { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" }, + { url = "https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" }, + { url = "https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" }, + { url = "https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" }, + { url = "https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" }, + { url = "https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/77/17d60d636ccd86a0db0eccc24d02967bbc3eea86b9db7324b04507ebaa40/grpcio_tools-1.76.0.tar.gz", hash = "sha256:ce80169b5e6adf3e8302f3ebb6cb0c3a9f08089133abca4b76ad67f751f5ad88", size = 5390807, upload-time = "2025-10-21T16:26:55.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/4b/6fceb806f6d5055793f5db0d7a1e3449ea16482c2aec3ad93b05678c325a/grpcio_tools-1.76.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:9b99086080ca394f1da9894ee20dedf7292dd614e985dcba58209a86a42de602", size = 2545596, upload-time = "2025-10-21T16:24:25.134Z" }, + { url = "https://files.pythonhosted.org/packages/3b/11/57af2f3f32016e6e2aae063a533aae2c0e6c577bc834bef97277a7fa9733/grpcio_tools-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8d95b5c2394bbbe911cbfc88d15e24c9e174958cb44dad6aa8c46fe367f6cc2a", size = 
5843462, upload-time = "2025-10-21T16:24:31.046Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8b/470bedaf7fb75fb19500b4c160856659746dcf53e3d9241fcc17e3af7155/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d54e9ce2ffc5d01341f0c8898c1471d887ae93d77451884797776e0a505bd503", size = 2591938, upload-time = "2025-10-21T16:24:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/77/3e/530e848e00d6fe2db152984b2c9432bb8497a3699719fd7898d05cb7d95e/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:c83f39f64c2531336bd8d5c846a2159c9ea6635508b0f8ed3ad0d433e25b53c9", size = 2905296, upload-time = "2025-10-21T16:24:34.938Z" }, + { url = "https://files.pythonhosted.org/packages/75/b5/632229d17364eb7db5d3d793131172b2380323c4e6500f528743e477267c/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be480142fae0d986d127d6cb5cbc0357e4124ba22e96bb8b9ece32c48bc2c8ea", size = 2656266, upload-time = "2025-10-21T16:24:37.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/71/5756aa9a14d16738b04677b89af8612112d69fb098ffdbc5666020933f23/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7fefd41fc4ca11fab36f42bdf0f3812252988f8798fca8bec8eae049418deacd", size = 3105798, upload-time = "2025-10-21T16:24:40.408Z" }, + { url = "https://files.pythonhosted.org/packages/ab/de/9058021da11be399abe6c5d2a9a2abad1b00d367111018637195d107539b/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:63551f371082173e259e7f6ec24b5f1fe7d66040fadd975c966647bca605a2d3", size = 3654923, upload-time = "2025-10-21T16:24:42.52Z" }, + { url = "https://files.pythonhosted.org/packages/8e/93/29f04cc18f1023b2a4342374a45b1cd87a0e1458fc44aea74baad5431dcd/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:75a2c34584c99ff47e5bb267866e7dec68d30cd3b2158e1ee495bfd6db5ad4f0", size = 3322558, upload-time = 
"2025-10-21T16:24:44.356Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ab/8936708d30b9a2484f6b093dfc57843c1d0380de0eba78a8ad8693535f26/grpcio_tools-1.76.0-cp310-cp310-win32.whl", hash = "sha256:908758789b0a612102c88e8055b7191eb2c4290d5d6fc50fb9cac737f8011ef1", size = 993621, upload-time = "2025-10-21T16:24:46.7Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d2/c5211feb81a532eca2c4dddd00d4971b91c10837cd083781f6ab3a6fdb5b/grpcio_tools-1.76.0-cp310-cp310-win_amd64.whl", hash = "sha256:ec6e49e7c4b2a222eb26d1e1726a07a572b6e629b2cf37e6bb784c9687904a52", size = 1158401, upload-time = "2025-10-21T16:24:48.416Z" }, + { url = "https://files.pythonhosted.org/packages/73/d1/efbeed1a864c846228c0a3b322e7a2d6545f025e35246aebf96496a36004/grpcio_tools-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6480f6af6833850a85cca1c6b435ef4ffd2ac8e88ef683b4065233827950243", size = 2545931, upload-time = "2025-10-21T16:24:50.201Z" }, + { url = "https://files.pythonhosted.org/packages/af/8e/f257c0f565d9d44658301238b01a9353bc6f3b272bb4191faacae042579d/grpcio_tools-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c7c23fe1dc09818e16a48853477806ad77dd628b33996f78c05a293065f8210c", size = 5844794, upload-time = "2025-10-21T16:24:53.312Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c0/6c1e89c67356cb20e19ed670c5099b13e40fd678cac584c778f931666a86/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fcdce7f7770ff052cd4e60161764b0b3498c909bde69138f8bd2e7b24a3ecd8f", size = 2591772, upload-time = "2025-10-21T16:24:55.729Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/5f33aa7bc3ddaad0cfd2f4e950ac4f1a310e8d0c7b1358622a581e8b7a2f/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b598fdcebffa931c7da5c9e90b5805fff7e9bc6cf238319358a1b85704c57d33", size = 2905140, upload-time = "2025-10-21T16:24:57.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/3e/23e3a52a77368f47188ed83c34eb53866d3ce0f73835b2f6764844ae89eb/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6a9818ff884796b12dcf8db32126e40ec1098cacf5697f27af9cfccfca1c1fae", size = 2656475, upload-time = "2025-10-21T16:25:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/51/85/a74ae87ec7dbd3d2243881f5c548215aed1148660df7945be3a125ba9a21/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:105e53435b2eed3961da543db44a2a34479d98d18ea248219856f30a0ca4646b", size = 3106158, upload-time = "2025-10-21T16:25:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/54/d5/a6ed1e5823bc5d55a1eb93e0c14ccee0b75951f914832ab51fb64d522a0f/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:454a1232c7f99410d92fa9923c7851fd4cdaf657ee194eac73ea1fe21b406d6e", size = 3654980, upload-time = "2025-10-21T16:25:05.717Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/c05d5501ba156a242079ef71d073116d2509c195b5e5e74c545f0a3a3a69/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ca9ccf667afc0268d45ab202af4556c72e57ea36ebddc93535e1a25cbd4f8aba", size = 3322658, upload-time = "2025-10-21T16:25:07.885Z" }, + { url = "https://files.pythonhosted.org/packages/02/b6/ee0317b91da19a7537d93c4161cbc2a45a165c8893209b0bbd470d830ffa/grpcio_tools-1.76.0-cp311-cp311-win32.whl", hash = "sha256:a83c87513b708228b4cad7619311daba65b40937745103cadca3db94a6472d9c", size = 993837, upload-time = "2025-10-21T16:25:10.133Z" }, + { url = "https://files.pythonhosted.org/packages/81/63/9623cadf0406b264737f16d4ed273bb2d65001d87fbd803b565c45d665d1/grpcio_tools-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:2ce5e87ec71f2e4041dce4351f2a8e3b713e3bca6b54c69c3fbc6c7ad1f4c386", size = 1158634, upload-time = "2025-10-21T16:25:12.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/ca/a931c1439cabfe305c9afd07e233150cd0565aa062c20d1ee412ed188852/grpcio_tools-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:4ad555b8647de1ebaffb25170249f89057721ffb74f7da96834a07b4855bb46a", size = 2546852, upload-time = "2025-10-21T16:25:15.024Z" }, + { url = "https://files.pythonhosted.org/packages/4c/07/935cfbb7dccd602723482a86d43fbd992f91e9867bca0056a1e9f348473e/grpcio_tools-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:243af7c8fc7ff22a40a42eb8e0f6f66963c1920b75aae2a2ec503a9c3c8b31c1", size = 5841777, upload-time = "2025-10-21T16:25:17.425Z" }, + { url = "https://files.pythonhosted.org/packages/e4/92/8fcb5acebdccb647e0fa3f002576480459f6cf81e79692d7b3c4d6e29605/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8207b890f423142cc0025d041fb058f7286318df6a049565c27869d73534228b", size = 2594004, upload-time = "2025-10-21T16:25:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ea/64838e8113b7bfd4842b15c815a7354cb63242fdce9d6648d894b5d50897/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3dafa34c2626a6691d103877e8a145f54c34cf6530975f695b396ed2fc5c98f8", size = 2905563, upload-time = "2025-10-21T16:25:21.889Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d6/53798827d821098219e58518b6db52161ce4985620850aa74ce3795da8a7/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:30f1d2dda6ece285b3d9084e94f66fa721ebdba14ae76b2bc4c581c8a166535c", size = 2656936, upload-time = "2025-10-21T16:25:24.369Z" }, + { url = "https://files.pythonhosted.org/packages/89/a3/d9c1cefc46a790eec520fe4e70e87279abb01a58b1a3b74cf93f62b824a2/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a889af059dc6dbb82d7b417aa581601316e364fe12eb54c1b8d95311ea50916d", size = 3109811, upload-time = "2025-10-21T16:25:26.711Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/75/5997752644b73b5d59377d333a51c8a916606df077f5a487853e37dca289/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c3f2c3c44c56eb5d479ab178f0174595d0a974c37dade442f05bb73dfec02f31", size = 3658786, upload-time = "2025-10-21T16:25:28.819Z" }, + { url = "https://files.pythonhosted.org/packages/84/47/dcf8380df4bd7931ffba32fc6adc2de635b6569ca27fdec7121733797062/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:479ce02dff684046f909a487d452a83a96b4231f7c70a3b218a075d54e951f56", size = 3325144, upload-time = "2025-10-21T16:25:30.863Z" }, + { url = "https://files.pythonhosted.org/packages/04/88/ea3e5fdb874d8c2d04488e4b9d05056537fba70915593f0c283ac77df188/grpcio_tools-1.76.0-cp312-cp312-win32.whl", hash = "sha256:9ba4bb539936642a44418b38ee6c3e8823c037699e2cb282bd8a44d76a4be833", size = 993523, upload-time = "2025-10-21T16:25:32.594Z" }, + { url = "https://files.pythonhosted.org/packages/de/b1/ce7d59d147675ec191a55816be46bc47a343b5ff07279eef5817c09cc53e/grpcio_tools-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd489016766b05f9ed8a6b6596004b62c57d323f49593eac84add032a6d43f7", size = 1158493, upload-time = "2025-10-21T16:25:34.5Z" }, + { url = "https://files.pythonhosted.org/packages/13/01/b16fe73f129df49811d886dc99d3813a33cf4d1c6e101252b81c895e929f/grpcio_tools-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ff48969f81858397ef33a36b326f2dbe2053a48b254593785707845db73c8f44", size = 2546312, upload-time = "2025-10-21T16:25:37.138Z" }, + { url = "https://files.pythonhosted.org/packages/25/17/2594c5feb76bb0b25bfbf91ec1075b276e1b2325e4bc7ea649a7b5dbf353/grpcio_tools-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa2f030fd0ef17926026ee8e2b700e388d3439155d145c568fa6b32693277613", size = 5839627, upload-time = "2025-10-21T16:25:40.082Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/c6/097b1aa26fbf72fb3cdb30138a2788529e4f10d8759de730a83f5c06726e/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bacbf3c54f88c38de8e28f8d9b97c90b76b105fb9ddef05d2c50df01b32b92af", size = 2592817, upload-time = "2025-10-21T16:25:42.301Z" }, + { url = "https://files.pythonhosted.org/packages/03/78/d1d985b48592a674509a85438c1a3d4c36304ddfc99d1b05d27233b51062/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0d4e4afe9a0e3c24fad2f1af45f98cf8700b2bfc4d790795756ba035d2ea7bdc", size = 2905186, upload-time = "2025-10-21T16:25:44.395Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/770afbb47f0b5f594b93a7b46a95b892abda5eebe60efb511e96cee52170/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fbbd4e1fc5af98001ceef5e780e8c10921d94941c3809238081e73818ef707f1", size = 2656188, upload-time = "2025-10-21T16:25:46.942Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2b/017c2fcf4c5d3cf00cf7d5ce21eb88521de0d89bdcf26538ad2862ec6d07/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b05efe5a59883ab8292d596657273a60e0c3e4f5a9723c32feb9fc3a06f2f3ef", size = 3109141, upload-time = "2025-10-21T16:25:49.137Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5f/2495f88e3d50c6f2c2da2752bad4fa3a30c52ece6c9d8b0c636cd8b1430b/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:be483b90e62b7892eb71fa1fc49750bee5b2ee35b5ec99dd2b32bed4bedb5d71", size = 3657892, upload-time = "2025-10-21T16:25:52.362Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1d/c4f39d31b19d9baf35d900bf3f969ce1c842f63a8560c8003ed2e5474760/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:630cd7fd3e8a63e20703a7ad816979073c2253e591b5422583c27cae2570de73", size = 3324778, upload-time = "2025-10-21T16:25:54.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/b6/35ee3a6e4af85a93da28428f81f4b29bcb36f6986b486ad71910fcc02e25/grpcio_tools-1.76.0-cp313-cp313-win32.whl", hash = "sha256:eb2567280f9f6da5444043f0e84d8408c7a10df9ba3201026b30e40ef3814736", size = 993084, upload-time = "2025-10-21T16:25:56.52Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7a/5bd72344d86ee860e5920c9a7553cfe3bc7b1fce79f18c00ac2497f5799f/grpcio_tools-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:0071b1c0bd0f5f9d292dca4efab32c92725d418e57f9c60acdc33c0172af8b53", size = 1158151, upload-time = "2025-10-21T16:25:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c0/aa20eebe8f3553b7851643e9c88d237c3a6ca30ade646897e25dbb27be99/grpcio_tools-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:c53c5719ef2a435997755abde3826ba4087174bd432aa721d8fac781fcea79e4", size = 2546297, upload-time = "2025-10-21T16:26:01.258Z" }, + { url = "https://files.pythonhosted.org/packages/d9/98/6af702804934443c1d0d4d27d21b990d92d22ddd1b6bec6b056558cbbffa/grpcio_tools-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:e3db1300d7282264639eeee7243f5de7e6a7c0283f8bf05d66c0315b7b0f0b36", size = 5839804, upload-time = "2025-10-21T16:26:05.495Z" }, + { url = "https://files.pythonhosted.org/packages/ea/8d/7725fa7b134ef8405ffe0a37c96eeb626e5af15d70e1bdac4f8f1abf842e/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b018a4b7455a7e8c16d0fdb3655a6ba6c9536da6de6c5d4f11b6bb73378165b", size = 2593922, upload-time = "2025-10-21T16:26:07.563Z" }, + { url = "https://files.pythonhosted.org/packages/de/ff/5b6b5012c79fa72f9107dc13f7226d9ce7e059ea639fd8c779e0dd284386/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ec6e4de3866e47cfde56607b1fae83ecc5aa546e06dec53de11f88063f4b5275", size = 2905327, upload-time = "2025-10-21T16:26:09.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/01/2691d369ea462cd6b6c92544122885ca01f7fa5ac75dee023e975e675858/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b8da4d828883913f1852bdd67383713ae5c11842f6c70f93f31893eab530aead", size = 2656214, upload-time = "2025-10-21T16:26:11.773Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e7/3f8856e6ec3dd492336a91572993344966f237b0e3819fbe96437b19d313/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5c120c2cf4443121800e7f9bcfe2e94519fa25f3bb0b9882359dd3b252c78a7b", size = 3109889, upload-time = "2025-10-21T16:26:15.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ce5248072e47db276dc7e069e93978dcde490c959788ce7cce8081d0bfdc/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8b7df5591d699cd9076065f1f15049e9c3597e0771bea51c8c97790caf5e4197", size = 3657939, upload-time = "2025-10-21T16:26:17.34Z" }, + { url = "https://files.pythonhosted.org/packages/f6/df/81ff88af93c52135e425cd5ec9fe8b186169c7d5f9e0409bdf2bbedc3919/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a25048c5f984d33e3f5b6ad7618e98736542461213ade1bd6f2fcfe8ce804e3d", size = 3324752, upload-time = "2025-10-21T16:26:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/35/3d/f6b83044afbf6522254a3b509515a00fed16a819c87731a478dbdd1d35c1/grpcio_tools-1.76.0-cp314-cp314-win32.whl", hash = "sha256:4b77ce6b6c17869858cfe14681ad09ed3a8a80e960e96035de1fd87f78158740", size = 1015578, upload-time = "2025-10-21T16:26:22.517Z" }, + { url = "https://files.pythonhosted.org/packages/95/4d/31236cddb7ffb09ba4a49f4f56d2608fec3bbb21c7a0a975d93bca7cd22e/grpcio_tools-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:2ccd2c8d041351cc29d0fc4a84529b11ee35494a700b535c1f820b642f2a72fc", size = 1190242, upload-time = "2025-10-21T16:26:25.296Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ff/64a6c8f420818bb873713988ca5492cba3a7946be57e027ac63495157d97/protobuf-6.33.0.tar.gz", hash = "sha256:140303d5c8d2037730c548f8c7b93b20bb1dc301be280c378b82b8894589c954", size = 443463, upload-time = "2025-10-15T20:39:52.159Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/ee/52b3fa8feb6db4a833dfea4943e175ce645144532e8a90f72571ad85df4e/protobuf-6.33.0-cp310-abi3-win32.whl", hash = "sha256:d6101ded078042a8f17959eccd9236fb7a9ca20d3b0098bbcb91533a5680d035", size = 425593, upload-time = "2025-10-15T20:39:40.29Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c6/7a465f1825872c55e0341ff4a80198743f73b69ce5d43ab18043699d1d81/protobuf-6.33.0-cp310-abi3-win_amd64.whl", hash = "sha256:9a031d10f703f03768f2743a1c403af050b6ae1f3480e9c140f39c45f81b13ee", size = 436882, upload-time = "2025-10-15T20:39:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a9/b6eee662a6951b9c3640e8e452ab3e09f117d99fc10baa32d1581a0d4099/protobuf-6.33.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:905b07a65f1a4b72412314082c7dbfae91a9e8b68a0cc1577515f8df58ecf455", size = 427521, upload-time = "2025-10-15T20:39:43.803Z" }, + { url = "https://files.pythonhosted.org/packages/10/35/16d31e0f92c6d2f0e77c2a3ba93185130ea13053dd16200a57434c882f2b/protobuf-6.33.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e0697ece353e6239b90ee43a9231318302ad8353c70e6e45499fa52396debf90", size = 324445, upload-time = "2025-10-15T20:39:44.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/eb/2a981a13e35cda8b75b5585aaffae2eb904f8f351bdd3870769692acbd8a/protobuf-6.33.0-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:e0a1715e4f27355afd9570f3ea369735afc853a6c3951a6afe1f80d8569ad298", size = 339159, upload-time = "2025-10-15T20:39:46.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/51/0b1cbad62074439b867b4e04cc09b93f6699d78fd191bed2bbb44562e077/protobuf-6.33.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:35be49fd3f4fefa4e6e2aacc35e8b837d6703c37a2168a55ac21e9b1bc7559ef", size = 323172, upload-time = "2025-10-15T20:39:47.465Z" }, + { url = "https://files.pythonhosted.org/packages/07/d1/0a28c21707807c6aacd5dc9c3704b2aa1effbf37adebd8caeaf68b17a636/protobuf-6.33.0-py3-none-any.whl", hash = "sha256:25c9e1963c6734448ea2d308cfa610e692b801304ba0908d7bfa564ac5132995", size = 170477, upload-time = "2025-10-15T20:39:51.311Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "sqlparse" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999, upload-time = "2024-12-10T12:05:30.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415, upload-time = 
"2024-12-10T12:05:27.824Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, +]