Skip to content

LITE-21319 Recent queries are now automatically cached #31

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Dec 16, 2021
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ jobs:
- name: Wait sonar to process report
uses: jakejarvis/wait-action@master
with:
time: '60s'
time: '120s'
- name: SonarQube Quality Gate check
uses: sonarsource/sonarqube-quality-gate-action@master
timeout-minutes: 5
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ jobs:
- name: Wait sonar to process report
uses: jakejarvis/wait-action@master
with:
time: '60s'
time: '120s'
- name: SonarQube Quality Gate check
uses: sonarsource/sonarqube-quality-gate-action@master
timeout-minutes: 5
Expand Down
3 changes: 1 addition & 2 deletions dj_rql/drf/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,12 @@
#

from dj_rql.drf._utils import get_query
from dj_rql.drf.backend import FilterCache, RQLFilterBackend
from dj_rql.drf.backend import RQLFilterBackend
from dj_rql.drf.paginations import RQLContentRangeLimitOffsetPagination, RQLLimitOffsetPagination


__all__ = [
'get_query',
'FilterCache',
'RQLContentRangeLimitOffsetPagination',
'RQLFilterBackend',
'RQLLimitOffsetPagination',
Expand Down
52 changes: 43 additions & 9 deletions dj_rql/drf/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from rest_framework.filters import BaseFilterBackend


class FilterCache:
class _FilterClassCache:
CACHE = {}

@classmethod
Expand All @@ -25,15 +25,38 @@ class ViewSet(mixins.ListModelMixin, GenericViewSet):
"""
OPENAPI_RETRIEVE_SPECIFICATION = False

_CACHES = {}

def filter_queryset(self, request, queryset, view):
    """Apply RQL filters to ``queryset``, reusing cached results for repeated queries.

    Defect fixed: the span contained leftover pre-change lines (a second, eager
    ``apply_filters`` call) interleaved with the new lazy/cached path; only the
    coherent post-change version is kept.
    """
    filter_class = self.get_filter_class(view)
    if not filter_class:
        return queryset

    filter_instance = self._get_filter_instance(filter_class, queryset, view)
    query = self.get_query(filter_instance, request, view)

    def apply_filters_lazy():
        # Deferred so the (potentially expensive) filtering runs only on a cache miss.
        return filter_instance.apply_filters(query, request, view)

    if filter_class.QUERIES_CACHE_BACKEND and request.method in ('GET', 'HEAD', 'OPTIONS'):
        # We must use the combination of queryset and query to make a cache key, as
        # the queryset can already contain some filters (e.g. based on authentication).
        cache_key = hash(str(queryset.query) + query)

        query_cache = self._get_or_init_cache(filter_class, view)
        filters_result = query_cache.get(cache_key)
        if not filters_result:
            filters_result = apply_filters_lazy()
            query_cache[cache_key] = filters_result
    else:
        # Mutating methods (POST/PUT/...) always filter afresh.
        filters_result = apply_filters_lazy()

    rql_ast, queryset = filters_result

    request.rql_ast = rql_ast
    if queryset.select_data:
        request.rql_select = queryset.select_data

    return queryset

def get_schema_operation_parameters(self, view):
Expand All @@ -59,14 +82,25 @@ def get_filter_class(view):
def get_query(cls, filter_instance, request, view):
return get_query(request)

@staticmethod
def _get_filter_instance(filter_class, queryset, view):
qual_name = '{0}.{1}'.format(view.basename, filter_class.__name__)
@classmethod
def _get_or_init_cache(cls, filter_class, view):
    """Return the query-result cache for this (view, filter class) pair, creating it on first use."""
    cache_key = cls._get_filter_cls_qual_name(filter_class, view)
    backend = filter_class.QUERIES_CACHE_BACKEND
    size = int(filter_class.QUERIES_CACHE_SIZE)
    return cls._CACHES.setdefault(cache_key, backend(size))

filter_instance = FilterCache.CACHE.get(qual_name)
@classmethod
def _get_filter_instance(cls, filter_class, queryset, view):
    """Return a filter-class instance, reusing a cached one per (view, filter class) pair.

    Defect fixed: a stale pre-rename ``FilterCache.CACHE[...]`` write (duplicate of the
    ``_FilterClassCache`` write, referencing a name no longer imported) was removed.
    """
    qual_name = cls._get_filter_cls_qual_name(filter_class, view)

    filter_instance = _FilterClassCache.CACHE.get(qual_name)
    if filter_instance:
        # Reuse the cached instance's prepared filter state with the fresh queryset.
        return filter_class(queryset=queryset, instance=filter_instance)

    filter_instance = filter_class(queryset)
    _FilterClassCache.CACHE[qual_name] = filter_instance
    return filter_instance

@staticmethod
def _get_filter_cls_qual_name(filter_class, view):
return '{0}.{1}'.format(view.basename, filter_class.__name__)
24 changes: 14 additions & 10 deletions dj_rql/filter_cls.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
from datetime import datetime
from uuid import uuid4

from cachetools import LFUCache

from dj_rql._dataclasses import FilterArgs, OptimizationArgs
from dj_rql.constants import (
ComparisonOperators,
Expand Down Expand Up @@ -62,6 +64,12 @@ class RQLFilterClass:
OPENAPI_SPECIFICATION = RQLFilterClassSpecification
"""Class for OpenAPI specifications generation."""

QUERIES_CACHE_BACKEND = LFUCache
"""Class for query caching (can be `None`)."""

QUERIES_CACHE_SIZE = 20
"""Default number of cached queries."""

def __init__(self, queryset, instance=None):
self.queryset = queryset
self._is_distinct = self.DISTINCT
Expand Down Expand Up @@ -193,11 +201,11 @@ def apply_filters(self, query, request=None, view=None):
self._view = view

rql_ast, qs, select_filters = None, self.queryset, []
qs.select_data = None

if query:
rql_ast = RQLParser.parse_query(query)
rql_transformer = RQLToDjangoORMTransformer(self)

try:
qs = rql_transformer.transform(rql_ast)
except LarkError as e:
Expand All @@ -214,21 +222,17 @@ def apply_filters(self, query, request=None, view=None):
if self._is_distinct:
qs = qs.distinct()

if request:
request.rql_ast = rql_ast
qs.select_data = None

if self.SELECT:
select_data = self._build_select_data(select_filters)
qs = self._apply_optimizations(qs, select_data)

if request:
request.rql_select = {
'depth': 0,
'select': select_data,
}
qs.select_data = {
'depth': 0,
'select': select_data,
}

self.queryset = qs

self._request = None
self._view = None

Expand Down
13 changes: 12 additions & 1 deletion dj_rql/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
# Copyright © 2021 Ingram Micro Inc. All rights reserved.
#

from cachetools import LFUCache

from dj_rql.exceptions import RQLFilterParsingError
from dj_rql.grammar import RQL_GRAMMAR

Expand All @@ -10,9 +12,18 @@


class RQLLarkParser(Lark):
def __init__(self, *args, **kwargs):
    """Initialize the underlying Lark parser and attach an LFU cache for parsed queries."""
    super().__init__(*args, **kwargs)
    # Memoizes query -> AST so repeated queries skip re-parsing.
    self._cache = LFUCache(maxsize=1000)

def parse_query(self, query):
    """Parse an RQL query string into an AST, memoizing results in the LFU cache.

    Raises:
        RQLFilterParsingError: on any Lark parsing failure.

    Defect fixed: a leftover pre-change line (``rql_ast = RQLParser.parse(query)``)
    duplicated the parse call; a single ``self.parse`` call remains.
    """
    # NOTE(review): keys on hash(query); a hash collision would return the wrong
    # AST — presumably acceptable at this cache size, confirm if queries are untrusted.
    cache_key = hash(query)
    if cache_key in self._cache:
        return self._cache[cache_key]

    try:
        rql_ast = self.parse(query)
    except LarkError:
        raise RQLFilterParsingError()

    self._cache[cache_key] = rql_ast
    return rql_ast
Expand Down
1 change: 1 addition & 0 deletions requirements/dev.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
lark-parser==0.11.0
Django>=2.2.19
cachetools>=4.2.4
3 changes: 3 additions & 0 deletions tests/dj_rf/filters.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#
# Copyright © 2021 Ingram Micro Inc. All rights reserved.
#
from cachetools import LRUCache

from dj_rql.constants import FilterLookups, RQL_NULL
from dj_rql.drf.fields import SelectField
Expand Down Expand Up @@ -189,3 +190,5 @@ class BooksFilterClass(RQLFilterClass):

class SelectBooksFilterClass(BooksFilterClass):
SELECT = True
QUERIES_CACHE_BACKEND = LRUCache
QUERIES_CACHE_SIZE = 100
1 change: 1 addition & 0 deletions tests/dj_rf/view.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,5 +68,6 @@ class AutoViewSet(DRFViewSet):
def rql_filter_class(self):
class Cls(AutoRQLFilterClass):
MODEL = Book
QUERIES_CACHE_BACKEND = None

return Cls
5 changes: 3 additions & 2 deletions tests/test_drf/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
# Copyright © 2021 Ingram Micro Inc. All rights reserved.
#

from dj_rql.drf.backend import FilterCache
from dj_rql.drf.backend import RQLFilterBackend, _FilterClassCache

import pytest

Expand All @@ -18,4 +18,5 @@ def api_client():

@pytest.fixture
def clear_cache():
FilterCache.clear()
_FilterClassCache.clear()
RQLFilterBackend._CACHES = {}
47 changes: 38 additions & 9 deletions tests/test_drf/test_common_drf_backend.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
#
# Copyright © 2021 Ingram Micro Inc. All rights reserved.
#
from cachetools import LFUCache, LRUCache

from dj_rql.drf import FilterCache, RQLFilterBackend
from dj_rql.drf import RQLFilterBackend
from dj_rql.drf.backend import _FilterClassCache

from django.db import connection
from django.test.utils import CaptureQueriesContext
Expand Down Expand Up @@ -91,28 +93,55 @@ class View:


@pytest.mark.django_db
def test_cache(api_client, clear_cache):
def test_filter_cls_cache(api_client, clear_cache):
books = [
Book.objects.create(title='F'),
Book.objects.create(title='G'),
]

assert FilterCache.CACHE == {}
assert _FilterClassCache.CACHE == {}
response = api_client.get('{0}?{1}'.format(reverse('book-list'), 'title=F'))
assert response.data == [{'id': books[0].pk}]

expected_cache_key = 'book.BooksFilterClass'
assert expected_cache_key in FilterCache.CACHE
cache_item_id = id(FilterCache.CACHE[expected_cache_key])
assert expected_cache_key in _FilterClassCache.CACHE
cache_item_id = id(_FilterClassCache.CACHE[expected_cache_key])

response = api_client.get('{0}?{1}'.format(reverse('book-list'), 'title=G'))
assert response.data == [{'id': books[1].pk}]

assert expected_cache_key in FilterCache.CACHE
assert id(FilterCache.CACHE[expected_cache_key]) == cache_item_id
assert expected_cache_key in _FilterClassCache.CACHE
assert id(_FilterClassCache.CACHE[expected_cache_key]) == cache_item_id

FilterCache.clear()
assert FilterCache.CACHE == {}
_FilterClassCache.clear()
assert _FilterClassCache.CACHE == {}


@pytest.mark.django_db
def test_query_cache(api_client, clear_cache):
    """Repeated identical queries must populate and reuse the per-view query caches."""
    book_f = Book.objects.create(title='F')
    Book.objects.create(title='G')

    list_url = reverse('book-list')
    for _ in range(4):
        # Repeating the same request sequence exercises cache hits after the first pass.
        response = api_client.get('{0}?{1}'.format(list_url, 'title=F'))
        assert response.data == [{'id': book_f.pk}]

        response = api_client.get('{0}?{1}'.format(list_url, 'title=X'))
        assert response.data == []

        response = api_client.get(reverse('select-list') + '?select(-id)')
        assert response.status_code == HTTP_200_OK
        assert 'id' not in response.data[0]

    caches = RQLFilterBackend._CACHES
    book_cache = caches['book.BooksFilterClass']
    select_cache = caches['select.SelectBooksFilterClass']

    # Two distinct book queries cached; defaults: LFU backend, size 20.
    assert isinstance(book_cache, LFUCache)
    assert (book_cache.currsize, book_cache.maxsize) == (2, 20)

    # One select query cached; class overrides: LRU backend, size 100.
    assert isinstance(select_cache, LRUCache)
    assert (select_cache.currsize, select_cache.maxsize) == (1, 100)


@pytest.mark.django_db
Expand Down
Loading