From 9cab4422d59b3a0725588b888924c8fb9d05ade0 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Tue, 8 Oct 2024 22:09:36 +0600 Subject: [PATCH 01/15] Fix metadata deserialization in async mode for PGVector --- langchain_postgres/vectorstores.py | 41 ++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/langchain_postgres/vectorstores.py b/langchain_postgres/vectorstores.py index e1630a1..922009c 100644 --- a/langchain_postgres/vectorstores.py +++ b/langchain_postgres/vectorstores.py @@ -1058,17 +1058,38 @@ async def asimilarity_search_with_score_by_vector( def _results_to_docs_and_scores(self, results: Any) -> List[Tuple[Document, float]]: """Return docs and scores from results.""" - docs = [ - ( - Document( - id=str(result.EmbeddingStore.id), - page_content=result.EmbeddingStore.document, - metadata=result.EmbeddingStore.cmetadata, - ), - result.distance if self.embeddings is not None else None, + docs = [] + for result in results: + metadata = result.EmbeddingStore.cmetadata + + # Attempt to convert metadata to a dict + try: + if isinstance(metadata, dict): + pass # Already a dict + elif isinstance(metadata, str): + metadata = json.loads(metadata) + elif hasattr(metadata, 'buf'): + # For Fragment types (e.g., from asyncpg) + metadata_bytes = metadata.buf + metadata_str = metadata_bytes.decode('utf-8') + metadata = json.loads(metadata_str) + elif hasattr(metadata, 'decode'): + # For other byte-like types + metadata_str = metadata.decode('utf-8') + metadata = json.loads(metadata_str) + else: + metadata = {} # Default to empty dict if unknown type + except Exception as e: + self.logger.warning(f"Failed to deserialize metadata: {e}") + metadata = {} + + doc = Document( + id=str(result.EmbeddingStore.id), + page_content=result.EmbeddingStore.document, + metadata=metadata, ) - for result in results - ] + score = result.distance if self.embeddings is not None else None + docs.append((doc, score)) return docs def _handle_field_filter( From 512728f9f4a327738c98c9c7fc0f71710fbb1e3c Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Wed, 16 Oct 2024 11:40:31 +0600 Subject: [PATCH 02/15] Update existing asynchronous tests to include metadata assertions --- tests/unit_tests/test_vectorstore.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/tests/unit_tests/test_vectorstore.py b/tests/unit_tests/test_vectorstore.py index 9945e51..a3fac56 100644 --- a/tests/unit_tests/test_vectorstore.py +++ b/tests/unit_tests/test_vectorstore.py @@ -159,9 +159,12 @@ def test_pgvector_with_metadatas_with_scores() -> None: @pytest.mark.asyncio async def test_async_pgvector_with_metadatas_with_scores() -> None: - """Test end to end construction and search.""" + """Test construction and search with metadata deserialization.""" texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] + metadatas = [ + {"page": str(i), "info": {"nested": f"value{i}"}} + for i in range(len(texts)) + ] docsearch = await PGVector.afrom_texts( texts=texts, collection_name="test_collection", @@ -170,10 +173,13 @@ async def test_async_pgvector_with_metadatas_with_scores() -> None: connection=CONNECTION_STRING, pre_delete_collection=True, ) - output = await docsearch.asimilarity_search_with_score("foo", k=1) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), 0.0) - ] + output = await docsearch.asimilarity_search_with_score("foo", k=3) + for i, (doc, score) in enumerate(output): + 
expected_metadata = metadatas[i] + assert doc.page_content == texts[i] + assert doc.metadata == expected_metadata + assert isinstance(doc.metadata, dict) + assert score is not None def test_pgvector_with_filter_match() -> None: From 015a08648c78469ec79f5df9440c6b1e95f51010 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Wed, 16 Oct 2024 11:42:39 +0600 Subject: [PATCH 03/15] Add tests for metadata deserialization in async PGVector operations --- tests/unit_tests/test_vectorstore.py | 38 ++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/tests/unit_tests/test_vectorstore.py b/tests/unit_tests/test_vectorstore.py index a3fac56..618be6c 100644 --- a/tests/unit_tests/test_vectorstore.py +++ b/tests/unit_tests/test_vectorstore.py @@ -202,9 +202,12 @@ def test_pgvector_with_filter_match() -> None: @pytest.mark.asyncio async def test_async_pgvector_with_filter_match() -> None: - """Test end to end construction and search.""" + """Test search with filter and metadata deserialization.""" texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] + metadatas = [ + {"page": str(i), "info": {"nested": f"value{i}"}} + for i in range(len(texts)) + ] docsearch = await PGVector.afrom_texts( texts=texts, collection_name="test_collection_filter", @@ -214,11 +217,36 @@ async def test_async_pgvector_with_filter_match() -> None: pre_delete_collection=True, ) output = await docsearch.asimilarity_search_with_score( - "foo", k=1, filter={"page": "0"} + "foo", k=3, filter={"page": "0"} ) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), 0.0) + assert len(output) == 1 + doc, score = output[0] + assert doc.page_content == "foo" + assert doc.metadata == metadatas[0] + assert isinstance(doc.metadata, dict) + assert score is not None + + +@pytest.mark.asyncio +async def test_async_pgvector_metadata_deserialization() -> None: + """Test that metadata is correctly deserialized in async operations.""" + texts = ["foo", "bar", "baz"] + metadatas = [ + {"page": str(i), "info": {"nested": f"value{i}"}} + for i in range(len(texts)) ] + docsearch = await PGVector.afrom_texts( + texts=texts, + collection_name="test_collection_metadata", + embedding=FakeEmbeddingsWithAdaDimension(), + metadatas=metadatas, + connection=CONNECTION_STRING, + pre_delete_collection=True, + ) + output = await docsearch.asimilarity_search("foo", k=3) + for doc, expected_metadata in zip(output, metadatas): + assert doc.metadata == expected_metadata + assert isinstance(doc.metadata, dict) def test_pgvector_with_filter_distant_match() -> None: From a56c921ca7bac5c41aa2091ccd16850e704c7e3e Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Sun, 15 Dec 2024 21:16:29 +0300 Subject: [PATCH 04/15] fix test conflict with #149 --- examples/vectorstore.ipynb | 127 +++---------- tests/unit_tests/test_vectorstore.py | 255 ++++++++++++--------------- 2 files changed, 133 insertions(+), 249 deletions(-) diff --git a/examples/vectorstore.ipynb b/examples/vectorstore.ipynb index 1ae43b2..b5abb67 100644 --- a/examples/vectorstore.ipynb +++ b/examples/vectorstore.ipynb @@ -45,10 +45,10 @@ "metadata": { "tags": [] }, - "outputs": [], "source": [ "!pip install --quiet -U langchain_cohere" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -65,7 +65,6 @@ "metadata": { "tags": [] }, - "outputs": [], "source": [ "from langchain_cohere import CohereEmbeddings\n", "from langchain_postgres import PGVector\n", @@ -83,7 +82,8 @@ " connection=connection,\n", 
" use_jsonb=True,\n", ")" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -124,7 +124,6 @@ "metadata": { "tags": [] }, - "outputs": [], "source": [ "docs = [\n", " Document(page_content='there are cats in the pond', metadata={\"id\": 1, \"location\": \"pond\", \"topic\": \"animals\"}),\n", @@ -138,7 +137,8 @@ " Document(page_content='the library hosts a weekly story time for kids', metadata={\"id\": 9, \"location\": \"library\", \"topic\": \"reading\"}),\n", " Document(page_content='a cooking class for beginners is offered at the community center', metadata={\"id\": 10, \"location\": \"community center\", \"topic\": \"classes\"})\n", "]\n" - ] + ], + "outputs": [] }, { "cell_type": "code", @@ -147,21 +147,10 @@ "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ "vectorstore.add_documents(docs, ids=[doc.metadata['id'] for doc in docs])" - ] + ], + "outputs": [] }, { "cell_type": "code", @@ -170,30 +159,10 @@ "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[Document(page_content='there are cats in the pond', metadata={'id': 1, 'topic': 'animals', 'location': 'pond'}),\n", - " Document(page_content='the book club meets at the library', metadata={'id': 8, 'topic': 'reading', 'location': 'library'}),\n", - " Document(page_content='the library hosts a weekly story time for kids', metadata={'id': 9, 'topic': 'reading', 'location': 'library'}),\n", - " Document(page_content='the new art exhibit is fascinating', metadata={'id': 5, 'topic': 'art', 'location': 'museum'}),\n", - " Document(page_content='ducks are also found in the pond', metadata={'id': 2, 'topic': 'animals', 'location': 'pond'}),\n", - " Document(page_content='the market also sells fresh oranges', metadata={'id': 4, 'topic': 'food', 'location': 'market'}),\n", - " Document(page_content='a cooking class for beginners is offered at the community center', metadata={'id': 10, 'topic': 'classes', 'location': 'community center'}),\n", - " Document(page_content='fresh apples are available at the market', metadata={'id': 3, 'topic': 'food', 'location': 'market'}),\n", - " Document(page_content='a sculpture exhibit is also at the museum', metadata={'id': 6, 'topic': 'art', 'location': 'museum'}),\n", - " Document(page_content='a new coffee shop opened on Main Street', metadata={'id': 7, 'topic': 'food', 'location': 'Main Street'})]" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ "vectorstore.similarity_search('kitty', k=10)" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -210,7 +179,6 @@ "metadata": { "tags": [] }, - "outputs": [], "source": [ "docs = [\n", " Document(page_content='there are cats in the pond', metadata={\"id\": 1, \"location\": \"pond\", \"topic\": \"animals\"}),\n", @@ -224,7 +192,8 @@ " Document(page_content='the library hosts a weekly story time for kids', metadata={\"id\": 9, \"location\": \"library\", \"topic\": \"reading\"}),\n", " Document(page_content='a cooking class for beginners is offered at the community center', metadata={\"id\": 10, \"location\": \"community center\", \"topic\": \"classes\"})\n", "]\n" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -260,26 +229,12 @@ "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[Document(page_content='there are cats in the pond', metadata={'id': 1, 'topic': 
'animals', 'location': 'pond'}),\n", - " Document(page_content='the library hosts a weekly story time for kids', metadata={'id': 9, 'topic': 'reading', 'location': 'library'}),\n", - " Document(page_content='the new art exhibit is fascinating', metadata={'id': 5, 'topic': 'art', 'location': 'museum'}),\n", - " Document(page_content='ducks are also found in the pond', metadata={'id': 2, 'topic': 'animals', 'location': 'pond'})]" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ "vectorstore.similarity_search('kitty', k=10, filter={\n", " 'id': {'$in': [1, 5, 2, 9]}\n", "})" - ] + ], + "outputs": [] }, { "cell_type": "markdown", @@ -296,25 +251,13 @@ "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[Document(page_content='ducks are also found in the pond', metadata={'id': 2, 'topic': 'animals', 'location': 'pond'}),\n", - " Document(page_content='there are cats in the pond', metadata={'id': 1, 'topic': 'animals', 'location': 'pond'})]" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ "vectorstore.similarity_search('ducks', k=10, filter={\n", " 'id': {'$in': [1, 5, 2, 9]},\n", " 'location': {'$in': [\"pond\", \"market\"]}\n", "})" - ] + ], + "outputs": [] }, { "cell_type": "code", @@ -323,19 +266,6 @@ "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[Document(page_content='ducks are also found in the pond', metadata={'id': 2, 'topic': 'animals', 'location': 'pond'}),\n", - " Document(page_content='there are cats in the pond', metadata={'id': 1, 'topic': 'animals', 'location': 'pond'})]" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ "vectorstore.similarity_search('ducks', k=10, filter={\n", " '$and': [\n", @@ -344,7 +274,8 @@ " ]\n", "}\n", ")" - ] + ], + "outputs": [] }, { "cell_type": "code", @@ -353,30 +284,12 @@ "metadata": { "tags": [] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[Document(page_content='the book club meets at the library', metadata={'id': 8, 'topic': 'reading', 'location': 'library'}),\n", - " Document(page_content='the new art exhibit is fascinating', metadata={'id': 5, 'topic': 'art', 'location': 'museum'}),\n", - " Document(page_content='the library hosts a weekly story time for kids', metadata={'id': 9, 'topic': 'reading', 'location': 'library'}),\n", - " Document(page_content='a sculpture exhibit is also at the museum', metadata={'id': 6, 'topic': 'art', 'location': 'museum'}),\n", - " Document(page_content='the market also sells fresh oranges', metadata={'id': 4, 'topic': 'food', 'location': 'market'}),\n", - " Document(page_content='a cooking class for beginners is offered at the community center', metadata={'id': 10, 'topic': 'classes', 'location': 'community center'}),\n", - " Document(page_content='a new coffee shop opened on Main Street', metadata={'id': 7, 'topic': 'food', 'location': 'Main Street'}),\n", - " Document(page_content='fresh apples are available at the market', metadata={'id': 3, 'topic': 'food', 'location': 'market'})]" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], "source": [ "vectorstore.similarity_search('bird', k=10, filter={\n", " 'location': { \"$ne\": 'pond'}\n", "})" - ] + ], + "outputs": [] } ], "metadata": { diff --git a/tests/unit_tests/test_vectorstore.py b/tests/unit_tests/test_vectorstore.py index 618be6c..ef4eeb3 100644 --- 
a/tests/unit_tests/test_vectorstore.py +++ b/tests/unit_tests/test_vectorstore.py @@ -1,6 +1,6 @@ """Test PGVector functionality.""" import contextlib -from typing import Any, AsyncGenerator, Dict, Generator, List, Optional +from typing import Any, AsyncGenerator, Dict, Generator, List, Optional, Sequence import pytest from langchain_core.documents import Document @@ -25,9 +25,12 @@ ADA_TOKEN_COUNT = 1536 -class AnyStr(str): - def __eq__(self, other: Any) -> bool: - return isinstance(other, str) +def _compare_documents(left: Sequence[Document], right: Sequence[Document]) -> None: + """Compare lists of documents, irrespective of IDs.""" + assert len(left) == len(right) + for left_doc, right_doc in zip(left, right): + assert left_doc.page_content == right_doc.page_content + assert left_doc.metadata == right_doc.metadata class FakeEmbeddingsWithAdaDimension(FakeEmbeddings): @@ -55,7 +58,7 @@ def test_pgvector() -> None: pre_delete_collection=True, ) output = docsearch.similarity_search("foo", k=1) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) @pytest.mark.asyncio @@ -70,7 +73,7 @@ async def test_async_pgvector() -> None: pre_delete_collection=True, ) output = await docsearch.asimilarity_search("foo", k=1) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) def test_pgvector_embeddings() -> None: @@ -86,7 +89,7 @@ def test_pgvector_embeddings() -> None: pre_delete_collection=True, ) output = docsearch.similarity_search("foo", k=1) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) @pytest.mark.asyncio @@ -103,7 +106,7 @@ async def test_async_pgvector_embeddings() -> None: pre_delete_collection=True, ) output = await docsearch.asimilarity_search("foo", k=1) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) def test_pgvector_with_metadatas() -> None: @@ -119,7 +122,7 @@ def test_pgvector_with_metadatas() -> None: pre_delete_collection=True, ) output = docsearch.similarity_search("foo", k=1) - assert output == [Document(page_content="foo", metadata={"page": "0"}, id=AnyStr())] + _compare_documents(output, [Document(page_content="foo", metadata={"page": "0"})]) @pytest.mark.asyncio @@ -136,7 +139,7 @@ async def test_async_pgvector_with_metadatas() -> None: pre_delete_collection=True, ) output = await docsearch.asimilarity_search("foo", k=1) - assert output == [Document(page_content="foo", metadata={"page": "0"}, id=AnyStr())] + _compare_documents(output, [Document(page_content="foo", metadata={"page": "0"})]) def test_pgvector_with_metadatas_with_scores() -> None: @@ -152,19 +155,16 @@ def test_pgvector_with_metadatas_with_scores() -> None: pre_delete_collection=True, ) output = docsearch.similarity_search_with_score("foo", k=1) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), 0.0) - ] + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="foo", metadata={"page": "0"})]) + assert scores == (0.0,) @pytest.mark.asyncio async def test_async_pgvector_with_metadatas_with_scores() -> None: - """Test construction and search with metadata deserialization.""" + """Test end to end construction and search.""" texts = ["foo", "bar", "baz"] - metadatas = [ - {"page": str(i), "info": {"nested": f"value{i}"}} - for i in range(len(texts)) - ] + metadatas 
= [{"page": str(i)} for i in range(len(texts))] docsearch = await PGVector.afrom_texts( texts=texts, collection_name="test_collection", @@ -173,13 +173,10 @@ async def test_async_pgvector_with_metadatas_with_scores() -> None: connection=CONNECTION_STRING, pre_delete_collection=True, ) - output = await docsearch.asimilarity_search_with_score("foo", k=3) - for i, (doc, score) in enumerate(output): - expected_metadata = metadatas[i] - assert doc.page_content == texts[i] - assert doc.metadata == expected_metadata - assert isinstance(doc.metadata, dict) - assert score is not None + output = await docsearch.asimilarity_search_with_score("foo", k=1) + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="foo", metadata={"page": "0"})]) + assert scores == (0.0,) def test_pgvector_with_filter_match() -> None: @@ -195,19 +192,16 @@ def test_pgvector_with_filter_match() -> None: pre_delete_collection=True, ) output = docsearch.similarity_search_with_score("foo", k=1, filter={"page": "0"}) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), 0.0) - ] + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="foo", metadata={"page": "0"})]) + assert scores == (0.0,) @pytest.mark.asyncio async def test_async_pgvector_with_filter_match() -> None: - """Test search with filter and metadata deserialization.""" + """Test end to end construction and search.""" texts = ["foo", "bar", "baz"] - metadatas = [ - {"page": str(i), "info": {"nested": f"value{i}"}} - for i in range(len(texts)) - ] + metadatas = [{"page": str(i)} for i in range(len(texts))] docsearch = await PGVector.afrom_texts( texts=texts, collection_name="test_collection_filter", @@ -217,36 +211,11 @@ async def test_async_pgvector_with_filter_match() -> None: pre_delete_collection=True, ) output = await docsearch.asimilarity_search_with_score( - "foo", k=3, filter={"page": "0"} - ) - assert len(output) == 1 - doc, score = output[0] - assert doc.page_content == "foo" - assert doc.metadata == metadatas[0] - assert isinstance(doc.metadata, dict) - assert score is not None - - -@pytest.mark.asyncio -async def test_async_pgvector_metadata_deserialization() -> None: - """Test that metadata is correctly deserialized in async operations.""" - texts = ["foo", "bar", "baz"] - metadatas = [ - {"page": str(i), "info": {"nested": f"value{i}"}} - for i in range(len(texts)) - ] - docsearch = await PGVector.afrom_texts( - texts=texts, - collection_name="test_collection_metadata", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection=CONNECTION_STRING, - pre_delete_collection=True, + "foo", k=1, filter={"page": "0"} ) - output = await docsearch.asimilarity_search("foo", k=3) - for doc, expected_metadata in zip(output, metadatas): - assert doc.metadata == expected_metadata - assert isinstance(doc.metadata, dict) + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="foo", metadata={"page": "0"})]) + assert scores == (0.0,) def test_pgvector_with_filter_distant_match() -> None: @@ -262,12 +231,9 @@ def test_pgvector_with_filter_distant_match() -> None: pre_delete_collection=True, ) output = docsearch.similarity_search_with_score("foo", k=1, filter={"page": "2"}) - assert output == [ - ( - Document(page_content="baz", metadata={"page": "2"}, id=AnyStr()), - 0.0013003906671379406, - ) - ] + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="baz", metadata={"page": "2"})]) + assert scores == 
(0.0013003906671379406,) @pytest.mark.asyncio @@ -286,12 +252,9 @@ async def test_async_pgvector_with_filter_distant_match() -> None: output = await docsearch.asimilarity_search_with_score( "foo", k=1, filter={"page": "2"} ) - assert output == [ - ( - Document(page_content="baz", metadata={"page": "2"}, id=AnyStr()), - 0.0013003906671379406, - ) - ] + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="baz", metadata={"page": "2"})]) + assert scores == (0.0013003906671379406,) def test_pgvector_with_filter_no_match() -> None: @@ -627,17 +590,16 @@ def test_pgvector_relevance_score() -> None: ) output = docsearch.similarity_search_with_relevance_scores("foo", k=3) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), 1.0), - ( - Document(page_content="bar", metadata={"page": "1"}, id=AnyStr()), - 0.9996744261675065, - ), - ( - Document(page_content="baz", metadata={"page": "2"}, id=AnyStr()), - 0.9986996093328621, - ), - ] + docs, scores = zip(*output) + _compare_documents( + docs, + [ + Document(page_content="foo", metadata={"page": "0"}), + Document(page_content="bar", metadata={"page": "1"}), + Document(page_content="baz", metadata={"page": "2"}), + ], + ) + assert scores == (1.0, 0.9996744261675065, 0.9986996093328621) @pytest.mark.asyncio @@ -655,17 +617,16 @@ async def test_async_pgvector_relevance_score() -> None: ) output = await docsearch.asimilarity_search_with_relevance_scores("foo", k=3) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), 1.0), - ( - Document(page_content="bar", metadata={"page": "1"}, id=AnyStr()), - 0.9996744261675065, - ), - ( - Document(page_content="baz", metadata={"page": "2"}, id=AnyStr()), - 0.9986996093328621, - ), - ] + docs, scores = zip(*output) + _compare_documents( + docs, + [ + Document(page_content="foo", metadata={"page": "0"}), + Document(page_content="bar", metadata={"page": "1"}), + Document(page_content="baz", metadata={"page": "2"}), + ], + ) + assert scores == (1.0, 0.9996744261675065, 0.9986996093328621) def test_pgvector_retriever_search_threshold() -> None: @@ -686,10 +647,13 @@ def test_pgvector_retriever_search_threshold() -> None: search_kwargs={"k": 3, "score_threshold": 0.999}, ) output = retriever.get_relevant_documents("summer") - assert output == [ - Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), - Document(page_content="bar", metadata={"page": "1"}, id=AnyStr()), - ] + _compare_documents( + output, + [ + Document(page_content="foo", metadata={"page": "0"}), + Document(page_content="bar", metadata={"page": "1"}), + ], + ) @pytest.mark.asyncio @@ -711,10 +675,13 @@ async def test_async_pgvector_retriever_search_threshold() -> None: search_kwargs={"k": 3, "score_threshold": 0.999}, ) output = await retriever.aget_relevant_documents("summer") - assert output == [ - Document(page_content="foo", metadata={"page": "0"}, id=AnyStr()), - Document(page_content="bar", metadata={"page": "1"}, id=AnyStr()), - ] + _compare_documents( + output, + [ + Document(page_content="foo", metadata={"page": "0"}), + Document(page_content="bar", metadata={"page": "1"}), + ], + ) def test_pgvector_retriever_search_threshold_custom_normalization_fn() -> None: @@ -741,7 +708,7 @@ def test_pgvector_retriever_search_threshold_custom_normalization_fn() -> None: @pytest.mark.asyncio async def test_async_pgvector_retriever_search_threshold_custom_normalization_fn() -> ( - None + None ): """Test searching with threshold and custom 
normalization function""" texts = ["foo", "bar", "baz"] @@ -775,7 +742,7 @@ def test_pgvector_max_marginal_relevance_search() -> None: pre_delete_collection=True, ) output = docsearch.max_marginal_relevance_search("foo", k=1, fetch_k=3) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) @pytest.mark.asyncio @@ -790,7 +757,7 @@ async def test_async_pgvector_max_marginal_relevance_search() -> None: pre_delete_collection=True, ) output = await docsearch.amax_marginal_relevance_search("foo", k=1, fetch_k=3) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) def test_pgvector_max_marginal_relevance_search_with_score() -> None: @@ -804,7 +771,9 @@ def test_pgvector_max_marginal_relevance_search_with_score() -> None: pre_delete_collection=True, ) output = docsearch.max_marginal_relevance_search_with_score("foo", k=1, fetch_k=3) - assert output == [(Document(page_content="foo", id=AnyStr()), 0.0)] + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="foo")]) + assert scores == (0.0,) @pytest.mark.asyncio @@ -821,7 +790,9 @@ async def test_async_pgvector_max_marginal_relevance_search_with_score() -> None output = await docsearch.amax_marginal_relevance_search_with_score( "foo", k=1, fetch_k=3 ) - assert output == [(Document(page_content="foo", id=AnyStr()), 0.0)] + docs, scores = zip(*output) + _compare_documents(docs, [Document(page_content="foo")]) + assert scores == (0.0,) def test_pgvector_with_custom_connection() -> None: @@ -835,7 +806,7 @@ def test_pgvector_with_custom_connection() -> None: pre_delete_collection=True, ) output = docsearch.similarity_search("foo", k=1) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) @pytest.mark.asyncio @@ -850,7 +821,7 @@ async def test_async_pgvector_with_custom_connection() -> None: pre_delete_collection=True, ) output = await docsearch.asimilarity_search("foo", k=1) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) def test_pgvector_with_custom_engine_args() -> None: @@ -873,7 +844,7 @@ def test_pgvector_with_custom_engine_args() -> None: engine_args=engine_args, ) output = docsearch.similarity_search("foo", k=1) - assert output == [Document(page_content="foo", id=AnyStr())] + _compare_documents(output, [Document(page_content="foo")]) # We should reuse this test-case across other integrations @@ -907,7 +878,7 @@ async def async_pgvector() -> AsyncGenerator[PGVector, None]: @contextlib.contextmanager def get_vectorstore( - *, embedding: Optional[Embeddings] = None + *, embedding: Optional[Embeddings] = None ) -> Generator[PGVector, None, None]: """Get a pre-populated-vectorstore""" store = PGVector.from_documents( @@ -927,7 +898,7 @@ def get_vectorstore( @contextlib.asynccontextmanager async def aget_vectorstore( - *, embedding: Optional[Embeddings] = None + *, embedding: Optional[Embeddings] = None ) -> AsyncGenerator[PGVector, None]: """Get a pre-populated-vectorstore""" store = await PGVector.afrom_documents( @@ -947,8 +918,8 @@ async def aget_vectorstore( @pytest.mark.parametrize("test_filter, expected_ids", TYPE_1_FILTERING_TEST_CASES) def test_pgvector_with_with_metadata_filters_1( - test_filter: Dict[str, Any], - expected_ids: List[int], + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end 
construction and search.""" with get_vectorstore() as pgvector: @@ -959,8 +930,8 @@ def test_pgvector_with_with_metadata_filters_1( @pytest.mark.asyncio @pytest.mark.parametrize("test_filter, expected_ids", TYPE_1_FILTERING_TEST_CASES) async def test_async_pgvector_with_with_metadata_filters_1( - test_filter: Dict[str, Any], - expected_ids: List[int], + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" async with aget_vectorstore() as pgvector: @@ -970,9 +941,9 @@ async def test_async_pgvector_with_with_metadata_filters_1( @pytest.mark.parametrize("test_filter, expected_ids", TYPE_2_FILTERING_TEST_CASES) def test_pgvector_with_with_metadata_filters_2( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs = pgvector.similarity_search("meow", k=5, filter=test_filter) @@ -982,9 +953,9 @@ def test_pgvector_with_with_metadata_filters_2( @pytest.mark.asyncio @pytest.mark.parametrize("test_filter, expected_ids", TYPE_2_FILTERING_TEST_CASES) async def test_async_pgvector_with_with_metadata_filters_2( - async_pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + async_pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs = await async_pgvector.asimilarity_search("meow", k=5, filter=test_filter) @@ -993,9 +964,9 @@ async def test_async_pgvector_with_with_metadata_filters_2( @pytest.mark.parametrize("test_filter, expected_ids", TYPE_3_FILTERING_TEST_CASES) def test_pgvector_with_with_metadata_filters_3( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs = pgvector.similarity_search("meow", k=5, filter=test_filter) @@ -1005,9 +976,9 @@ def test_pgvector_with_with_metadata_filters_3( @pytest.mark.asyncio @pytest.mark.parametrize("test_filter, expected_ids", TYPE_3_FILTERING_TEST_CASES) async def test_async_pgvector_with_with_metadata_filters_3( - async_pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + async_pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs = await async_pgvector.asimilarity_search("meow", k=5, filter=test_filter) @@ -1016,9 +987,9 @@ async def test_async_pgvector_with_with_metadata_filters_3( @pytest.mark.parametrize("test_filter, expected_ids", TYPE_4_FILTERING_TEST_CASES) def test_pgvector_with_with_metadata_filters_4( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs = pgvector.similarity_search("meow", k=5, filter=test_filter) @@ -1028,9 +999,9 @@ def test_pgvector_with_with_metadata_filters_4( @pytest.mark.asyncio @pytest.mark.parametrize("test_filter, expected_ids", TYPE_4_FILTERING_TEST_CASES) async def test_async_pgvector_with_with_metadata_filters_4( - async_pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + async_pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs 
= await async_pgvector.asimilarity_search("meow", k=5, filter=test_filter) @@ -1039,9 +1010,9 @@ async def test_async_pgvector_with_with_metadata_filters_4( @pytest.mark.parametrize("test_filter, expected_ids", TYPE_5_FILTERING_TEST_CASES) def test_pgvector_with_with_metadata_filters_5( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs = pgvector.similarity_search("meow", k=5, filter=test_filter) @@ -1051,9 +1022,9 @@ def test_pgvector_with_with_metadata_filters_5( @pytest.mark.asyncio @pytest.mark.parametrize("test_filter, expected_ids", TYPE_5_FILTERING_TEST_CASES) async def test_async_pgvector_with_with_metadata_filters_5( - async_pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], + async_pgvector: PGVector, + test_filter: Dict[str, Any], + expected_ids: List[int], ) -> None: """Test end to end construction and search.""" docs = await async_pgvector.asimilarity_search("meow", k=5, filter=test_filter) From ad890bb4505fb1ffcf08578999ce1aa39ae34dd2 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Sat, 8 Feb 2025 13:20:01 +0300 Subject: [PATCH 05/15] fix: add missing import for json in vectorstores.py --- langchain_postgres/vectorstores.py | 1 + 1 file changed, 1 insertion(+) diff --git a/langchain_postgres/vectorstores.py b/langchain_postgres/vectorstores.py index 922009c..54bf2ff 100644 --- a/langchain_postgres/vectorstores.py +++ b/langchain_postgres/vectorstores.py @@ -1,6 +1,7 @@ # pylint: disable=too-many-lines from __future__ import annotations +import json import contextlib import enum import logging From ecb7e8acd080b3ff51c2fdf22f2864e490e47da3 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Sat, 8 Feb 2025 13:20:34 +0300 Subject: [PATCH 06/15] test: add unit test for metadata deserialization in PGVector --- tests/unit_tests/test_pgvector_metadata.py | 84 ++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 tests/unit_tests/test_pgvector_metadata.py diff --git a/tests/unit_tests/test_pgvector_metadata.py b/tests/unit_tests/test_pgvector_metadata.py new file mode 100644 index 0000000..018e486 --- /dev/null +++ b/tests/unit_tests/test_pgvector_metadata.py @@ -0,0 +1,84 @@ +import json +import pytest +from langchain_core.documents import Document +from langchain_postgres.vectorstores import PGVector +from tests.unit_tests.fake_embeddings import FakeEmbeddings + + +# Fake classes to simulate different metadata types +class FakeFragment: + """Simulate a non-dict JSON fragment (e.g., asyncpg Fragment) with a 'buf' attribute.""" + + def __init__(self, data: bytes): + self.buf = data + + +class FakeBytes: + """Simulate an object with a decode() method.""" + + def __init__(self, data: bytes): + self.data = data + + def decode(self, encoding: str) -> str: + return self.data.decode(encoding) + + +class FakeEmbeddingStore: + """Simulate the EmbeddingStore object attached to a result row.""" + + def __init__(self, id, document, cmetadata): + self.id = id + self.document = document + self.cmetadata = cmetadata + + +class FakeResult: + """Simulate a database result row with an EmbeddingStore attribute.""" + + def __init__(self, embedding_store): + self.EmbeddingStore = embedding_store + + +def fake_results(): + # 1. Metadata already a dict. + result1 = FakeResult(FakeEmbeddingStore("1", "doc1", {"user": "foo"})) + # 2. Metadata as a JSON string. 
+ result2 = FakeResult(FakeEmbeddingStore("2", "doc2", '{"user": "bar"}')) + # 3. Metadata as a fake Fragment (simulate asyncpg) with a .buf attribute. + fragment = FakeFragment(b'{"user": "baz"}') + result3 = FakeResult(FakeEmbeddingStore("3", "doc3", fragment)) + # 4. Metadata as an object with a decode() method. + fake_bytes = FakeBytes(b'{"user": "qux"}') + result4 = FakeResult(FakeEmbeddingStore("4", "doc4", fake_bytes)) + # 5. Metadata of an invalid type should result in empty dict. + result5 = FakeResult(FakeEmbeddingStore("5", "doc5", 12345)) + return [result1, result2, result3, result4, result5] + + +def test_metadata_deserialization(): + """Test that PGVector correctly deserializes non-dict metadata.""" + # Create a PGVector instance using a fake embedding function. + pg_vector = PGVector( + embeddings=FakeEmbeddings(), + connection="sqlite://", # Using SQLite for testing purposes. + collection_name="fake_collection", + pre_delete_collection=True, + use_jsonb=True, + async_mode=False, + ) + results = fake_results() + docs_and_scores = pg_vector._results_to_docs_and_scores(results) + docs = [doc for doc, score in docs_and_scores] + + # Verify that each document’s metadata is as expected. + assert docs[0].metadata == {"user": "foo"} + assert docs[1].metadata == {"user": "bar"} + assert docs[2].metadata == {"user": "baz"} + assert docs[3].metadata == {"user": "qux"} + # For an invalid type, we expect an empty dict. + assert docs[4].metadata == {} + + +if __name__ == "__main__": + test_metadata_deserialization() + print("All metadata deserialization tests passed.") From 9227acd90bd540bb987b2d3850a50f85f87b1e15 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Sat, 8 Feb 2025 13:47:07 +0300 Subject: [PATCH 07/15] test: add unit tests for metadata deserialization in PGVector --- tests/unit_tests/test_pgvector_metadata.py | 57 ++++++++++++++-------- 1 file changed, 36 insertions(+), 21 deletions(-) diff --git a/tests/unit_tests/test_pgvector_metadata.py b/tests/unit_tests/test_pgvector_metadata.py index 018e486..7143dfa 100644 --- a/tests/unit_tests/test_pgvector_metadata.py +++ b/tests/unit_tests/test_pgvector_metadata.py @@ -1,20 +1,23 @@ -import json -import pytest -from langchain_core.documents import Document +import sqlalchemy from langchain_postgres.vectorstores import PGVector from tests.unit_tests.fake_embeddings import FakeEmbeddings +# --- Monkey-patch PGVector to avoid DB operations during instantiation --- +# This prevents __post_init__ (table creation, etc.) from running. +PGVector.__post_init__ = lambda self: None + + +# --- Fake classes to simulate database result rows --- -# Fake classes to simulate different metadata types class FakeFragment: - """Simulate a non-dict JSON fragment (e.g., asyncpg Fragment) with a 'buf' attribute.""" + """Simulate a non-dict JSON fragment (e.g. 
an asyncpg Fragment) with a 'buf' attribute.""" def __init__(self, data: bytes): self.buf = data class FakeBytes: - """Simulate an object with a decode() method.""" + """Simulate an object with a decode() method that returns a JSON string.""" def __init__(self, data: bytes): self.data = data @@ -33,10 +36,11 @@ def __init__(self, id, document, cmetadata): class FakeResult: - """Simulate a database result row with an EmbeddingStore attribute.""" + """Simulate a database result row with an EmbeddingStore attribute and a 'distance' attribute.""" - def __init__(self, embedding_store): + def __init__(self, embedding_store, distance=0.0): self.EmbeddingStore = embedding_store + self.distance = distance def fake_results(): @@ -50,32 +54,43 @@ def fake_results(): # 4. Metadata as an object with a decode() method. fake_bytes = FakeBytes(b'{"user": "qux"}') result4 = FakeResult(FakeEmbeddingStore("4", "doc4", fake_bytes)) - # 5. Metadata of an invalid type should result in empty dict. + # 5. Metadata of an invalid type (e.g. an integer) should result in empty dict. result5 = FakeResult(FakeEmbeddingStore("5", "doc5", 12345)) return [result1, result2, result3, result4, result5] def test_metadata_deserialization(): - """Test that PGVector correctly deserializes non-dict metadata.""" - # Create a PGVector instance using a fake embedding function. - pg_vector = PGVector( - embeddings=FakeEmbeddings(), - connection="sqlite://", # Using SQLite for testing purposes. - collection_name="fake_collection", - pre_delete_collection=True, - use_jsonb=True, - async_mode=False, - ) + """Test that PGVector correctly converts non-dict metadata to dictionaries.""" + # Monkey-patch SQLAlchemy's MetaData.create_all so that table creation is skipped. + orig_create_all = sqlalchemy.MetaData.create_all + sqlalchemy.MetaData.create_all = lambda self, bind=None, checkfirst=True: None + + try: + # Create a PGVector instance. + # Using SQLite for testing, so we disable extension creation. + pg_vector = PGVector( + embeddings=FakeEmbeddings(), + connection="sqlite://", # using SQLite for testing + collection_name="fake_collection", + pre_delete_collection=True, + use_jsonb=True, + async_mode=False, + create_extension=False # Prevent executing PG-specific extension creation + ) + finally: + # Restore the original create_all method so that other tests aren’t affected. + sqlalchemy.MetaData.create_all = orig_create_all + results = fake_results() docs_and_scores = pg_vector._results_to_docs_and_scores(results) docs = [doc for doc, score in docs_and_scores] - # Verify that each document’s metadata is as expected. + # Check that metadata is correctly deserialized. assert docs[0].metadata == {"user": "foo"} assert docs[1].metadata == {"user": "bar"} assert docs[2].metadata == {"user": "baz"} assert docs[3].metadata == {"user": "qux"} - # For an invalid type, we expect an empty dict. + # For the invalid type, we expect an empty dict. 
assert docs[4].metadata == {} From f4cdf73fea0fbbf728e9afd7d5190064b370f0ed Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Tue, 11 Mar 2025 15:18:23 +0300 Subject: [PATCH 08/15] test: move PGVector metadata test from unit to integration using asyncpg --- .../test_pgvector_metadata_integration.py | 96 ++++++++++++++++++ tests/unit_tests/test_pgvector_metadata.py | 99 ------------------- 2 files changed, 96 insertions(+), 99 deletions(-) create mode 100644 tests/integration/test_pgvector_metadata_integration.py delete mode 100644 tests/unit_tests/test_pgvector_metadata.py diff --git a/tests/integration/test_pgvector_metadata_integration.py b/tests/integration/test_pgvector_metadata_integration.py new file mode 100644 index 0000000..ecec0f8 --- /dev/null +++ b/tests/integration/test_pgvector_metadata_integration.py @@ -0,0 +1,96 @@ +""" +Integration Test for PGVector Metadata Deserialization + +This test verifies that PGVector correctly deserializes stored metadata into a Python dictionary. +It connects to a live PostgreSQL instance (with the pgvector extension enabled) and uses +FakeEmbeddings from langchain_core for testing purposes. + +Workflow: + 1. Connect to the PostgreSQL instance via asyncpg. + 2. Initialize PGVector with create_extension=False (to avoid multi-command issues). + 3. Manually create the pgvector extension in the database. + 4. Create the collection (and tables) in the database. + 5. Insert test documents with known metadata. + 6. Perform a similarity search. + 7. Assert that each returned document's metadata is a dict. + +Usage: + Make sure your Docker Compose container for pgvector (exposed on port 6024) is running. + Then run: + poetry run pytest tests/integration/test_pgvector_metadata_integration.py +""" + +import pytest +from sqlalchemy import text +from langchain_postgres.vectorstores import PGVector +from langchain_core.documents import Document +from langchain_core.embeddings.fake import FakeEmbeddings + + +@pytest.fixture(scope="module") +async def pgvector_instance(): + """ + Fixture to create and tear down a PGVector instance for integration testing. + """ + # Connection string for the PostgreSQL instance (pgvector container on port 6024) + connection: str = "postgresql+asyncpg://langchain:langchain@localhost:6024/langchain" + + # Instantiate FakeEmbeddings for testing (with a vector size of 1352) + embeddings = FakeEmbeddings(size=1352) + + # Initialize PGVector in async mode. + # Set create_extension=False to avoid multi-command issues. + vectorstore = PGVector( + embeddings=embeddings, + connection=connection, + collection_name="integration_test_metadata", + use_jsonb=True, + async_mode=True, + create_extension=False, # Disable automatic extension creation + pre_delete_collection=True # Ensure a fresh collection for testing + ) + + # Manually create the pgvector extension so that the "vector" type is available. + async with vectorstore._async_engine.begin() as conn: + await conn.execute(text("CREATE EXTENSION IF NOT EXISTS vector;")) + + # Create the collection (and corresponding tables) in the database. + await vectorstore.acreate_collection() + + yield vectorstore + + # Teardown: Drop the collection (and tables) after tests. + await vectorstore.adelete_collection() + + +@pytest.mark.asyncio +async def test_pgvector_metadata_integration(pgvector_instance: PGVector) -> None: + """ + Test that PGVector deserializes metadata correctly. + + Steps: + - Insert documents with predefined metadata. + - Perform a similarity search. 
+ - Verify that each returned document's metadata is a Python dict. + """ + vectorstore = pgvector_instance + + # Prepare sample documents with predefined metadata. + documents = [ + Document(page_content="Document 1", metadata={"user": "foo"}), + Document(page_content="Document 2", metadata={"user": "bar"}) + ] + + # Add the documents to the vectorstore. + await vectorstore.aadd_texts( + texts=[doc.page_content for doc in documents], + metadatas=[doc.metadata for doc in documents] + ) + + # Perform a similarity search that should match the inserted documents. + results = await vectorstore.asimilarity_search_with_score("Document", k=2) + + # Verify that each returned document has its metadata deserialized as a dict. + for doc, score in results: + assert isinstance(doc.metadata, dict), f"Metadata is not a dict: {doc.metadata}" + print(f"Document: {doc.page_content}, Metadata: {doc.metadata}, Score: {score}") diff --git a/tests/unit_tests/test_pgvector_metadata.py b/tests/unit_tests/test_pgvector_metadata.py deleted file mode 100644 index 7143dfa..0000000 --- a/tests/unit_tests/test_pgvector_metadata.py +++ /dev/null @@ -1,99 +0,0 @@ -import sqlalchemy -from langchain_postgres.vectorstores import PGVector -from tests.unit_tests.fake_embeddings import FakeEmbeddings - -# --- Monkey-patch PGVector to avoid DB operations during instantiation --- -# This prevents __post_init__ (table creation, etc.) from running. -PGVector.__post_init__ = lambda self: None - - -# --- Fake classes to simulate database result rows --- - -class FakeFragment: - """Simulate a non-dict JSON fragment (e.g. an asyncpg Fragment) with a 'buf' attribute.""" - - def __init__(self, data: bytes): - self.buf = data - - -class FakeBytes: - """Simulate an object with a decode() method that returns a JSON string.""" - - def __init__(self, data: bytes): - self.data = data - - def decode(self, encoding: str) -> str: - return self.data.decode(encoding) - - -class FakeEmbeddingStore: - """Simulate the EmbeddingStore object attached to a result row.""" - - def __init__(self, id, document, cmetadata): - self.id = id - self.document = document - self.cmetadata = cmetadata - - -class FakeResult: - """Simulate a database result row with an EmbeddingStore attribute and a 'distance' attribute.""" - - def __init__(self, embedding_store, distance=0.0): - self.EmbeddingStore = embedding_store - self.distance = distance - - -def fake_results(): - # 1. Metadata already a dict. - result1 = FakeResult(FakeEmbeddingStore("1", "doc1", {"user": "foo"})) - # 2. Metadata as a JSON string. - result2 = FakeResult(FakeEmbeddingStore("2", "doc2", '{"user": "bar"}')) - # 3. Metadata as a fake Fragment (simulate asyncpg) with a .buf attribute. - fragment = FakeFragment(b'{"user": "baz"}') - result3 = FakeResult(FakeEmbeddingStore("3", "doc3", fragment)) - # 4. Metadata as an object with a decode() method. - fake_bytes = FakeBytes(b'{"user": "qux"}') - result4 = FakeResult(FakeEmbeddingStore("4", "doc4", fake_bytes)) - # 5. Metadata of an invalid type (e.g. an integer) should result in empty dict. - result5 = FakeResult(FakeEmbeddingStore("5", "doc5", 12345)) - return [result1, result2, result3, result4, result5] - - -def test_metadata_deserialization(): - """Test that PGVector correctly converts non-dict metadata to dictionaries.""" - # Monkey-patch SQLAlchemy's MetaData.create_all so that table creation is skipped. 
- orig_create_all = sqlalchemy.MetaData.create_all - sqlalchemy.MetaData.create_all = lambda self, bind=None, checkfirst=True: None - - try: - # Create a PGVector instance. - # Using SQLite for testing, so we disable extension creation. - pg_vector = PGVector( - embeddings=FakeEmbeddings(), - connection="sqlite://", # using SQLite for testing - collection_name="fake_collection", - pre_delete_collection=True, - use_jsonb=True, - async_mode=False, - create_extension=False # Prevent executing PG-specific extension creation - ) - finally: - # Restore the original create_all method so that other tests aren’t affected. - sqlalchemy.MetaData.create_all = orig_create_all - - results = fake_results() - docs_and_scores = pg_vector._results_to_docs_and_scores(results) - docs = [doc for doc, score in docs_and_scores] - - # Check that metadata is correctly deserialized. - assert docs[0].metadata == {"user": "foo"} - assert docs[1].metadata == {"user": "bar"} - assert docs[2].metadata == {"user": "baz"} - assert docs[3].metadata == {"user": "qux"} - # For the invalid type, we expect an empty dict. - assert docs[4].metadata == {} - - -if __name__ == "__main__": - test_metadata_deserialization() - print("All metadata deserialization tests passed.") From 27fe2740bd255642324cf11bfe09c4e8692667e8 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Tue, 11 Mar 2025 15:22:57 +0300 Subject: [PATCH 09/15] test: add integration test for PGVector metadata deserialization using FakeEmbeddings and manual extension creation --- .../test_pgvector_metadata_integration.py | 71 +++++++++---------- 1 file changed, 35 insertions(+), 36 deletions(-) diff --git a/tests/integration/test_pgvector_metadata_integration.py b/tests/integration/test_pgvector_metadata_integration.py index ecec0f8..c3e4f8b 100644 --- a/tests/integration/test_pgvector_metadata_integration.py +++ b/tests/integration/test_pgvector_metadata_integration.py @@ -3,23 +3,24 @@ This test verifies that PGVector correctly deserializes stored metadata into a Python dictionary. It connects to a live PostgreSQL instance (with the pgvector extension enabled) and uses -FakeEmbeddings from langchain_core for testing purposes. +FakeEmbeddings (from langchain_core) for testing purposes. Workflow: - 1. Connect to the PostgreSQL instance via asyncpg. - 2. Initialize PGVector with create_extension=False (to avoid multi-command issues). - 3. Manually create the pgvector extension in the database. - 4. Create the collection (and tables) in the database. + 1. Connect to the PostgreSQL instance (pgvector container on port 6024). + 2. Initialize PGVector with FakeEmbeddings, disabling automatic extension creation. + 3. Manually create the vector extension. + 4. Create the collection and tables. 5. Insert test documents with known metadata. 6. Perform a similarity search. - 7. Assert that each returned document's metadata is a dict. + 7. Assert that each returned document's metadata is a Python dict. + 8. Dispose the async engine to clean up connections. Usage: - Make sure your Docker Compose container for pgvector (exposed on port 6024) is running. 
- Then run: + Ensure your Docker Compose container for pgvector is running, then run: poetry run pytest tests/integration/test_pgvector_metadata_integration.py """ +import asyncio import pytest from sqlalchemy import text from langchain_postgres.vectorstores import PGVector @@ -27,15 +28,24 @@ from langchain_core.embeddings.fake import FakeEmbeddings -@pytest.fixture(scope="module") -async def pgvector_instance(): +@pytest.mark.asyncio +async def test_pgvector_metadata_integration() -> None: """ - Fixture to create and tear down a PGVector instance for integration testing. + Test that PGVector deserializes metadata correctly. + + Steps: + - Connect to a PostgreSQL instance with the pgvector extension. + - Initialize PGVector with FakeEmbeddings and disable automatic extension creation. + - Manually create the pgvector extension so that the "vector" type is available. + - Create the collection and tables. + - Insert documents with predefined metadata. + - Perform a similarity search. + - Verify that each returned document's metadata is a dict. """ # Connection string for the PostgreSQL instance (pgvector container on port 6024) connection: str = "postgresql+asyncpg://langchain:langchain@localhost:6024/langchain" - # Instantiate FakeEmbeddings for testing (with a vector size of 1352) + # Instantiate FakeEmbeddings for testing (vector size set to 1352) embeddings = FakeEmbeddings(size=1352) # Initialize PGVector in async mode. @@ -47,38 +57,20 @@ async def pgvector_instance(): use_jsonb=True, async_mode=True, create_extension=False, # Disable automatic extension creation - pre_delete_collection=True # Ensure a fresh collection for testing + pre_delete_collection=True, # Ensure a fresh collection for testing ) - # Manually create the pgvector extension so that the "vector" type is available. + # Manually create the pgvector extension so that the "vector" type exists. async with vectorstore._async_engine.begin() as conn: await conn.execute(text("CREATE EXTENSION IF NOT EXISTS vector;")) - # Create the collection (and corresponding tables) in the database. + # Create the collection (and tables) in the database. await vectorstore.acreate_collection() - yield vectorstore - - # Teardown: Drop the collection (and tables) after tests. - await vectorstore.adelete_collection() - - -@pytest.mark.asyncio -async def test_pgvector_metadata_integration(pgvector_instance: PGVector) -> None: - """ - Test that PGVector deserializes metadata correctly. - - Steps: - - Insert documents with predefined metadata. - - Perform a similarity search. - - Verify that each returned document's metadata is a Python dict. - """ - vectorstore = pgvector_instance - # Prepare sample documents with predefined metadata. documents = [ Document(page_content="Document 1", metadata={"user": "foo"}), - Document(page_content="Document 2", metadata={"user": "bar"}) + Document(page_content="Document 2", metadata={"user": "bar"}), ] # Add the documents to the vectorstore. @@ -87,10 +79,17 @@ async def test_pgvector_metadata_integration(pgvector_instance: PGVector) -> Non metadatas=[doc.metadata for doc in documents] ) - # Perform a similarity search that should match the inserted documents. + # Perform a similarity search; the query should match the inserted documents. results = await vectorstore.asimilarity_search_with_score("Document", k=2) - # Verify that each returned document has its metadata deserialized as a dict. + # Verify that each returned document's metadata is deserialized as a dict. 
for doc, score in results: assert isinstance(doc.metadata, dict), f"Metadata is not a dict: {doc.metadata}" print(f"Document: {doc.page_content}, Metadata: {doc.metadata}, Score: {score}") + + # Clean up: Dispose the async engine to close connections. + await vectorstore._async_engine.dispose() + + +if __name__ == "__main__": + asyncio.run(test_pgvector_metadata_integration()) From 4db7c59711314960eff3d0c642993fc0daec834f Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Tue, 11 Mar 2025 15:24:16 +0300 Subject: [PATCH 10/15] test: add integration test for PGVector metadata deserialization and add asyncpg dependency --- poetry.lock | 290 ++++++++++++++++++++++++++++++++++++++++++++----- pyproject.toml | 1 + 2 files changed, 263 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index a648c81..e4749e4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -6,6 +6,7 @@ version = "22.1.0" description = "File support for asyncio." optional = false python-versions = ">=3.7,<4.0" +groups = ["dev"] files = [ {file = "aiofiles-22.1.0-py3-none-any.whl", hash = "sha256:1142fa8e80dbae46bb6339573ad4c8c0841358f79c6eb50a493dceca14621bad"}, {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, @@ -17,6 +18,7 @@ version = "0.21.0" description = "asyncio bridge to the standard sqlite3 module" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0"}, {file = "aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3"}, @@ -35,6 +37,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -46,6 +49,7 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main", "dev", "test"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -59,7 +63,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] 
trio = ["trio (>=0.26.1)"] [[package]] @@ -68,6 +72,8 @@ version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -79,6 +85,7 @@ version = "23.1.0" description = "Argon2 for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, @@ -99,6 +106,7 @@ version = "21.2.0" description = "Low-level CFFI bindings for Argon2" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, @@ -136,6 +144,7 @@ version = "1.3.0" description = "Better dates & times for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, @@ -155,6 +164,7 @@ version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -164,24 +174,105 @@ files = [ astroid = ["astroid (>=2,<4)"] test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.11.0\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + +[[package]] +name = "asyncpg" +version = "0.30.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, + {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"}, + {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"}, + {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"}, + {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"}, + {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"}, + {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"}, + {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"}, + {file = 
"asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"}, + {file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"}, + {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"}, + {file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"}, + {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"}, + {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"}, + {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"}, + {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""} + +[package.extras] +docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 
(>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] + [[package]] name = "attrs" version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "babel" @@ -189,13 +280,14 @@ version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = 
"sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "beautifulsoup4" @@ -203,6 +295,7 @@ version = "4.13.3" description = "Screen-scraping library" optional = false python-versions = ">=3.7.0" +groups = ["dev"] files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, @@ -225,6 +318,7 @@ version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, @@ -243,6 +337,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev", "test"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -254,6 +349,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -323,6 +419,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] +markers = {main = "platform_python_implementation == \"PyPy\"", test = "platform_python_implementation == \"PyPy\""} [package.dependencies] pycparser = "*" @@ -333,6 +430,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -434,6 +532,7 @@ version = "2.4.1" description = "Fix common misspellings in text files" optional = false python-versions = ">=3.8" +groups = ["codespell"] files = [ {file = "codespell-2.4.1-py3-none-any.whl", hash = "sha256:3dadafa67df7e4a3dbf51e0d7315061b80d265f9552ebd699b3dd6834b47e425"}, {file = "codespell-2.4.1.tar.gz", hash = "sha256:299fcdcb09d23e81e35a671bbe746d5ad7e8385972e65dbb833a2eaac33c01e5"}, @@ -442,7 +541,7 @@ files = [ [package.extras] dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] hard-encoding-detection = ["chardet"] -toml = ["tomli"] +toml = ["tomli ; python_version < \"3.11\""] types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] [[package]] @@ -451,6 +550,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev", "test"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -462,6 +563,7 @@ version = "0.2.2" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -479,6 +581,7 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -548,7 +651,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "debugpy" @@ -556,6 +659,7 @@ version = "1.8.12" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, @@ -591,6 +695,7 @@ version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, @@ -602,6 +707,7 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -613,6 +719,7 @@ version = "0.4" description = "Discover and load entry points from installed packages." 
optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, @@ -624,6 +731,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -638,13 +747,14 @@ version = "2.2.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "fastjsonschema" @@ -652,6 +762,7 @@ version = "2.21.1" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -666,6 +777,7 @@ version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +groups = ["dev"] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, @@ -677,6 +789,7 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -763,6 +876,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -774,6 +888,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -795,6 +910,7 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -807,7 +923,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -819,6 +935,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev", "test"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -833,6 +950,8 @@ version = "8.6.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, @@ -842,12 +961,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -856,6 +975,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -867,6 +987,7 @@ version = "6.29.5" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = 
"sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -900,6 +1021,7 @@ version = "8.18.1" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -937,6 +1059,7 @@ version = "0.2.0" description = "Vestigial utilities from IPython" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, @@ -948,6 +1071,7 @@ version = "20.11.0" description = "Operations with ISO 8601 durations" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, @@ -962,6 +1086,7 @@ version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -981,6 +1106,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -998,6 +1124,7 @@ version = "0.10.0" description = "A Python implementation of the JSON5 data format." 
optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "json5-0.10.0-py3-none-any.whl", hash = "sha256:19b23410220a7271e8377f81ba8aacba2fdd56947fbb137ee5977cbe1f5e8dfa"}, {file = "json5-0.10.0.tar.gz", hash = "sha256:e66941c8f0a02026943c52c2eb34ebeb2a6f819a0be05920a6f5243cd30fd559"}, @@ -1012,6 +1139,7 @@ version = "1.33" description = "Apply JSON-Patches (RFC 6902)" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +groups = ["main", "test"] files = [ {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, @@ -1026,6 +1154,7 @@ version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -1037,6 +1166,7 @@ version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -1066,6 +1196,7 @@ version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, @@ -1080,6 +1211,7 @@ version = "7.4.9" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_client-7.4.9-py3-none-any.whl", hash = "sha256:214668aaea208195f4c13d28eb272ba79f945fc0cf3f11c7092c20b2ca1980e7"}, {file = "jupyter_client-7.4.9.tar.gz", hash = "sha256:52be28e04171f07aed8f20e1616a5a552ab9fee9cbbe6c1896ae170c3880d392"}, @@ -1104,6 +1236,7 @@ version = "5.7.2" description = "Jupyter core package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, @@ -1124,6 +1257,7 @@ version = "0.12.0" description = "Jupyter Event System library" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, {file = "jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, @@ -1150,6 +1284,7 @@ version = "2.15.0" description = "The backend—i.e. 
core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "jupyter_server-2.15.0-py3-none-any.whl", hash = "sha256:872d989becf83517012ee669f09604aa4a28097c0bd90b2f424310156c2cdae3"}, {file = "jupyter_server-2.15.0.tar.gz", hash = "sha256:9d446b8697b4f7337a1b7cdcac40778babdd93ba614b6d68ab1c0c918f1c4084"}, @@ -1186,6 +1321,7 @@ version = "0.9.3" description = "Jupyter Server extension providing an implementation of the File ID service." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_server_fileid-0.9.3-py3-none-any.whl", hash = "sha256:f73c01c19f90005d3fff93607b91b4955ba4e1dccdde9bfe8026646f94053791"}, {file = "jupyter_server_fileid-0.9.3.tar.gz", hash = "sha256:521608bb87f606a8637fcbdce2f3d24a8b3cc89d2eef61751cb40e468d4e54be"}, @@ -1205,6 +1341,7 @@ version = "0.5.3" description = "A Jupyter Server Extension Providing Terminals." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, @@ -1224,6 +1361,7 @@ version = "0.8.0" description = "A Jupyter Server Extension Providing Y Documents." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_server_ydoc-0.8.0-py3-none-any.whl", hash = "sha256:969a3a1a77ed4e99487d60a74048dc9fa7d3b0dcd32e60885d835bbf7ba7be11"}, {file = "jupyter_server_ydoc-0.8.0.tar.gz", hash = "sha256:a6fe125091792d16c962cc3720c950c2b87fcc8c3ecf0c54c84e9a20b814526c"}, @@ -1243,6 +1381,7 @@ version = "0.2.5" description = "Document structures for collaborative editing using Ypy" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_ydoc-0.2.5-py3-none-any.whl", hash = "sha256:5759170f112c70320a84217dd98d287699076ae65a7f88d458d57940a9f2b882"}, {file = "jupyter_ydoc-0.2.5.tar.gz", hash = "sha256:5a02ca7449f0d875f73e8cb8efdf695dddef15a8e71378b1f4eda6b7c90f5382"}, @@ -1262,6 +1401,7 @@ version = "3.6.8" description = "JupyterLab computational environment" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyterlab-3.6.8-py3-none-any.whl", hash = "sha256:891284e75158998e23eb7a23ecc4caaf27b365e41adca374109b1305b9f769db"}, {file = "jupyterlab-3.6.8.tar.gz", hash = "sha256:a2477383e23f20009188bd9dac7e6e38dbc54307bc36d716bea6ced450647c97"}, @@ -1291,6 +1431,7 @@ version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -1302,6 +1443,7 @@ version = "2.27.3" description = "A set of server components for JupyterLab and JupyterLab like applications." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, @@ -1328,6 +1470,7 @@ version = "0.3.33" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.9" +groups = ["main", "test"] files = [ {file = "langchain_core-0.3.33-py3-none-any.whl", hash = "sha256:269706408a2223f863ff1f9616f31903a5712403199d828b50aadbc4c28b553a"}, {file = "langchain_core-0.3.33.tar.gz", hash = "sha256:b5dd93a4e7f8198d2fc6048723b0bfecf7aaf128b0d268cbac19c34c1579b953"}, @@ -1351,6 +1494,7 @@ version = "0.3.7" description = "Standard tests for LangChain implementations" optional = false python-versions = "<4.0,>=3.9" +groups = ["test"] files = [ {file = "langchain_tests-0.3.7-py3-none-any.whl", hash = "sha256:ebbb15bb4b81a75b15272ecfd8a6b9ff45f015c6edccdcf0a16d1fe461a33c90"}, {file = "langchain_tests-0.3.7.tar.gz", hash = "sha256:28c1edff887de6f50b399345568f54f38797b7fa25d1d01b05e07172717ff7a0"}, @@ -1374,6 +1518,7 @@ version = "0.3.5" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.9" +groups = ["main", "test"] files = [ {file = "langsmith-0.3.5-py3-none-any.whl", hash = "sha256:29da924d2e3662dd56f96d179ebc06662b66dd0b2317362ccebe0de1b78750e7"}, {file = "langsmith-0.3.5.tar.gz", hash = "sha256:d891a205f70ab0b2c26311db6c52486ffc9fc1124238b999619445f6ae900725"}, @@ -1400,6 +1545,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1470,6 +1616,7 @@ version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -1484,6 +1631,7 @@ version = "3.1.1" description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mistune-3.1.1-py3-none-any.whl", hash = "sha256:02106ac2aa4f66e769debbfa028509a275069dcffce0dfa578edd7b991ee700a"}, {file = "mistune-3.1.1.tar.gz", hash = "sha256:e0740d635f515119f7d1feb6f9b192ee60f0cc649f80a8f944f905706a21654c"}, @@ -1498,6 +1646,7 @@ version = "1.14.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["typing"] files = [ {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, @@ -1557,6 +1706,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["typing"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1568,6 +1718,7 @@ version = "1.2.0" description = "Jupyter Notebook as a Jupyter Server extension." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "nbclassic-1.2.0-py3-none-any.whl", hash = "sha256:61cc3612dae9d418d6c3577f7c6d8dad1d6a81b68e7a12585483283d5c92ec37"}, {file = "nbclassic-1.2.0.tar.gz", hash = "sha256:736ec50483a54485971db213be9207e34e51fc144c78343625b69917423644ba"}, @@ -1582,7 +1733,7 @@ notebook-shim = ">=0.2.3" [package.extras] docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket ; sys_platform != \"win32\"", "testpath"] [[package]] name = "nbclient" @@ -1590,6 +1741,7 @@ version = "0.10.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, @@ -1612,6 +1764,7 @@ version = "7.16.6" description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, @@ -1649,6 +1802,7 @@ version = "5.10.4" description = "The Jupyter Notebook format" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, @@ -1670,6 +1824,7 @@ version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -1681,6 +1836,7 @@ version = "6.5.7" description = "A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "notebook-6.5.7-py3-none-any.whl", hash = "sha256:a6afa9a4ff4d149a0771ff8b8c881a7a73b3835f9add0606696d6e9d98ac1cd0"}, {file = "notebook-6.5.7.tar.gz", hash = "sha256:04eb9011dfac634fbd4442adaf0a8c27cd26beef831fe1d19faf930c327768e4"}, @@ -1707,7 +1863,7 @@ traitlets = ">=4.2.1" [package.extras] docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket ; sys_platform != \"win32\"", "selenium (==4.1.5)", "testpath"] [[package]] name = "notebook-shim" @@ -1715,6 +1871,7 @@ version = "0.2.4" description = "A shim layer for notebook traits and config" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, @@ -1732,6 +1889,8 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["main", "test"] +markers = "python_version < \"3.12\"" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1777,6 +1936,8 @@ version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["main", "test"] +markers = "python_version >= \"3.12\"" files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -1831,6 +1992,8 @@ version = "3.10.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" +groups = ["main", "test"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -1919,6 +2082,7 @@ version = "7.7.0" description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, @@ -1930,6 +2094,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -1941,6 +2106,7 @@ version = "1.5.1" description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -1952,6 +2118,7 @@ version = "0.8.4" description = "A Python Parser" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -1967,6 +2134,8 @@ version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -1981,6 +2150,7 @@ version = "0.3.6" description = "pgvector support for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pgvector-0.3.6-py3-none-any.whl", hash = "sha256:f6c269b3c110ccb7496bac87202148ed18f34b390a0189c783e351062400a75a"}, {file = "pgvector-0.3.6.tar.gz", hash = "sha256:31d01690e6ea26cea8a633cde5f0f55f5b246d9c8292d68efdef8c22ec994ade"}, @@ -1995,6 +2165,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -2011,6 +2182,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2026,6 +2198,7 @@ version = "0.21.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, @@ -2040,6 +2213,7 @@ version = "3.0.50" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -2054,6 +2228,7 @@ version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["dev"] files = [ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, @@ -2084,6 +2259,7 @@ version = "3.2.4" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg-3.2.4-py3-none-any.whl", hash = "sha256:43665368ccd48180744cab26b74332f46b63b7e06e8ce0775547a3533883d381"}, {file = "psycopg-3.2.4.tar.gz", hash = "sha256:f26f1346d6bf1ef5f5ef1714dd405c67fb365cfd1c6cea07de1792747b167b92"}, @@ -2094,8 +2270,8 @@ typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.4)"] -c = ["psycopg-c (==3.2.4)"] +binary = ["psycopg-binary (==3.2.4) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.4) ; implementation_name != \"pypy\""] dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] @@ -2107,6 +2283,7 @@ version = "3.2.4" description = "Connection Pool for Psycopg" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg_pool-3.2.4-py3-none-any.whl", hash = "sha256:f6a22cff0f21f06d72fb2f5cb48c618946777c49385358e0c88d062c59cbd224"}, {file = "psycopg_pool-3.2.4.tar.gz", hash = "sha256:61774b5bbf23e8d22bedc7504707135aaf744679f8ef9b3fe29942920746a6ed"}, @@ -2121,6 +2298,8 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" or os_name != \"nt\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -2132,6 +2311,7 @@ version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -2146,10 +2326,12 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +markers = {main = "platform_python_implementation == \"PyPy\"", test = "platform_python_implementation == \"PyPy\""} [[package]] name = "pydantic" @@ -2157,6 +2339,7 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "test"] 
files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -2169,7 +2352,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -2177,6 +2360,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -2289,6 +2473,7 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -2303,6 +2488,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -2325,6 +2511,7 @@ version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -2343,6 +2530,7 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -2361,6 +2549,7 @@ version = "0.7.0" description = "Pytest Plugin to disable socket calls during tests" optional = false python-versions = ">=3.8,<4.0" +groups = ["test"] files = [ {file = "pytest_socket-0.7.0-py3-none-any.whl", hash = "sha256:7e0f4642177d55d317bbd58fc68c6bd9048d6eadb2d46a89307fa9221336ce45"}, {file = "pytest_socket-0.7.0.tar.gz", hash = "sha256:71ab048cbbcb085c15a4423b73b619a8b35d6a307f46f78ea46be51b1b7e11b3"}, @@ -2375,6 +2564,7 @@ version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, @@ -2389,6 +2579,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2403,6 +2594,7 @@ version = "3.2.1" description = "JSON Log Formatter for the Python Logging Package" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "python_json_logger-3.2.1-py3-none-any.whl", hash = "sha256:cdc17047eb5374bd311e748b42f99d71223f3b0e186f4206cc5d52aefe85b090"}, {file = "python_json_logger-3.2.1.tar.gz", hash = "sha256:8eb0554ea17cb75b05d2848bc14fb02fbdbd9d6972120781b974380bfa162008"}, @@ -2412,7 +2604,7 @@ files = [ typing_extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -dev = ["backports.zoneinfo", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec", "msgspec-python313-pre", "mypy", "orjson", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\" and python_version < \"3.13\"", "msgspec-python313-pre ; implementation_name != \"pypy\" and python_version == \"3.13\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] [[package]] name = "pywin32" @@ -2420,6 +2612,8 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = 
"sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -2447,6 +2641,8 @@ version = "2.0.15" description = "Pseudo terminal support for Windows from Python." optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "os_name == \"nt\"" files = [ {file = "pywinpty-2.0.15-cp310-cp310-win_amd64.whl", hash = "sha256:8e7f5de756a615a38b96cd86fa3cd65f901ce54ce147a3179c45907fa11b4c4e"}, {file = "pywinpty-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:9a6bcec2df2707aaa9d08b86071970ee32c5026e10bcc3cc5f6f391d85baf7ca"}, @@ -2463,6 +2659,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2525,6 +2722,7 @@ version = "26.2.1" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, @@ -2646,6 +2844,7 @@ version = "0.36.2" description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -2662,6 +2861,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2683,6 +2883,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main", "test"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -2697,6 +2898,7 @@ version = "0.1.4" description = "A pure python RFC3339 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, @@ -2711,6 +2913,7 @@ version = "0.1.1" description = "Pure python rfc3986 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, @@ -2722,6 +2925,7 @@ version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, @@ -2834,6 +3038,7 @@ version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["lint"] files = [ {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, @@ -2860,15 +3065,16 @@ version = "1.8.3" description = "Send file to trash natively under Mac OS X, Windows and Linux" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["dev"] files = [ {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, ] [package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] +nativelib = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\"", "pywin32 ; sys_platform == \"win32\""] +objc = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\""] +win32 = ["pywin32 ; sys_platform == \"win32\""] [[package]] name = "six" @@ -2876,6 +3082,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -2887,6 +3094,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -2898,6 +3106,7 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -2909,6 +3118,7 @@ version = "2.0.37" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"}, {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"}, @@ -3004,6 +3214,7 @@ version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -3023,6 +3234,7 @@ version = "4.8.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1" +groups = ["test"] files = [ {file = "syrupy-4.8.1-py3-none-any.whl", hash = "sha256:274f97cbaf44175f5e478a2f3a53559d31f41c66c6bf28131695f94ac893ea00"}, {file = "syrupy-4.8.1.tar.gz", hash = "sha256:8da8c0311e6d92de0b15767768c6ab98982b7b4a4c67083c08fbac3fbad4d44c"}, @@ -3037,6 +3249,7 @@ version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -3052,6 +3265,7 @@ version = "0.18.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, @@ -3073,6 +3287,7 @@ version = "1.4.0" description = "A tiny CSS parser" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, @@ -3091,6 +3306,7 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev", "test", "typing"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -3125,6 +3341,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +markers = {dev = "python_version < \"3.11\"", test = "python_full_version <= \"3.11.0a6\"", typing = "python_version < \"3.11\""} [[package]] name = "tornado" @@ -3132,6 +3349,7 @@ version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -3152,6 +3370,7 @@ version = "5.14.3" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -3167,6 +3386,7 @@ version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, @@ -3178,6 +3398,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test", "typing"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -3189,6 +3410,8 @@ version = "2025.1" description = "Provider of IANA time zone data" optional = false 
python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, @@ -3200,6 +3423,7 @@ version = "1.3.0" description = "RFC 6570 URI Template Processor" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, @@ -3214,13 +3438,14 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main", "dev", "test"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -3231,6 +3456,7 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -3242,6 +3468,7 @@ version = "24.11.1" description = "A library for working with the color formats defined by HTML and CSS." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, @@ -3253,6 +3480,7 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -3264,6 +3492,7 @@ version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, @@ -3280,6 +3509,7 @@ version = "0.6.2" description = "Python bindings for the Y-CRDT built from yrs (Rust)" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "y_py-0.6.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c26bada6cd109095139237a46f50fc4308f861f0d304bc9e70acbc6c4503d158"}, {file = "y_py-0.6.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:bae1b1ad8d2b8cf938a60313f8f7461de609621c5dcae491b6e54975f76f83c5"}, @@ -3363,6 +3593,7 @@ version = "0.8.4" description = "WebSocket connector for Ypy" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ypy_websocket-0.8.4-py3-none-any.whl", hash = "sha256:b1ba0dfcc9762f0ca168d2378062d3ca1299d39076b0f145d961359121042be5"}, {file = "ypy_websocket-0.8.4.tar.gz", hash = "sha256:43a001473f5c8abcf182f603049cf305cbc855ad8deaa9dfa0f3b5a7cea9d0ff"}, @@ -3382,17 +3613,19 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [[package]] @@ -3401,6 +3634,7 @@ version = "0.23.0" description = "Zstandard bindings for Python" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, @@ -3508,6 +3742,6 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\ cffi = ["cffi (>=1.11)"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.9" -content-hash = "a9a2c1b3ebd06e93ff340bed08e0c69886694c51bbf2c07033c170b1b77878bb" +content-hash = "55b9c0b1a970e59535ecb526b11c34ade8a8b8e010bc513c20aba9bf328ceebd" diff --git a/pyproject.toml b/pyproject.toml index cc25b79..993ea6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,7 @@ psycopg-pool = "^3.2.1" sqlalchemy = "^2" pgvector = "<0.4" numpy = ">=1.21" +asyncpg = "^0.30.0" [tool.poetry.group.docs.dependencies] From 9038c3d583bbc80dda2cc2462a21412b8abf766d Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Tue, 11 Mar 2025 15:33:51 +0300 Subject: [PATCH 11/15] chore: move asyncpg dependency from main to test group --- poetry.lock | 6 +++--- pyproject.toml | 4 +--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index e4749e4..1d8fac0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -180,7 +180,7 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" -groups = ["main"] +groups = ["test"] markers = "python_version < \"3.11.0\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, @@ -193,7 +193,7 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.8.0" -groups = ["main"] +groups = ["test"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -3744,4 +3744,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "55b9c0b1a970e59535ecb526b11c34ade8a8b8e010bc513c20aba9bf328ceebd" +content-hash = "dc441ccf21d45952c61386a0a29aadc7d6a2827303a774ee86d8933ecb406638" diff --git a/pyproject.toml b/pyproject.toml index 993ea6f..bdb442a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,9 +18,6 @@ psycopg-pool = "^3.2.1" sqlalchemy = "^2" pgvector = "<0.4" numpy = ">=1.21" -asyncpg = "^0.30.0" - -[tool.poetry.group.docs.dependencies] [tool.poetry.group.dev.dependencies] jupyterlab = "^3.6.1" @@ -37,6 +34,7 @@ pytest-socket = "^0.7.0" pytest-cov = "^5.0.0" pytest-timeout = "^2.3.1" langchain-tests = "0.3.7" +asyncpg = "^0.30.0" [tool.poetry.group.codespell] optional = true From 5989544aac29c092f36f1156e642ddf695a3e64f Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Wed, 12 Mar 2025 00:32:35 +0300 Subject: [PATCH 12/15] feat(tests): add temp PGVector test DB fixture for metadata deserialization --- .../test_pgvector_metadata_integration.py | 123 ++++++++++++------ 1 file changed, 81 insertions(+), 42 deletions(-) diff --git a/tests/integration/test_pgvector_metadata_integration.py b/tests/integration/test_pgvector_metadata_integration.py index c3e4f8b..c82f4bc 100644 --- a/tests/integration/test_pgvector_metadata_integration.py +++ b/tests/integration/test_pgvector_metadata_integration.py @@ -1,73 +1,116 @@ """ 
-Integration Test for PGVector Metadata Deserialization - -This test verifies that PGVector correctly deserializes stored metadata into a Python dictionary. -It connects to a live PostgreSQL instance (with the pgvector extension enabled) and uses -FakeEmbeddings (from langchain_core) for testing purposes. - -Workflow: - 1. Connect to the PostgreSQL instance (pgvector container on port 6024). - 2. Initialize PGVector with FakeEmbeddings, disabling automatic extension creation. - 3. Manually create the vector extension. - 4. Create the collection and tables. - 5. Insert test documents with known metadata. - 6. Perform a similarity search. - 7. Assert that each returned document's metadata is a Python dict. - 8. Dispose the async engine to clean up connections. +Integration Test for PGVector Metadata Deserialization Using a Temporary Test Database + +This integration test verifies that PGVector correctly deserializes stored metadata into a Python dictionary. +It follows these steps: + 1. Dynamically create a temporary test database (named "langchain_test_") using a session-scoped + pytest fixture. This ensures an isolated environment for testing. + 2. Initialize PGVector with FakeEmbeddings while disabling automatic extension creation. + 3. Manually create the pgvector extension (using AUTOCOMMIT) so that the custom "vector" type becomes available. + 4. Create the collection and tables in the temporary database. + 5. Insert test documents with predefined metadata. + 6. Execute a similarity search. + 7. Assert that each returned document’s metadata is deserialized as a Python dict. + 8. Finally, clean up by terminating connections and dropping the temporary database. Usage: - Ensure your Docker Compose container for pgvector is running, then run: + Ensure your PostgreSQL instance (with pgvector enabled) is running, then execute: poetry run pytest tests/integration/test_pgvector_metadata_integration.py """ import asyncio +import uuid import pytest +from sqlalchemy.ext.asyncio import create_async_engine from sqlalchemy import text from langchain_postgres.vectorstores import PGVector from langchain_core.documents import Document from langchain_core.embeddings.fake import FakeEmbeddings +@pytest.fixture(scope="session") +async def test_database_url(): + """ + Create a temporary test database for integration testing and drop it afterwards. + + The temporary database name is generated as "langchain_test_". + This fixture connects to the default "postgres" database (with AUTOCOMMIT enabled) to run + CREATE/DROP DATABASE commands outside of a transaction block. It yields the connection string + for the temporary test database. + + Yields: + str: Connection string for the temporary test database. + """ + # Generate a unique database name. + db_suffix = uuid.uuid4().hex + test_db = f"langchain_test_{db_suffix}" + + # Connection string to the default 'postgres' database. + default_db_url = "postgresql+asyncpg://langchain:langchain@localhost:6024/postgres" + engine = create_async_engine(default_db_url, isolation_level="AUTOCOMMIT") + async with engine.connect() as conn: + await conn.execute(text(f"CREATE DATABASE {test_db}")) + await engine.dispose() + + # Build the test database connection string. + test_db_url = f"postgresql+asyncpg://langchain:langchain@localhost:6024/{test_db}" + yield test_db_url + + # Teardown: Drop the temporary test database. 
+ engine = create_async_engine(default_db_url, isolation_level="AUTOCOMMIT") + async with engine.connect() as conn: + # Terminate active connections to the test database. + await conn.execute(text(f""" + SELECT pg_terminate_backend(pg_stat_activity.pid) + FROM pg_stat_activity + WHERE pg_stat_activity.datname = '{test_db}' + AND pid <> pg_backend_pid(); + """)) + await conn.execute(text(f"DROP DATABASE IF EXISTS {test_db}")) + await engine.dispose() + + @pytest.mark.asyncio -async def test_pgvector_metadata_integration() -> None: +async def test_pgvector_metadata_integration(test_database_url: str) -> None: """ - Test that PGVector deserializes metadata correctly. + Integration test for PGVector metadata deserialization using a temporary test database. Steps: - - Connect to a PostgreSQL instance with the pgvector extension. - - Initialize PGVector with FakeEmbeddings and disable automatic extension creation. - - Manually create the pgvector extension so that the "vector" type is available. - - Create the collection and tables. - - Insert documents with predefined metadata. - - Perform a similarity search. - - Verify that each returned document's metadata is a dict. + 1. Use the temporary database connection string provided by the fixture. + 2. Initialize PGVector with FakeEmbeddings and disable automatic extension creation. + 3. Manually create the pgvector extension using AUTOCOMMIT so that the "vector" type is available. + 4. Create the collection (and tables) in the temporary database. + 5. Insert test documents with predefined metadata. + 6. Execute a similarity search. + 7. Assert that each returned document's metadata is deserialized as a Python dict. """ - # Connection string for the PostgreSQL instance (pgvector container on port 6024) - connection: str = "postgresql+asyncpg://langchain:langchain@localhost:6024/langchain" + # Use the temporary test database connection string. + connection = test_database_url - # Instantiate FakeEmbeddings for testing (vector size set to 1352) + # Initialize FakeEmbeddings (with a vector size of 1352 for testing purposes). embeddings = FakeEmbeddings(size=1352) - # Initialize PGVector in async mode. - # Set create_extension=False to avoid multi-command issues. + # Initialize PGVector in asynchronous mode. vectorstore = PGVector( embeddings=embeddings, connection=connection, collection_name="integration_test_metadata", use_jsonb=True, async_mode=True, - create_extension=False, # Disable automatic extension creation - pre_delete_collection=True, # Ensure a fresh collection for testing + create_extension=False, # Disable auto extension creation; we'll do it manually. + pre_delete_collection=True, # Ensure a fresh collection. ) - # Manually create the pgvector extension so that the "vector" type exists. - async with vectorstore._async_engine.begin() as conn: + # Manually create the pgvector extension on the test database. + async with vectorstore._async_engine.connect() as conn: + # Await the execution_options method to obtain a connection with AUTOCOMMIT. + conn = await conn.execution_options(isolation_level="AUTOCOMMIT") await conn.execute(text("CREATE EXTENSION IF NOT EXISTS vector;")) # Create the collection (and tables) in the database. await vectorstore.acreate_collection() - # Prepare sample documents with predefined metadata. + # Define sample documents with predefined metadata. 
documents = [ Document(page_content="Document 1", metadata={"user": "foo"}), Document(page_content="Document 2", metadata={"user": "bar"}), @@ -79,17 +122,13 @@ async def test_pgvector_metadata_integration() -> None: metadatas=[doc.metadata for doc in documents] ) - # Perform a similarity search; the query should match the inserted documents. + # Perform a similarity search. results = await vectorstore.asimilarity_search_with_score("Document", k=2) - # Verify that each returned document's metadata is deserialized as a dict. + # Verify that each returned document's metadata is deserialized as a Python dict. for doc, score in results: assert isinstance(doc.metadata, dict), f"Metadata is not a dict: {doc.metadata}" print(f"Document: {doc.page_content}, Metadata: {doc.metadata}, Score: {score}") - # Clean up: Dispose the async engine to close connections. + # Clean up: Dispose the async engine to close all connections. await vectorstore._async_engine.dispose() - - -if __name__ == "__main__": - asyncio.run(test_pgvector_metadata_integration()) From 317c39ab06fcc0f7ddf21876ab1dbe894b7dcda3 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Wed, 12 Mar 2025 13:08:19 +0300 Subject: [PATCH 13/15] test: add integration tests for PGVector retriever and similarity search methods with score and vector queries --- .../test_pgvector_metadata_integration.py | 108 ++++++++++-------- 1 file changed, 61 insertions(+), 47 deletions(-) diff --git a/tests/integration/test_pgvector_metadata_integration.py b/tests/integration/test_pgvector_metadata_integration.py index c82f4bc..f883f61 100644 --- a/tests/integration/test_pgvector_metadata_integration.py +++ b/tests/integration/test_pgvector_metadata_integration.py @@ -1,24 +1,32 @@ """ -Integration Test for PGVector Metadata Deserialization Using a Temporary Test Database +Integration Test for PGVector Retrieval Methods Using a Temporary Test Database -This integration test verifies that PGVector correctly deserializes stored metadata into a Python dictionary. -It follows these steps: +This integration test verifies that PGVector retrieval methods work as expected, including: + - Retriever interface (ainvoke) + - asimilarity_search_with_score (retrieval with score by query string) + - asimilarity_search_by_vector (retrieval by query embedding) + - asimilarity_search_with_score_by_vector (retrieval with score by query embedding) + +Steps: 1. Dynamically create a temporary test database (named "langchain_test_") using a session-scoped pytest fixture. This ensures an isolated environment for testing. 2. Initialize PGVector with FakeEmbeddings while disabling automatic extension creation. 3. Manually create the pgvector extension (using AUTOCOMMIT) so that the custom "vector" type becomes available. 4. Create the collection and tables in the temporary database. 5. Insert test documents with predefined metadata. - 6. Execute a similarity search. - 7. Assert that each returned document’s metadata is deserialized as a Python dict. - 8. Finally, clean up by terminating connections and dropping the temporary database. + 6. Perform retrieval using: + a) the retriever interface, + b) asimilarity_search_with_score (by query string), + c) asimilarity_search_by_vector (by query embedding), + d) asimilarity_search_with_score_by_vector (by query embedding). + 7. Assert that each returned document's metadata is deserialized as a Python dict and that scores (if applicable) are floats. + 8. Finally, clean up by disposing the async engine and dropping the temporary database. 
Usage: Ensure your PostgreSQL instance (with pgvector enabled) is running, then execute: - poetry run pytest tests/integration/test_pgvector_metadata_integration.py + poetry run pytest -s tests/integration/test_pgvector_retrieval_integration.py """ -import asyncio import uuid import pytest from sqlalchemy.ext.asyncio import create_async_engine @@ -37,29 +45,20 @@ async def test_database_url(): This fixture connects to the default "postgres" database (with AUTOCOMMIT enabled) to run CREATE/DROP DATABASE commands outside of a transaction block. It yields the connection string for the temporary test database. - - Yields: - str: Connection string for the temporary test database. """ - # Generate a unique database name. db_suffix = uuid.uuid4().hex test_db = f"langchain_test_{db_suffix}" - - # Connection string to the default 'postgres' database. default_db_url = "postgresql+asyncpg://langchain:langchain@localhost:6024/postgres" engine = create_async_engine(default_db_url, isolation_level="AUTOCOMMIT") async with engine.connect() as conn: await conn.execute(text(f"CREATE DATABASE {test_db}")) await engine.dispose() - # Build the test database connection string. test_db_url = f"postgresql+asyncpg://langchain:langchain@localhost:6024/{test_db}" yield test_db_url - # Teardown: Drop the temporary test database. engine = create_async_engine(default_db_url, isolation_level="AUTOCOMMIT") async with engine.connect() as conn: - # Terminate active connections to the test database. await conn.execute(text(f""" SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity @@ -71,64 +70,79 @@ async def test_database_url(): @pytest.mark.asyncio -async def test_pgvector_metadata_integration(test_database_url: str) -> None: +async def test_all_retrieval_methods(test_database_url: str) -> None: """ - Integration test for PGVector metadata deserialization using a temporary test database. - - Steps: - 1. Use the temporary database connection string provided by the fixture. - 2. Initialize PGVector with FakeEmbeddings and disable automatic extension creation. - 3. Manually create the pgvector extension using AUTOCOMMIT so that the "vector" type is available. - 4. Create the collection (and tables) in the temporary database. - 5. Insert test documents with predefined metadata. - 6. Execute a similarity search. - 7. Assert that each returned document's metadata is deserialized as a Python dict. + Integration test for all PGVector retrieval methods. + + This test verifies: + a) The retriever interface via ainvoke(). + b) asimilarity_search_with_score() using a query string. + c) asimilarity_search_by_vector() using a query embedding. + d) asimilarity_search_with_score_by_vector() using a query embedding. + + In all cases, the metadata of returned documents should be deserialized as a Python dict, + and where scores are provided, they must be floats. """ - # Use the temporary test database connection string. connection = test_database_url - - # Initialize FakeEmbeddings (with a vector size of 1352 for testing purposes). embeddings = FakeEmbeddings(size=1352) - - # Initialize PGVector in asynchronous mode. vectorstore = PGVector( embeddings=embeddings, connection=connection, - collection_name="integration_test_metadata", + collection_name="integration_test_retrieval", use_jsonb=True, async_mode=True, - create_extension=False, # Disable auto extension creation; we'll do it manually. + create_extension=False, # We'll manually create the extension. pre_delete_collection=True, # Ensure a fresh collection. 
) # Manually create the pgvector extension on the test database. async with vectorstore._async_engine.connect() as conn: - # Await the execution_options method to obtain a connection with AUTOCOMMIT. conn = await conn.execution_options(isolation_level="AUTOCOMMIT") await conn.execute(text("CREATE EXTENSION IF NOT EXISTS vector;")) - # Create the collection (and tables) in the database. + # Create the collection (and underlying tables). await vectorstore.acreate_collection() - # Define sample documents with predefined metadata. + # Insert sample documents with metadata. documents = [ Document(page_content="Document 1", metadata={"user": "foo"}), Document(page_content="Document 2", metadata={"user": "bar"}), + Document(page_content="Another Document", metadata={"user": "baz"}), ] - - # Add the documents to the vectorstore. await vectorstore.aadd_texts( texts=[doc.page_content for doc in documents], metadatas=[doc.metadata for doc in documents] ) - # Perform a similarity search. - results = await vectorstore.asimilarity_search_with_score("Document", k=2) - - # Verify that each returned document's metadata is deserialized as a Python dict. - for doc, score in results: - assert isinstance(doc.metadata, dict), f"Metadata is not a dict: {doc.metadata}" - print(f"Document: {doc.page_content}, Metadata: {doc.metadata}, Score: {score}") + # a) Test the retriever interface. + retriever = vectorstore.as_retriever() + retrieved_docs = await retriever.ainvoke("Document") + for doc in retrieved_docs: + assert isinstance(doc.metadata, dict), f"[Retriever] Metadata is not a dict: {doc.metadata}" + print(f"[Retriever] Document: {doc.page_content}, Metadata: {doc.metadata}") + + # b) Test asimilarity_search_with_score (by query string). + scored_results = await vectorstore.asimilarity_search_with_score("Document", k=2) + for doc, score in scored_results: + assert isinstance(doc.metadata, dict), f"[With Score] Metadata is not a dict: {doc.metadata}" + assert isinstance(score, float), f"[With Score] Score is not a float: {score}" + print(f"[With Score] Document: {doc.page_content}, Metadata: {doc.metadata}, Score: {score}") + + # Obtain a query embedding. + query_embedding = await vectorstore.embeddings.aembed_query("Document") + + # c) Test asimilarity_search_by_vector (by query embedding). + docs_by_vector = await vectorstore.asimilarity_search_by_vector(query_embedding, k=2) + for doc in docs_by_vector: + assert isinstance(doc.metadata, dict), f"[By Vector] Metadata is not a dict: {doc.metadata}" + print(f"[By Vector] Document: {doc.page_content}, Metadata: {doc.metadata}") + + # d) Test asimilarity_search_with_score_by_vector (by query embedding). + scored_docs_by_vector = await vectorstore.asimilarity_search_with_score_by_vector(query_embedding, k=2) + for doc, score in scored_docs_by_vector: + assert isinstance(doc.metadata, dict), f"[With Score By Vector] Metadata is not a dict: {doc.metadata}" + assert isinstance(score, float), f"[With Score By Vector] Score is not a float: {score}" + print(f"[With Score By Vector] Document: {doc.page_content}, Metadata: {doc.metadata}, Score: {score}") # Clean up: Dispose the async engine to close all connections. 
await vectorstore._async_engine.dispose() From be50ffae80d298e5514ff78c4b6571fc0d8fa0a3 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Wed, 12 Mar 2025 13:34:09 +0300 Subject: [PATCH 14/15] chore: remove asyncpg --- poetry.lock | 290 +++++-------------------------------------------- pyproject.toml | 7 +- 2 files changed, 32 insertions(+), 265 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1d8fac0..766ce72 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -6,7 +6,6 @@ version = "22.1.0" description = "File support for asyncio." optional = false python-versions = ">=3.7,<4.0" -groups = ["dev"] files = [ {file = "aiofiles-22.1.0-py3-none-any.whl", hash = "sha256:1142fa8e80dbae46bb6339573ad4c8c0841358f79c6eb50a493dceca14621bad"}, {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, @@ -18,7 +17,6 @@ version = "0.21.0" description = "asyncio bridge to the standard sqlite3 module" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0"}, {file = "aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3"}, @@ -37,7 +35,6 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -49,7 +46,6 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" -groups = ["main", "dev", "test"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -63,7 +59,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -72,8 +68,6 @@ version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" -groups = ["dev"] -markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = 
"sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -85,7 +79,6 @@ version = "23.1.0" description = "Argon2 for Python" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, @@ -106,7 +99,6 @@ version = "21.2.0" description = "Low-level CFFI bindings for Argon2" optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, @@ -144,7 +136,6 @@ version = "1.3.0" description = "Better dates & times for Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, @@ -164,7 +155,6 @@ version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -174,105 +164,24 @@ files = [ astroid = ["astroid (>=2,<4)"] test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] -[[package]] -name = "async-timeout" -version = "5.0.1" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.8" -groups = ["test"] -markers = "python_version < \"3.11.0\"" -files = [ - {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, - {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, -] - -[[package]] -name = "asyncpg" -version = "0.30.0" -description = "An asyncio PostgreSQL driver" -optional = false -python-versions = ">=3.8.0" -groups = ["test"] -files = [ - {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, - {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, - {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"}, - {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"}, - {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"}, - {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"}, - {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = 
"sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"}, - {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"}, - {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"}, - {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"}, - {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"}, - {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"}, - {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"}, - {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"}, - {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"}, - {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"}, - {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"}, - {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"}, - {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"}, - {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"}, - {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"}, - {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"}, - {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"}, - {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"}, - {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"}, - {file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"}, - {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"}, - {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"}, - {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"}, - {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"}, - {file = 
"asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"}, - {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"}, - {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"}, - {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"}, - {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"}, - {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"}, - {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"}, - {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"}, - {file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"}, - {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"}, - {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"}, - {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"}, - {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"}, - {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"}, - {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"}, - {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"}, - {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"}, - {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"}, - {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""} - -[package.extras] -docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] - [[package]] name = "attrs" version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = 
"attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" @@ -280,14 +189,13 @@ version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "beautifulsoup4" @@ -295,7 +203,6 @@ version = "4.13.3" description = 
"Screen-scraping library" optional = false python-versions = ">=3.7.0" -groups = ["dev"] files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, @@ -318,7 +225,6 @@ version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, @@ -337,7 +243,6 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["main", "dev", "test"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -349,7 +254,6 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -groups = ["main", "dev", "test"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -419,7 +323,6 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {main = "platform_python_implementation == \"PyPy\"", test = "platform_python_implementation == \"PyPy\""} [package.dependencies] pycparser = "*" @@ -430,7 +333,6 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -groups = ["main", "dev", "test"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -532,7 +434,6 @@ version = "2.4.1" description = "Fix common misspellings in text files" optional = false python-versions = ">=3.8" -groups = ["codespell"] files = [ {file = "codespell-2.4.1-py3-none-any.whl", hash = "sha256:3dadafa67df7e4a3dbf51e0d7315061b80d265f9552ebd699b3dd6834b47e425"}, {file = "codespell-2.4.1.tar.gz", hash = "sha256:299fcdcb09d23e81e35a671bbe746d5ad7e8385972e65dbb833a2eaac33c01e5"}, @@ -541,7 +442,7 @@ files = [ [package.extras] dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] hard-encoding-detection = ["chardet"] -toml = ["tomli ; python_version < \"3.11\""] +toml = ["tomli"] types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] [[package]] @@ -550,8 +451,6 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev", "test"] -markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -563,7 +462,6 @@ version = "0.2.2" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -581,7 +479,6 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -groups = ["test"] files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -651,7 +548,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "debugpy" @@ -659,7 +556,6 @@ version = "1.8.12" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, @@ -695,7 +591,6 @@ version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" -groups = ["dev"] files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, @@ -707,7 +602,6 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["dev"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -719,7 +613,6 @@ version = "0.4" description = "Discover and load entry points from installed packages." 
optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, @@ -731,8 +624,6 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["main", "dev", "test"] -markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -747,14 +638,13 @@ version = "2.2.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "fastjsonschema" @@ -762,7 +652,6 @@ version = "2.21.1" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -777,7 +666,6 @@ version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -groups = ["dev"] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, @@ -789,7 +677,6 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" -groups = ["main", "test"] files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -876,7 +763,6 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" -groups = ["main", "test"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -888,7 +774,6 @@ version = "1.0.7" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -910,7 +795,6 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -923,7 +807,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -935,7 +819,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "dev", "test"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -950,8 +833,6 @@ version = "8.6.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, @@ -961,12 +842,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -975,7 +856,6 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" -groups = ["test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -987,7 +867,6 @@ version = "6.29.5" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = 
"sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -1021,7 +900,6 @@ version = "8.18.1" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -1059,7 +937,6 @@ version = "0.2.0" description = "Vestigial utilities from IPython" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, @@ -1071,7 +948,6 @@ version = "20.11.0" description = "Operations with ISO 8601 durations" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, @@ -1086,7 +962,6 @@ version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -1106,7 +981,6 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -1124,7 +998,6 @@ version = "0.10.0" description = "A Python implementation of the JSON5 data format." 
optional = false python-versions = ">=3.8.0" -groups = ["dev"] files = [ {file = "json5-0.10.0-py3-none-any.whl", hash = "sha256:19b23410220a7271e8377f81ba8aacba2fdd56947fbb137ee5977cbe1f5e8dfa"}, {file = "json5-0.10.0.tar.gz", hash = "sha256:e66941c8f0a02026943c52c2eb34ebeb2a6f819a0be05920a6f5243cd30fd559"}, @@ -1139,7 +1012,6 @@ version = "1.33" description = "Apply JSON-Patches (RFC 6902)" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -groups = ["main", "test"] files = [ {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, @@ -1154,7 +1026,6 @@ version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false python-versions = ">=3.7" -groups = ["main", "dev", "test"] files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -1166,7 +1037,6 @@ version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -1196,7 +1066,6 @@ version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, @@ -1211,7 +1080,6 @@ version = "7.4.9" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "jupyter_client-7.4.9-py3-none-any.whl", hash = "sha256:214668aaea208195f4c13d28eb272ba79f945fc0cf3f11c7092c20b2ca1980e7"}, {file = "jupyter_client-7.4.9.tar.gz", hash = "sha256:52be28e04171f07aed8f20e1616a5a552ab9fee9cbbe6c1896ae170c3880d392"}, @@ -1236,7 +1104,6 @@ version = "5.7.2" description = "Jupyter core package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, @@ -1257,7 +1124,6 @@ version = "0.12.0" description = "Jupyter Event System library" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, {file = "jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, @@ -1284,7 +1150,6 @@ version = "2.15.0" description = "The backend—i.e. 
core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "jupyter_server-2.15.0-py3-none-any.whl", hash = "sha256:872d989becf83517012ee669f09604aa4a28097c0bd90b2f424310156c2cdae3"}, {file = "jupyter_server-2.15.0.tar.gz", hash = "sha256:9d446b8697b4f7337a1b7cdcac40778babdd93ba614b6d68ab1c0c918f1c4084"}, @@ -1321,7 +1186,6 @@ version = "0.9.3" description = "Jupyter Server extension providing an implementation of the File ID service." optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "jupyter_server_fileid-0.9.3-py3-none-any.whl", hash = "sha256:f73c01c19f90005d3fff93607b91b4955ba4e1dccdde9bfe8026646f94053791"}, {file = "jupyter_server_fileid-0.9.3.tar.gz", hash = "sha256:521608bb87f606a8637fcbdce2f3d24a8b3cc89d2eef61751cb40e468d4e54be"}, @@ -1341,7 +1205,6 @@ version = "0.5.3" description = "A Jupyter Server Extension Providing Terminals." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, @@ -1361,7 +1224,6 @@ version = "0.8.0" description = "A Jupyter Server Extension Providing Y Documents." optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "jupyter_server_ydoc-0.8.0-py3-none-any.whl", hash = "sha256:969a3a1a77ed4e99487d60a74048dc9fa7d3b0dcd32e60885d835bbf7ba7be11"}, {file = "jupyter_server_ydoc-0.8.0.tar.gz", hash = "sha256:a6fe125091792d16c962cc3720c950c2b87fcc8c3ecf0c54c84e9a20b814526c"}, @@ -1381,7 +1243,6 @@ version = "0.2.5" description = "Document structures for collaborative editing using Ypy" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "jupyter_ydoc-0.2.5-py3-none-any.whl", hash = "sha256:5759170f112c70320a84217dd98d287699076ae65a7f88d458d57940a9f2b882"}, {file = "jupyter_ydoc-0.2.5.tar.gz", hash = "sha256:5a02ca7449f0d875f73e8cb8efdf695dddef15a8e71378b1f4eda6b7c90f5382"}, @@ -1401,7 +1262,6 @@ version = "3.6.8" description = "JupyterLab computational environment" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "jupyterlab-3.6.8-py3-none-any.whl", hash = "sha256:891284e75158998e23eb7a23ecc4caaf27b365e41adca374109b1305b9f769db"}, {file = "jupyterlab-3.6.8.tar.gz", hash = "sha256:a2477383e23f20009188bd9dac7e6e38dbc54307bc36d716bea6ced450647c97"}, @@ -1431,7 +1291,6 @@ version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -1443,7 +1302,6 @@ version = "2.27.3" description = "A set of server components for JupyterLab and JupyterLab like applications." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, @@ -1470,7 +1328,6 @@ version = "0.3.33" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.9" -groups = ["main", "test"] files = [ {file = "langchain_core-0.3.33-py3-none-any.whl", hash = "sha256:269706408a2223f863ff1f9616f31903a5712403199d828b50aadbc4c28b553a"}, {file = "langchain_core-0.3.33.tar.gz", hash = "sha256:b5dd93a4e7f8198d2fc6048723b0bfecf7aaf128b0d268cbac19c34c1579b953"}, @@ -1494,7 +1351,6 @@ version = "0.3.7" description = "Standard tests for LangChain implementations" optional = false python-versions = "<4.0,>=3.9" -groups = ["test"] files = [ {file = "langchain_tests-0.3.7-py3-none-any.whl", hash = "sha256:ebbb15bb4b81a75b15272ecfd8a6b9ff45f015c6edccdcf0a16d1fe461a33c90"}, {file = "langchain_tests-0.3.7.tar.gz", hash = "sha256:28c1edff887de6f50b399345568f54f38797b7fa25d1d01b05e07172717ff7a0"}, @@ -1518,7 +1374,6 @@ version = "0.3.5" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.9" -groups = ["main", "test"] files = [ {file = "langsmith-0.3.5-py3-none-any.whl", hash = "sha256:29da924d2e3662dd56f96d179ebc06662b66dd0b2317362ccebe0de1b78750e7"}, {file = "langsmith-0.3.5.tar.gz", hash = "sha256:d891a205f70ab0b2c26311db6c52486ffc9fc1124238b999619445f6ae900725"}, @@ -1545,7 +1400,6 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1616,7 +1470,6 @@ version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -1631,7 +1484,6 @@ version = "3.1.1" description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "mistune-3.1.1-py3-none-any.whl", hash = "sha256:02106ac2aa4f66e769debbfa028509a275069dcffce0dfa578edd7b991ee700a"}, {file = "mistune-3.1.1.tar.gz", hash = "sha256:e0740d635f515119f7d1feb6f9b192ee60f0cc649f80a8f944f905706a21654c"}, @@ -1646,7 +1498,6 @@ version = "1.14.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" -groups = ["typing"] files = [ {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, @@ -1706,7 +1557,6 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" -groups = ["typing"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1718,7 +1568,6 @@ version = "1.2.0" description = "Jupyter Notebook as a Jupyter Server extension." optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "nbclassic-1.2.0-py3-none-any.whl", hash = "sha256:61cc3612dae9d418d6c3577f7c6d8dad1d6a81b68e7a12585483283d5c92ec37"}, {file = "nbclassic-1.2.0.tar.gz", hash = "sha256:736ec50483a54485971db213be9207e34e51fc144c78343625b69917423644ba"}, @@ -1733,7 +1582,7 @@ notebook-shim = ">=0.2.3" [package.extras] docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket ; sys_platform != \"win32\"", "testpath"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] [[package]] name = "nbclient" @@ -1741,7 +1590,6 @@ version = "0.10.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
optional = false python-versions = ">=3.9.0" -groups = ["dev"] files = [ {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, @@ -1764,7 +1612,6 @@ version = "7.16.6" description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, @@ -1802,7 +1649,6 @@ version = "5.10.4" description = "The Jupyter Notebook format" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, @@ -1824,7 +1670,6 @@ version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" -groups = ["dev"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -1836,7 +1681,6 @@ version = "6.5.7" description = "A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "notebook-6.5.7-py3-none-any.whl", hash = "sha256:a6afa9a4ff4d149a0771ff8b8c881a7a73b3835f9add0606696d6e9d98ac1cd0"}, {file = "notebook-6.5.7.tar.gz", hash = "sha256:04eb9011dfac634fbd4442adaf0a8c27cd26beef831fe1d19faf930c327768e4"}, @@ -1863,7 +1707,7 @@ traitlets = ">=4.2.1" [package.extras] docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket ; sys_platform != \"win32\"", "selenium (==4.1.5)", "testpath"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] [[package]] name = "notebook-shim" @@ -1871,7 +1715,6 @@ version = "0.2.4" description = "A shim layer for notebook traits and config" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, @@ -1889,8 +1732,6 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" -groups = ["main", "test"] -markers = "python_version < \"3.12\"" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1936,8 +1777,6 @@ version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" -groups = ["main", "test"] -markers = "python_version >= \"3.12\"" files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -1992,8 +1831,6 @@ version = "3.10.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" -groups = ["main", "test"] -markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -2082,7 +1919,6 @@ version = "7.7.0" description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, @@ -2094,7 +1930,6 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "test"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -2106,7 +1941,6 @@ version = "1.5.1" description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["dev"] files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -2118,7 +1952,6 @@ version = "0.8.4" description = "A Python Parser" optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -2134,8 +1967,6 @@ version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" -groups = ["dev"] -markers = "sys_platform != \"win32\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -2150,7 +1981,6 @@ version = "0.3.6" description = "pgvector support for Python" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "pgvector-0.3.6-py3-none-any.whl", hash = "sha256:f6c269b3c110ccb7496bac87202148ed18f34b390a0189c783e351062400a75a"}, {file = "pgvector-0.3.6.tar.gz", hash = "sha256:31d01690e6ea26cea8a633cde5f0f55f5b246d9c8292d68efdef8c22ec994ade"}, @@ -2165,7 +1995,6 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -2182,7 +2011,6 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2198,7 +2026,6 @@ version = "0.21.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, @@ -2213,7 +2040,6 @@ version = "3.0.50" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8.0" -groups = ["dev"] files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -2228,7 +2054,6 @@ version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -groups = ["dev"] files = [ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, @@ -2259,7 +2084,6 @@ version = "3.2.4" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "psycopg-3.2.4-py3-none-any.whl", hash = "sha256:43665368ccd48180744cab26b74332f46b63b7e06e8ce0775547a3533883d381"}, {file = "psycopg-3.2.4.tar.gz", hash = "sha256:f26f1346d6bf1ef5f5ef1714dd405c67fb365cfd1c6cea07de1792747b167b92"}, @@ -2270,8 +2094,8 @@ typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.4) ; implementation_name != \"pypy\""] -c = ["psycopg-c (==3.2.4) ; implementation_name != \"pypy\""] +binary = ["psycopg-binary (==3.2.4)"] +c = ["psycopg-c (==3.2.4)"] dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] @@ -2283,7 +2107,6 @@ version = "3.2.4" description = "Connection Pool for Psycopg" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "psycopg_pool-3.2.4-py3-none-any.whl", hash = "sha256:f6a22cff0f21f06d72fb2f5cb48c618946777c49385358e0c88d062c59cbd224"}, {file = "psycopg_pool-3.2.4.tar.gz", hash = "sha256:61774b5bbf23e8d22bedc7504707135aaf744679f8ef9b3fe29942920746a6ed"}, @@ -2298,8 +2121,6 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" -groups = ["dev"] -markers = "sys_platform != \"win32\" or os_name != \"nt\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -2311,7 +2132,6 @@ version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -2326,12 +2146,10 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "test"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {main = "platform_python_implementation == \"PyPy\"", test = "platform_python_implementation == \"PyPy\""} [[package]] name = "pydantic" @@ -2339,7 +2157,6 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" -groups = ["main", "test"] 
files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -2352,7 +2169,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [[package]] name = "pydantic-core" @@ -2360,7 +2177,6 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -2473,7 +2289,6 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -2488,7 +2303,6 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" -groups = ["test"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -2511,7 +2325,6 @@ version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -2530,7 +2343,6 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -2549,7 +2361,6 @@ version = "0.7.0" description = "Pytest Plugin to disable socket calls during tests" optional = false python-versions = ">=3.8,<4.0" -groups = ["test"] files = [ {file = "pytest_socket-0.7.0-py3-none-any.whl", hash = "sha256:7e0f4642177d55d317bbd58fc68c6bd9048d6eadb2d46a89307fa9221336ce45"}, {file = "pytest_socket-0.7.0.tar.gz", hash = "sha256:71ab048cbbcb085c15a4423b73b619a8b35d6a307f46f78ea46be51b1b7e11b3"}, @@ -2564,7 +2375,6 @@ version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" -groups = ["test"] files = [ {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, @@ -2579,7 +2389,6 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2594,7 +2403,6 @@ version = "3.2.1" description = "JSON Log Formatter for the Python Logging Package" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "python_json_logger-3.2.1-py3-none-any.whl", hash = "sha256:cdc17047eb5374bd311e748b42f99d71223f3b0e186f4206cc5d52aefe85b090"}, {file = "python_json_logger-3.2.1.tar.gz", hash = "sha256:8eb0554ea17cb75b05d2848bc14fb02fbdbd9d6972120781b974380bfa162008"}, @@ -2604,7 +2412,7 @@ files = [ typing_extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\" and python_version < \"3.13\"", "msgspec-python313-pre ; implementation_name != \"pypy\" and python_version == \"3.13\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] +dev = ["backports.zoneinfo", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec", "msgspec-python313-pre", "mypy", "orjson", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] [[package]] name = "pywin32" @@ -2612,8 +2420,6 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" -groups = ["dev"] -markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = 
"sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -2641,8 +2447,6 @@ version = "2.0.15" description = "Pseudo terminal support for Windows from Python." optional = false python-versions = ">=3.9" -groups = ["dev"] -markers = "os_name == \"nt\"" files = [ {file = "pywinpty-2.0.15-cp310-cp310-win_amd64.whl", hash = "sha256:8e7f5de756a615a38b96cd86fa3cd65f901ce54ce147a3179c45907fa11b4c4e"}, {file = "pywinpty-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:9a6bcec2df2707aaa9d08b86071970ee32c5026e10bcc3cc5f6f391d85baf7ca"}, @@ -2659,7 +2463,6 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "test"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2722,7 +2525,6 @@ version = "26.2.1" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, @@ -2844,7 +2646,6 @@ version = "0.36.2" description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -2861,7 +2662,6 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" -groups = ["main", "dev", "test"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2883,7 +2683,6 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main", "test"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -2898,7 +2697,6 @@ version = "0.1.4" description = "A pure python RFC3339 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["dev"] files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, @@ -2913,7 +2711,6 @@ version = "0.1.1" description = "Pure python rfc3986 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["dev"] files = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, @@ -2925,7 +2722,6 @@ version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, @@ -3038,7 +2834,6 @@ version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" -groups = ["lint"] files = [ {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, @@ -3065,16 +2860,15 @@ version = "1.8.3" description = "Send file to trash natively under Mac OS X, Windows and Linux" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -groups = ["dev"] files = [ {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, ] [package.extras] -nativelib = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\"", "pywin32 ; sys_platform == \"win32\""] -objc = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\""] -win32 = ["pywin32 ; sys_platform == \"win32\""] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] [[package]] name = "six" @@ -3082,7 +2876,6 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -3094,7 +2887,6 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" -groups = ["main", "dev", "test"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -3106,7 +2898,6 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -3118,7 +2909,6 @@ version = "2.0.37" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"}, {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"}, @@ -3214,7 +3004,6 @@ version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -3234,7 +3023,6 @@ version = "4.8.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1" -groups = ["test"] files = [ {file = "syrupy-4.8.1-py3-none-any.whl", hash = "sha256:274f97cbaf44175f5e478a2f3a53559d31f41c66c6bf28131695f94ac893ea00"}, {file = "syrupy-4.8.1.tar.gz", hash = "sha256:8da8c0311e6d92de0b15767768c6ab98982b7b4a4c67083c08fbac3fbad4d44c"}, @@ -3249,7 +3037,6 @@ version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -3265,7 +3052,6 @@ version = "0.18.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, @@ -3287,7 +3073,6 @@ version = "1.4.0" description = "A tiny CSS parser" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, @@ -3306,7 +3091,6 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" -groups = ["dev", "test", "typing"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -3341,7 +3125,6 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -markers = {dev = "python_version < \"3.11\"", test = "python_full_version <= \"3.11.0a6\"", typing = "python_version < \"3.11\""} [[package]] name = "tornado" @@ -3349,7 +3132,6 @@ version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -3370,7 +3152,6 @@ version = "5.14.3" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -3386,7 +3167,6 @@ version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, @@ -3398,7 +3178,6 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "test", "typing"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -3410,8 +3189,6 @@ version = "2025.1" description = "Provider of IANA time zone data" optional = false 
=2">
 python-versions = ">=2"
-groups = ["main"]
-markers = "sys_platform == \"win32\""
 files = [
     {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"},
     {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"},
@@ -3423,7 +3200,6 @@ version = "1.3.0"
 description = "RFC 6570 URI Template Processor"
 optional = false
 python-versions = ">=3.7"
-groups = ["dev"]
 files = [
     {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"},
     {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"},
@@ -3438,14 +3214,13 @@ version = "2.3.0"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
 python-versions = ">=3.9"
-groups = ["main", "dev", "test"]
 files = [
     {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
     {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
 ]
 
 [package.extras]
-brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
 h2 = ["h2 (>=4,<5)"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
@@ -3456,7 +3231,6 @@ version = "0.2.13"
 description = "Measures the displayed width of unicode strings in a terminal"
 optional = false
 python-versions = "*"
-groups = ["dev"]
 files = [
     {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
     {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
@@ -3468,7 +3242,6 @@ version = "24.11.1"
 description = "A library for working with the color formats defined by HTML and CSS."
 optional = false
 python-versions = ">=3.9"
-groups = ["dev"]
 files = [
     {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"},
     {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"},
@@ -3480,7 +3253,6 @@ version = "0.5.1"
 description = "Character encoding aliases for legacy web content"
 optional = false
 python-versions = "*"
-groups = ["dev"]
 files = [
     {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
     {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
@@ -3492,7 +3264,6 @@ version = "1.8.0"
 description = "WebSocket client for Python with low level API options"
 optional = false
 python-versions = ">=3.8"
-groups = ["dev"]
 files = [
     {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"},
     {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"},
@@ -3509,7 +3280,6 @@ version = "0.6.2"
 description = "Python bindings for the Y-CRDT built from yrs (Rust)"
 optional = false
 python-versions = "*"
-groups = ["dev"]
 files = [
     {file = "y_py-0.6.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c26bada6cd109095139237a46f50fc4308f861f0d304bc9e70acbc6c4503d158"},
     {file = "y_py-0.6.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:bae1b1ad8d2b8cf938a60313f8f7461de609621c5dcae491b6e54975f76f83c5"},
@@ -3593,7 +3363,6 @@ version = "0.8.4"
 description = "WebSocket connector for Ypy"
 optional = false
 python-versions = ">=3.7"
-groups = ["dev"]
 files = [
     {file = "ypy_websocket-0.8.4-py3-none-any.whl", hash = "sha256:b1ba0dfcc9762f0ca168d2378062d3ca1299d39076b0f145d961359121042be5"},
     {file = "ypy_websocket-0.8.4.tar.gz", hash = "sha256:43a001473f5c8abcf182f603049cf305cbc855ad8deaa9dfa0f3b5a7cea9d0ff"},
@@ -3613,19 +3382,17 @@ version = "3.21.0"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 optional = false
 python-versions = ">=3.9"
-groups = ["dev"]
-markers = "python_version < \"3.10\""
 files = [
     {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
     {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
 ]
 
 [package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
 cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
 enabler = ["pytest-enabler (>=2.2)"]
-test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
 type = ["pytest-mypy"]
 
 [[package]]
@@ -3634,7 +3401,6 @@ version = "0.23.0"
 description = "Zstandard bindings for Python"
 optional = false
 python-versions = ">=3.8"
-groups = ["main", "test"]
 files = [
"sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, @@ -3742,6 +3508,6 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\ cffi = ["cffi (>=1.11)"] [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = "^3.9" -content-hash = "dc441ccf21d45952c61386a0a29aadc7d6a2827303a774ee86d8933ecb406638" +content-hash = "a9a2c1b3ebd06e93ff340bed08e0c69886694c51bbf2c07033c170b1b77878bb" \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index bdb442a..8d34ae6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,8 @@ sqlalchemy = "^2" pgvector = "<0.4" numpy = ">=1.21" +[tool.poetry.group.docs.dependencies] + [tool.poetry.group.dev.dependencies] jupyterlab = "^3.6.1" @@ -34,7 +36,6 @@ pytest-socket = "^0.7.0" pytest-cov = "^5.0.0" pytest-timeout = "^2.3.1" langchain-tests = "0.3.7" -asyncpg = "^0.30.0" [tool.poetry.group.codespell] optional = true @@ -80,7 +81,7 @@ build-backend = "poetry.core.masonry.api" # https://github.com/tophat/syrupy # --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite. addopts = "--strict-markers --strict-config --durations=5" -# Global timeout for all tests. There should be a good reason for a test to +# Global timeout for all tests. There should be a good reason for a test to # takemore than 30 seconds. timeout = 30 # Registering custom markers. @@ -91,4 +92,4 @@ asyncio_mode = "auto" [tool.codespell] skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,example_data,_dist,examples,templates,*.trig' ignore-regex = '.*(Stati Uniti|Tense=Pres).*' -ignore-words-list = 'momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin' +ignore-words-list = 'momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin' \ No newline at end of file From 7650cdc65d66fa35e37225cbfc085f647a658f98 Mon Sep 17 00:00:00 2001 From: Shamsuddin Ahmed Date: Wed, 12 Mar 2025 13:34:24 +0300 Subject: [PATCH 15/15] refactor: update default_db_url and test_db_url from asyncpg to psycopg in connection strings --- tests/integration/test_pgvector_metadata_integration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/test_pgvector_metadata_integration.py b/tests/integration/test_pgvector_metadata_integration.py index f883f61..7452695 100644 --- a/tests/integration/test_pgvector_metadata_integration.py +++ b/tests/integration/test_pgvector_metadata_integration.py @@ -48,13 +48,13 @@ async def test_database_url(): """ db_suffix = uuid.uuid4().hex test_db = f"langchain_test_{db_suffix}" - default_db_url = "postgresql+asyncpg://langchain:langchain@localhost:6024/postgres" + default_db_url = "postgresql+psycopg://langchain:langchain@localhost:6024/postgres" engine = create_async_engine(default_db_url, isolation_level="AUTOCOMMIT") async with engine.connect() as conn: await conn.execute(text(f"CREATE DATABASE {test_db}")) await engine.dispose() - test_db_url = f"postgresql+asyncpg://langchain:langchain@localhost:6024/{test_db}" + test_db_url = f"postgresql+psycopg://langchain:langchain@localhost:6024/{test_db}" yield test_db_url engine = create_async_engine(default_db_url, isolation_level="AUTOCOMMIT") @@ -84,7 +84,7 @@ async def 
@@ -84,7 +84,7 @@ async def test_all_retrieval_methods(test_database_url: str) -> None:
     and where scores are provided, they must be floats.
     """
     connection = test_database_url
-    embeddings = FakeEmbeddings(size=1352)
+    embeddings = FakeEmbeddings(size=1536)
     vectorstore = PGVector(
         embeddings=embeddings,
         connection=connection,
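
Editor's note (not part of the patch series): PATCH 15 switches the integration tests from the asyncpg driver to psycopg in the SQLAlchemy connection strings, and the fake embedding width moves to 1536. The sketch below is a minimal illustration of how the resulting async setup would be exercised; the collection name, database name, and the localhost:6024 credentials mirror the test fixtures and are assumptions, not values taken from the patches.

    import asyncio

    from langchain_core.embeddings import FakeEmbeddings
    from langchain_postgres import PGVector


    async def main() -> None:
        # psycopg-style URL, as used by the updated fixture (host/credentials assumed).
        connection = "postgresql+psycopg://langchain:langchain@localhost:6024/langchain"
        store = await PGVector.afrom_texts(
            texts=["foo", "bar"],
            metadatas=[{"page": "0"}, {"page": "1"}],
            collection_name="psycopg_smoke_test",  # hypothetical name for illustration
            embedding=FakeEmbeddings(size=1536),
            connection=connection,
            pre_delete_collection=True,
        )
        results = await store.asimilarity_search_with_score("foo", k=1)
        for doc, score in results:
            # With the deserialization fix, metadata arrives as a plain dict.
            print(doc.metadata, score)


    if __name__ == "__main__":
        asyncio.run(main())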