fix: Resolve integration tests path issue
This commit is contained in:
parent
644116a2ce
commit
eb65a89621
3 changed files with 10 additions and 16 deletions
|
|
@@ -2,6 +2,7 @@ import os
|
||||||
import sys
|
import sys
|
||||||
import uuid
|
import uuid
|
||||||
import pytest
|
import pytest
|
||||||
|
import pathlib
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
from cognee.modules.chunking.TextChunker import TextChunker
|
from cognee.modules.chunking.TextChunker import TextChunker
|
||||||
|
|
@@ -24,8 +25,7 @@ GROUND_TRUTH = [
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_PdfDocument(mock_engine):
|
async def test_PdfDocument(mock_engine):
|
||||||
test_file_path = os.path.join(
|
test_file_path = os.path.join(
|
||||||
os.sep,
|
pathlib.Path(__file__).parent.parent.parent,
|
||||||
*(os.path.dirname(__file__).split(os.sep)[:-2]),
|
|
||||||
"test_data",
|
"test_data",
|
||||||
"artificial-intelligence.pdf",
|
"artificial-intelligence.pdf",
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@@ -2,6 +2,7 @@ import os
|
||||||
import sys
|
import sys
|
||||||
import uuid
|
import uuid
|
||||||
import pytest
|
import pytest
|
||||||
|
import pathlib
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
from cognee.modules.chunking.TextChunker import TextChunker
|
from cognee.modules.chunking.TextChunker import TextChunker
|
||||||
|
|
@@ -34,10 +35,7 @@ GROUND_TRUTH = {
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_TextDocument(mock_engine, input_file, chunk_size):
|
async def test_TextDocument(mock_engine, input_file, chunk_size):
|
||||||
test_file_path = os.path.join(
|
test_file_path = os.path.join(
|
||||||
os.sep,
|
pathlib.Path(__file__).parent.parent.parent, "test_data", input_file
|
||||||
*(os.path.dirname(__file__).split(os.sep)[:-2]),
|
|
||||||
"test_data",
|
|
||||||
input_file,
|
|
||||||
)
|
)
|
||||||
document = TextDocument(
|
document = TextDocument(
|
||||||
id=uuid.uuid4(),
|
id=uuid.uuid4(),
|
||||||
|
|
|
||||||
|
|
@@ -2,6 +2,7 @@ import os
|
||||||
import sys
|
import sys
|
||||||
import uuid
|
import uuid
|
||||||
import pytest
|
import pytest
|
||||||
|
import pathlib
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
from cognee.modules.chunking.TextChunker import TextChunker
|
from cognee.modules.chunking.TextChunker import TextChunker
|
||||||
|
|
@@ -18,31 +19,26 @@ chunk_by_sentence_module = sys.modules.get("cognee.tasks.chunks.chunk_by_sentenc
|
||||||
async def test_UnstructuredDocument(mock_engine):
|
async def test_UnstructuredDocument(mock_engine):
|
||||||
# Define file paths of test data
|
# Define file paths of test data
|
||||||
pptx_file_path = os.path.join(
|
pptx_file_path = os.path.join(
|
||||||
os.sep,
|
pathlib.Path(__file__).parent.parent.parent,
|
||||||
*(os.path.dirname(__file__).split(os.sep)[:-2]),
|
|
||||||
"test_data",
|
"test_data",
|
||||||
"example.pptx",
|
"example.pptx",
|
||||||
)
|
)
|
||||||
|
|
||||||
docx_file_path = os.path.join(
|
docx_file_path = os.path.join(
|
||||||
os.sep,
|
pathlib.Path(__file__).parent.parent.parent,
|
||||||
*(os.path.dirname(__file__).split(os.sep)[:-2]),
|
|
||||||
"test_data",
|
"test_data",
|
||||||
"example.docx",
|
"example.docx",
|
||||||
)
|
)
|
||||||
|
|
||||||
csv_file_path = os.path.join(
|
csv_file_path = os.path.join(
|
||||||
os.sep,
|
pathlib.Path(__file__).parent.parent.parent,
|
||||||
*(os.path.dirname(__file__).split(os.sep)[:-2]),
|
|
||||||
"test_data",
|
"test_data",
|
||||||
"example.csv",
|
"example.csv",
|
||||||
)
|
)
|
||||||
|
|
||||||
xlsx_file_path = os.path.join(
|
xlsx_file_path = os.path.join(
|
||||||
os.sep,
|
pathlib.Path(__file__).parent.parent.parent,
|
||||||
*(os.path.dirname(__file__).split(os.sep)[:-2]),
|
"test_data", "example.xlsx",
|
||||||
"test_data",
|
|
||||||
"example.xlsx",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Define test documents
|
# Define test documents
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue