Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 9 additions & 9 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -66,12 +66,12 @@ jobs:

- name: Test with pytest
run: |
poetry run coverage run -m pytest

# - name: Report coverage with Codecov
# if: github.event_name == 'push' && matrix.platform == 'ubuntu-latest'
# uses: codecov/codecov-action@v3
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
# file: ./coverage.xml # optional
# flags: unittests-${{ matrix.python-version }} # optional
poetry run coverage run -m pytest --cov=./ --cov-report=xml
- name: Upload coverage to Codecov
uses: codecov/[email protected]
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./coverage/reports/
env_vars: OS,PYTHON
files: ./coverage.xml
flags: unittests
64 changes: 61 additions & 3 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
from datetime import datetime

from pytest_asyncio import fixture

from servicex.python_dataset import PythonDataset
from servicex.models import (
TransformRequest,
ResultDestination,
Expand All @@ -37,6 +37,12 @@
TransformedResults,
)

from servicex.dataset_identifier import FileListDataset
from servicex.minio_adapter import MinioAdapter

import pandas as pd
import os


@fixture
def transform_request() -> TransformRequest:
Expand All @@ -50,6 +56,44 @@ def transform_request() -> TransformRequest:
) # type: ignore


@fixture
def minio_adapter() -> MinioAdapter:
    """A MinioAdapter pointed at a local, non-TLS endpoint with dummy credentials."""
    adapter = MinioAdapter("localhost", False, "access_key", "secret_key", "bucket")
    return adapter


@fixture
def python_dataset(dummy_parquet_file):
    """A PythonDataset over the dummy parquet file, configured for uproot codegen.

    The inner ``foo`` is the (no-op) user function attached to the dataset;
    its source text is kept exactly as-is since it feeds the transform request.
    """
    identifier = FileListDataset(dummy_parquet_file)
    ds = PythonDataset(
        title="Test submission",
        dataset_identifier=identifier,
        codegen="uproot",
        result_format=ResultFormat.parquet,  # type: ignore
    )  # type: ignore

    def foo():
        return

    ds.with_uproot_function(foo)
    return ds


@fixture
def transformed_result_python_dataset(dummy_parquet_file) -> TransformedResults:
    """Canned TransformedResults describing a finished python-dataset transform."""
    fields = dict(
        hash="289e90f6fe3780253af35c428b784ac22d3ee9200a7581b8f0a9bdcc5ae93479",
        title="Test submission",
        codegen="uproot",
        request_id="b8c508d0-ccf2-4deb-a1f7-65c839eebabf",
        submit_time=datetime.now(),
        data_dir="/foo/bar",
        file_list=[dummy_parquet_file],
        signed_url_list=[],
        files=1,
        result_format=ResultFormat.parquet,
    )
    return TransformedResults(**fields)


@fixture
def transform_status_response() -> dict:
return {
Expand Down Expand Up @@ -115,16 +159,30 @@ def completed_status() -> TransformStatus:


@fixture
def transformed_result(dummy_parquet_file) -> TransformedResults:
    """Generic TransformedResults fixture backed by the dummy parquet file.

    Fix: ``file_list`` was changed to hold a single file, but ``files`` was
    left at its stale value of 2 — keep the count consistent with the list,
    matching the convention in ``transformed_result_python_dataset``.
    """
    return TransformedResults(
        hash="123-4455",
        title="Test",
        codegen="uproot",
        request_id="123-45-6789",
        submit_time=datetime.now(),
        data_dir="/foo/bar",
        file_list=[dummy_parquet_file],
        signed_url_list=[],
        files=1,  # must equal len(file_list); was stale at 2
        # NOTE(review): the backing file is parquet but result_format says
        # root_file — presumably deliberate for a format-mismatch path; confirm.
        result_format=ResultFormat.root_file,
    )


@fixture
def dummy_parquet_file():
    """Write a small throwaway parquet file and yield its path.

    Fix: the original hard-coded ``'1.parquet'`` in the current working
    directory, which pollutes the checkout and can collide when tests run in
    parallel; use a unique temp-file name instead. Dependent fixtures only
    consume the yielded path, so the name change is transparent to them.
    """
    import tempfile  # local import: only this fixture needs it

    data = {'column1': [1, 2, 3, 4],
            'column2': ['A', 'B', 'C', 'D']}
    df = pd.DataFrame(data)
    fd, parquet_file_path = tempfile.mkstemp(suffix='.parquet')
    os.close(fd)  # only the path is needed; pandas reopens the file itself
    df.to_parquet(parquet_file_path, index=False)

    yield parquet_file_path

    # Clean up after the dependent test has finished with the file.
    if os.path.exists(parquet_file_path):
        os.remove(parquet_file_path)
Loading