Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/coverage.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ jobs:
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}

# Job 2: Run integration tests with coverage and upload to Codecov
# Job 3: Run integration tests with coverage and upload to Codecov
integration-tests-coverage:
runs-on: ubuntu-latest

Expand Down
12 changes: 10 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ classifiers = [
]
requires-python = '>=3.11,<3.14'
dependencies = [
'essdiffraction', # ESS Diffraction library
'numpy', # Numerical computing library
'colorama', # Color terminal output
'tabulate', # Pretty-print tabular data for terminal output
Expand Down Expand Up @@ -190,7 +191,13 @@ fail_under = 65 # Temporarily reduce to allow gradual improvement

[tool.ruff]
# Temporarily exclude some directories until we have improved the code quality there
exclude = ['tests', 'tmp']
#exclude = ['tests', 'tmp']
exclude = [
'tmp',
'tests/unit',
'tests/integration/fitting',
'tests/integration/scipp-analysis/tmp',
]
indent-width = 4
line-length = 99
# Enable new rules that are not yet stable, like DOC
Expand Down Expand Up @@ -264,7 +271,8 @@ ban-relative-imports = 'all'
force-single-line = true

[tool.ruff.lint.per-file-ignores]
'*test_*.py' = ['S101'] # allow asserts in test files
'*test_*.py' = ['S101'] # allow asserts in test files
'conftest.py' = ['S101'] # allow asserts in test files
# Vendored jupyter_dark_detect: keep as-is from upstream for easy updates
# https://github.com/OpenMined/jupyter-dark-detect/tree/main/jupyter_dark_detect
'src/easydiffraction/utils/_vendored/jupyter_dark_detect/*' = [
Expand Down
4 changes: 2 additions & 2 deletions src/easydiffraction/analysis/calculators/cryspy.py
Original file line number Diff line number Diff line change
Expand Up @@ -361,8 +361,8 @@ def _convert_experiment_to_cryspy_cif(
cif_lines.append(f'{engine_key_name} {attr_obj.value}')

x_data = experiment.data.x
twotheta_min = float(x_data.min())
twotheta_max = float(x_data.max())
twotheta_min = f'{np.round(x_data.min(), 5):.5f}' # float(x_data.min())
twotheta_max = f'{np.round(x_data.max(), 5):.5f}' # float(x_data.max())
cif_lines.append('')
if expt_type.beam_mode.value == BeamModeEnum.CONSTANT_WAVELENGTH:
cif_lines.append(f'_range_2theta_min {twotheta_min}')
Expand Down
19 changes: 17 additions & 2 deletions src/easydiffraction/experiments/categories/data/bragg_pd.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@
description='Standard uncertainty of the measured intensity at this data point.',
value_spec=AttributeSpec(
type_=DataTypes.NUMERIC,
default=0.0,
default=1.0,
content_validator=RangeValidator(ge=0),
),
cif_handler=CifHandler(
Expand Down Expand Up @@ -321,7 +321,22 @@

@property
def meas_su(self) -> np.ndarray:
return np.fromiter((p.intensity_meas_su.value for p in self._calc_items), dtype=float)
# TODO: The following is a temporary workaround to handle zero

Check notice on line 324 in src/easydiffraction/experiments/categories/data/bragg_pd.py

View check run for this annotation

codefactor.io / CodeFactor

src/easydiffraction/experiments/categories/data/bragg_pd.py#L324

Unresolved comment '# TODO: The following is a temporary workaround to handle zero'. (C100)
# or near-zero uncertainties in the data, when data is loaded
# from CIF files. This is necessary because zero uncertainties
# cause fitting algorithms to fail.
# The current implementation is inefficient.
# In the future, we should extend the functionality of
# the NumericDescriptor to automatically replace the value
# outside of the valid range (`content_validator`) with a
# default value (`default`), when the value is set.
# BraggPdExperiment._load_ascii_data_to_experiment() handles
# this for ASCII data, but we also need to handle CIF data and
# come up with a consistent approach for both data sources.
original = np.fromiter((p.intensity_meas_su.value for p in self._calc_items), dtype=float)
# Replace values smaller than 0.0001 with 1.0
modified = np.where(original < 0.0001, 1.0, original)
return modified

@property
def calc(self) -> np.ndarray:
Expand Down
2 changes: 1 addition & 1 deletion src/easydiffraction/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def _fetch_data_index() -> dict:
_validate_url(index_url)

# macOS: sha256sum index.json
index_hash = 'sha256:e78f5dd2f229ea83bfeb606502da602fc0b07136889877d3ab601694625dd3d7'
index_hash = 'sha256:9aceaf51d298992058c80903283c9a83543329a063692d49b7aaee1156e76884'
destination_dirname = 'easydiffraction'
destination_fname = 'data-index.json'
cache_dir = pooch.os_cache(destination_dirname)
Expand Down
60 changes: 60 additions & 0 deletions tests/integration/scipp-analysis/dream/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2026 DMSC
"""Shared fixtures for DREAM scipp-analysis integration tests.

This module provides pytest fixtures for downloading and parsing
reduced diffraction data from the DREAM instrument in CIF format.
"""

from pathlib import Path

import gemmi
import pytest
from pooch import retrieve

# Remote CIF file URL (regenerated nightly by scipp reduction pipeline)
CIF_URL = 'https://pub-6c25ef91903d4301a3338bd53b370098.r2.dev/dream_reduced.cif'

# Expected datablock name in the CIF file
DATABLOCK_NAME = 'reduced_tof'


@pytest.fixture(scope='module')
def cif_path(
    tmp_path_factory: pytest.TempPathFactory,
) -> str:
    """Download the CIF file once per test module and return its path.

    Uses tmp_path_factory (a fresh temporary directory) instead of the
    default pooch cache so that the latest version of the
    nightly-regenerated CIF file is always fetched.

    NOTE(review): known_hash=None deliberately skips checksum
    validation because the upstream file is regenerated nightly; this
    makes the suite depend on network availability and upstream
    content — confirm this trade-off is intended for CI.
    """
    # A fresh directory per module run means pooch cannot reuse a stale download.
    tmp_dir = tmp_path_factory.mktemp('dream_data')
    return retrieve(url=CIF_URL, known_hash=None, path=tmp_dir)

Comment on lines +15 to +33
Copy link

Copilot AI Feb 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

cif_path downloads a nightly-regenerated CIF from a remote URL with known_hash=None and explicitly avoids caching. This makes the integration test suite non-deterministic and prone to failures whenever the upstream file changes or the network is unavailable. Consider pinning to a versioned/test fixture (committed file or a URL+sha256 in known_hash) and letting pooch cache/validate the content for reproducible CI runs.

Copilot uses AI. Check for mistakes.

@pytest.fixture(scope='module')
def cif_content(
    cif_path: str,
) -> str:
    """Return the raw text of the downloaded CIF file."""
    cif_file = Path(cif_path)
    return cif_file.read_text()


@pytest.fixture(scope='module')
def cif_document(
    cif_path: str,
) -> gemmi.cif.Document:
    """Parse the CIF file with gemmi and return the resulting document."""
    document = gemmi.cif.read(cif_path)
    return document


@pytest.fixture(scope='module')
def cif_block(
    cif_document: gemmi.cif.Document,
) -> gemmi.cif.Block:
    """Return the expected data block from the parsed CIF document."""
    found = cif_document.find_block(DATABLOCK_NAME)
    assert found is not None, (
        f'Expected CIF datablock {DATABLOCK_NAME!r} was not found in the document.'
    )
    return found
213 changes: 213 additions & 0 deletions tests/integration/scipp-analysis/dream/test_analyze_reduced_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,213 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2026 DMSC
"""Tests for analyzing reduced diffraction data using easydiffraction.

These tests verify the complete workflow:
1. Define project
2. Add sample model manually defined
3. Modify experiment CIF file
4. Add experiment from modified CIF file
5. Modify default experiment configuration
6. Select parameters to be fitted
7. Do fitting
"""

from pathlib import Path

import pytest

import easydiffraction as ed

# CIF experiment type tags required by easydiffraction to identify
# the experiment configuration (powder TOF neutron diffraction).
# Each missing tag is appended verbatim to the downloaded CIF file by
# the prepared_cif_path fixture below.
EXPT_TYPE_TAGS = {
    '_expt_type.sample_form': 'powder',
    '_expt_type.beam_mode': 'time-of-flight',
    '_expt_type.radiation_probe': 'neutron',
    '_expt_type.scattering_type': 'bragg',
}


@pytest.fixture(scope='module')
def prepared_cif_path(
    cif_path: str,
    tmp_path_factory: pytest.TempPathFactory,
) -> str:
    """Return the path of a CIF file augmented with the experiment
    type tags that easydiffraction requires.
    """
    content = Path(cif_path).read_text()

    # Append any experiment-type tag that the file does not already carry.
    for tag, value in EXPT_TYPE_TAGS.items():
        if tag not in content:
            content = f'{content}\n{tag} {value}'

    # Persist the augmented CIF in a fresh temporary directory.
    out_dir = tmp_path_factory.mktemp('dream_data')
    out_path = out_dir / 'dream_reduced_prepared.cif'
    out_path.write_text(content)

    return str(out_path)


@pytest.fixture(scope='module')
def project_with_data(
    prepared_cif_path: str,
) -> ed.Project:
    """Build a project holding the sample model, the experiment data
    and its configuration.

    Covers workflow steps 1-5: project creation, manual sample-model
    definition, experiment import from the prepared CIF file, and the
    default experiment configuration.
    """
    # Step 1: create an empty project.
    project = ed.Project()

    # Step 2: define the Si sample model by hand.
    project.sample_models.add(name='si')
    model = project.sample_models['si']
    model.space_group.name_h_m = 'F d -3 m'
    model.space_group.it_coordinate_system_code = '1'
    model.cell.length_a = 5.43146
    model.atom_sites.add(
        label='Si',
        type_symbol='Si',
        fract_x=0.125,
        fract_y=0.125,
        fract_z=0.125,
        wyckoff_letter='c',
        b_iso=1.1,
    )

    # Step 3: import the experiment from the prepared CIF file.
    project.experiments.add(cif_path=prepared_cif_path)
    expt = project.experiments['reduced_tof']

    # Step 4: configure the experiment.
    # Link the phase to the experiment.
    expt.linked_phases.add(id='si', scale=0.8)

    # Instrument geometry and TOF calibration.
    expt.instrument.setup_twotheta_bank = 90.0
    expt.instrument.calib_d_to_tof_linear = 18630.0

    # Peak-profile parameters.
    expt.peak.broad_gauss_sigma_0 = 48500.0
    expt.peak.broad_gauss_sigma_1 = 3000.0
    expt.peak.broad_gauss_sigma_2 = 0.0
    expt.peak.broad_mix_beta_0 = 0.05
    expt.peak.broad_mix_beta_1 = 0.0
    expt.peak.asym_alpha_0 = 0.0
    expt.peak.asym_alpha_1 = 0.26

    # Regions excluded from the fit: (id, start, end).
    for region_id, start, end in (
        ('1', 0, 10000),
        ('2', 70000, 200000),
    ):
        expt.excluded_regions.add(id=region_id, start=start, end=end)

    # Background control points: (id, x, y).
    for point_id, x, y in (
        ('2', 10000, 0.01),
        ('3', 14000, 0.2),
        ('4', 21000, 0.7),
        ('5', 27500, 0.6),
        ('6', 40000, 0.3),
        ('7', 50000, 0.6),
        ('8', 61000, 0.7),
        ('9', 70000, 0.6),
    ):
        expt.background.add(id=point_id, x=x, y=y)

    return project


@pytest.fixture(scope='module')
def fitted_project(
    project_with_data: ed.Project,
) -> ed.Project:
    """Run the fit (workflow steps 6-7) and return the project.

    Frees the refinable parameters on the sample model, the experiment
    and the background, then performs the refinement.
    """
    project = project_with_data
    model = project.sample_models['si']
    expt = project.experiments['reduced_tof']

    # Step 5: select parameters to be fitted.
    # Free sample-model parameters.
    model.atom_sites['Si'].b_iso.free = True

    # Free experiment-level parameters.
    expt.linked_phases['si'].scale.free = True
    expt.instrument.calib_d_to_tof_linear.free = True
    expt.peak.broad_gauss_sigma_0.free = True
    expt.peak.broad_gauss_sigma_1.free = True
    expt.peak.broad_mix_beta_0.free = True

    # Free every background point height.
    for bkg_point in expt.background:
        bkg_point.y.free = True

    # Step 6: run the refinement.
    project.analysis.fit()

    return project


# Test: Data Loading


def test_analyze_reduced_data__load_cif(
    project_with_data: ed.Project,
) -> None:
    """The experiment from the CIF file is registered in the project."""
    experiment_names = project_with_data.experiments.names
    assert 'reduced_tof' in experiment_names


def test_analyze_reduced_data__data_size(
    project_with_data: ed.Project,
) -> None:
    """The loaded pattern contains a substantial number of points."""
    x_values = project_with_data.experiments['reduced_tof'].data.x
    assert x_values.size > 100


# Test: Configuration


def test_analyze_reduced_data__phase_linked(
    project_with_data: ed.Project,
) -> None:
    """The 'si' phase is linked to the experiment."""
    linked_phases = project_with_data.experiments['reduced_tof'].linked_phases
    assert 'si' in linked_phases.names


def test_analyze_reduced_data__background_set(
    project_with_data: ed.Project,
) -> None:
    """At least five background points are configured."""
    background = project_with_data.experiments['reduced_tof'].background
    assert len(background.names) >= 5


# Test: Fitting


def test_analyze_reduced_data__fit_quality(
    fitted_project: ed.Project,
) -> None:
    """The reduced chi-square of the fit matches the reference value."""
    fit_results = fitted_project.analysis.fit_results
    assert fit_results.reduced_chi_square == pytest.approx(16.0, abs=0.1)
Loading
Loading