Commit

Apply automatic formatting
pre-commit-ci-lite[bot] committed Apr 29, 2024
1 parent c53b8a6 commit 16e27df
Showing 8 changed files with 25 additions and 22 deletions.
1 change: 1 addition & 0 deletions src/scippneutron/atoms/__init__.py
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: BSD-3-Clause
 # Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
 """Parameters for neutron interactions with atoms."""
+
 from __future__ import annotations
 
 import dataclasses
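Note: this hunk, like the ones in chopper_cascade.py, fakes.py, and unwrap.py further down, only inserts a blank line between the module docstring and the first import. This matches the blank-line-after-module-docstring rule of recent Black/ruff-format releases, although the commit itself does not name the tool. A minimal sketch on a hypothetical module:

    # before
    """Example module docstring."""
    from __future__ import annotations

    # after: the formatter inserts one blank line after the module docstring
    """Example module docstring."""

    from __future__ import annotations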
29 changes: 14 additions & 15 deletions src/scippneutron/data_streaming/_data_buffer.py
@@ -337,20 +337,20 @@ def init_metadata_buffers(self, stream_info: List[StreamInfo]):
         """
         for stream in stream_info:
             if stream.flatbuffer_id == SLOW_FB_ID:
-                self._metadata_buffers[stream.flatbuffer_id][
-                    stream.source_name
-                ] = _SlowMetadataBuffer(stream, self._slow_metadata_buffer_size)
+                self._metadata_buffers[stream.flatbuffer_id][stream.source_name] = (
+                    _SlowMetadataBuffer(stream, self._slow_metadata_buffer_size)
+                )
             elif stream.flatbuffer_id == FAST_FB_ID:
-                self._metadata_buffers[stream.flatbuffer_id][
-                    stream.source_name
-                ] = _FastMetadataBuffer(
-                    stream, self._fast_metadata_buffer_size, self._emit_queue
+                self._metadata_buffers[stream.flatbuffer_id][stream.source_name] = (
+                    _FastMetadataBuffer(
+                        stream, self._fast_metadata_buffer_size, self._emit_queue
+                    )
                 )
             elif stream.flatbuffer_id == CHOPPER_FB_ID:
-                self._metadata_buffers[stream.flatbuffer_id][
-                    stream.source_name
-                ] = _ChopperMetadataBuffer(
-                    stream, self._chopper_buffer_size, self._emit_queue
+                self._metadata_buffers[stream.flatbuffer_id][stream.source_name] = (
+                    _ChopperMetadataBuffer(
+                        stream, self._chopper_buffer_size, self._emit_queue
+                    )
                 )
             elif stream.flatbuffer_id == EVENT_FB_ID:
                 pass  # detection events, not metadata
@@ -430,10 +430,9 @@ def _handled_event_data(self, new_data: bytes) -> bool:
                 ]
                 frame.coords['detector_id'].values = deserialised_data.detector_id
                 frame.coords['tof'].values = deserialised_data.time_of_flight
-                frame.coords[
-                    'pulse_time'
-                ].values = deserialised_data.pulse_time * np.ones_like(
-                    deserialised_data.time_of_flight
+                frame.coords['pulse_time'].values = (
+                    deserialised_data.pulse_time
+                    * np.ones_like(deserialised_data.time_of_flight)
                 )
                 self._current_event += message_size
             except WrongSchemaException:
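Note: both hunks in this file apply the same wrapping rule: when an assignment does not fit on one line, the newer style keeps the (possibly subscripted) target on a single line and parenthesises the right-hand side, instead of splitting inside the subscript on the left. Assuming this comes from a Black 24.x / ruff-format style update (the tool is not named in the commit), a schematic sketch with hypothetical names, for a line too long to join:

    # before: the subscripted target is broken across lines
    buffers[stream_id][
        source_name
    ] = MetadataBuffer(buffer_size)

    # after: the target stays intact and the value is wrapped in parentheses
    buffers[stream_id][source_name] = (
        MetadataBuffer(buffer_size)
    )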
4 changes: 2 additions & 2 deletions src/scippneutron/io/cif.py
@@ -481,7 +481,7 @@ def write(self, f: io.TextIOBase) -> None:
 
 
 def _convert_input_content(
-    content: Iterable[Union[Mapping[str, Any], Loop, Chunk]]
+    content: Iterable[Union[Mapping[str, Any], Loop, Chunk]],
 ) -> list[Union[Loop, Chunk]]:
     return [
         item if isinstance(item, (Loop, Chunk)) else Chunk(item) for item in content
@@ -498,7 +498,7 @@ def _open(fname: Union[str, Path, io.TextIOBase]):
 
 
 def _preprocess_schema(
-    schema: Optional[Union[CIFSchema, Iterable[CIFSchema]]]
+    schema: Optional[Union[CIFSchema, Iterable[CIFSchema]]],
 ) -> set[CIFSchema]:
     if schema is None:
         return set()
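Note: the two cif.py hunks only add a trailing comma to a lone parameter that is already wrapped onto its own line; the signatures are otherwise unchanged. This looks like the newer magic-trailing-comma behaviour for single-argument signatures that exceed the line length (again an assumption about the formatter). Sketch with a hypothetical function whose signature is assumed too long to fit on one line; Iterable, Mapping, and Any from typing are assumed to be imported:

    # before
    def convert(
        content: Iterable[Mapping[str, Any]]
    ) -> list[Any]:
        return list(content)

    # after: the trailing comma keeps the parameter exploded onto its own line
    def convert(
        content: Iterable[Mapping[str, Any]],
    ) -> list[Any]:
        return list(content)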
1 change: 1 addition & 0 deletions src/scippneutron/tof/chopper_cascade.py
@@ -7,6 +7,7 @@
 See :py:class:`FrameSequence` for the main entry point.
 """
+
 from __future__ import annotations
 
 from dataclasses import dataclass
1 change: 1 addition & 0 deletions src/scippneutron/tof/fakes.py
@@ -10,6 +10,7 @@
 - Monitor event data including event_time_offset and event_time_zero
 - Chopper timestamps
 """
+
 from __future__ import annotations
 
 from typing import Optional
1 change: 1 addition & 0 deletions src/scippneutron/tof/unwrap.py
@@ -10,6 +10,7 @@
 functions defined here are meant to be used as providers for a Sciline pipeline. See
 https://scipp.github.io/sciline/ on how to use Sciline.
 """
+
 import math
 from dataclasses import dataclass
 from typing import Callable, Mapping, NewType, Optional, Tuple, Union
4 changes: 1 addition & 3 deletions tests/atoms/test_atoms.py
@@ -37,9 +37,7 @@ def test_scattering_params_157gd():
         incoherent_scattering_cross_section=sc.scalar(
             394.0, variance=7.0**2, unit='barn'
         ),
-        total_scattering_cross_section=sc.scalar(
-            1044.0, variance=8.0**2, unit='barn'
-        ),
+        total_scattering_cross_section=sc.scalar(1044.0, variance=8.0**2, unit='barn'),
         absorption_cross_section=sc.scalar(259000.0, variance=700.0**2, unit='barn'),
     )
     assert params == expected
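Note: here the change goes the other way: the sc.scalar call is collapsed onto a single line because the joined line now fits the line-length limit (the changed line above comes to exactly 88 characters, the default for Black and ruff-format, assuming the project uses that default). Schematic example with a hypothetical variable:

    # before
    x = sc.scalar(
        1044.0, variance=8.0**2, unit='barn'
    )

    # after: short enough to fit within the limit, so the call is joined
    x = sc.scalar(1044.0, variance=8.0**2, unit='barn')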
6 changes: 4 additions & 2 deletions tests/data_stream_test.py
@@ -40,8 +40,10 @@
     from streaming_data_types.timestamps_tdct import serialise_tdct
 
     from scippneutron.data_streaming._consumer import RunStartError
-    from scippneutron.data_streaming.data_stream import _data_stream  # noqa: E402
-    from scippneutron.data_streaming.data_stream import StopTime
+    from scippneutron.data_streaming.data_stream import (
+        StopTime,
+        _data_stream,  # noqa: E402
+    )
 except ImportError:
     pytest.skip("Kafka or Serialisation module is unavailable", allow_module_level=True)
 
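Note: the final hunk merges two imports from the same module into one parenthesised from-import, with the names sorted and the # noqa: E402 comment kept on _data_stream. This is standard isort/ruff import consolidation (an assumption; the hook is not named in the commit). Commits like this from pre-commit-ci can usually be reproduced locally with `pre-commit run --all-files`, provided the repository's .pre-commit-config.yaml lists the same hooks. Sketch with hypothetical module names:

    # before
    from mypackage.streaming import start_stream  # noqa: E402
    from mypackage.streaming import StopTime

    # after: one consolidated import, comment preserved on the moved name
    from mypackage.streaming import (
        StopTime,
        start_stream,  # noqa: E402
    )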
