32 changes: 32 additions & 0 deletions .github/workflows/formatting.yml
@@ -0,0 +1,32 @@
name: Pre-commit Checks

on:
pull_request:

jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'

- name: Install pre-commit
run: |
python -m pip install --upgrade pip
pip install pre-commit

- name: Run pre-commit
run: pre-commit run --all-files

# This step will show the exact files that were modified
- name: Check for modified files
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "The following files were modified by pre-commit:"
git status --porcelain
exit 1
fi
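For readers who prefer Python to shell, here is an equivalent sketch of the workflow's final "dirty tree" check (illustrative only, not part of the PR); it assumes `git` is on `PATH`, just like the step above:

```python
import subprocess
import sys

# Mirror the workflow's last step: fail if pre-commit modified any tracked files.
status = subprocess.run(
    ["git", "status", "--porcelain"],
    capture_output=True,
    text=True,
    check=True,
)
if status.stdout.strip():
    print("The following files were modified by pre-commit:")
    print(status.stdout, end="")
    sys.exit(1)
```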
14 changes: 14 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,14 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.1
hooks:
# Run the linter
- id: ruff
args: [ --fix ]
# Run the formatter
- id: ruff-format
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-added-large-files
args: ['--maxkb=2000']
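To illustrate what the two ruff hooks do to files like the ones changed below (quotes normalized to double quotes, spacing and line breaks reflowed), here is a small before/after sketch; the exact rule set depends on the `[tool.ruff]` settings added to `pyproject.toml` in this PR:

```python
# Before the hooks run:
block = {'name': 'an array',  'value': [0, 1, 2, 3]}

# After `ruff --fix` and `ruff-format`:
block = {"name": "an array", "value": [0, 1, 2, 3]}
```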
9 changes: 9 additions & 0 deletions environment.yml
@@ -0,0 +1,9 @@
name: easygdf
channels:
- conda-forge
- defaults
dependencies:
- python=3.12
- numpy
- pip
- pre-commit
4 changes: 2 additions & 2 deletions examples/initial_distribution.py
@@ -4,7 +4,7 @@
# Save some data to an initial distribution file. Unspecified required values are autofilled for us
easygdf.save_initial_distribution(
"initial.gdf",
x=np.random.normal(size=(3, )),
GBx=np.random.normal(size=(3, )),
x=np.random.normal(size=(3,)),
GBx=np.random.normal(size=(3,)),
t=np.random.random((3,)),
)
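As a companion to the example above, a hedged sketch of reading the file back with `load_initial_distribution` (exported by the package per the `__init__.py` changes below); the exact structure of the returned object isn't shown in this diff:

```python
import easygdf

# Read the distribution back; the autofilled columns should appear
# alongside the x, GBx, and t arrays written above.
dist = easygdf.load_initial_distribution("initial.gdf")
print(dist)
```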
2 changes: 1 addition & 1 deletion examples/minimal.py
@@ -5,7 +5,7 @@
blocks = [
{"name": "an array", "value": np.array([0, 1, 2, 3])},
{"name": "a string", "value": "Hello world!"},
{"name": "a group", "value": 3.14, "children": [{"name": "child", "value": 1.0}]}
{"name": "a group", "value": 3.14, "children": [{"name": "child", "value": 1.0}]},
]
easygdf.save("minimal.gdf", blocks)

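A possible round-trip check for the snippet above; hedged, because the exact shape of `easygdf.load`'s return value is not visible in this diff:

```python
import easygdf

# Load the file written by the example above and inspect what comes back.
data = easygdf.load("minimal.gdf")
print(data)
```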
7 changes: 6 additions & 1 deletion pyproject.toml
@@ -21,4 +21,9 @@ dependencies = [
easygdf = ["data/*"]

[project.urls]
Homepage = "https://github.com/electronsandstuff/easygdf"
Homepage = "https://github.com/electronsandstuff/easygdf"

[tool.ruff]
line-length = 120
indent-width = 4
target-version = "py312"
39 changes: 23 additions & 16 deletions scripts/generate_test_file.py
@@ -89,7 +89,7 @@
elif s == 8:
NUMPY_TO_GDF[t] = GDF_INT64
else:
raise ValueError("Unable to autodetect GDF flag for numpy data type \"{0}\" with size {1} bytes".format(t, s))
raise ValueError('Unable to autodetect GDF flag for numpy data type "{0}" with size {1} bytes'.format(t, s))


########################################################################################################################
@@ -113,10 +113,10 @@ def get_header(magic_number=94325877, gdf_version=(1, 1)):
)


def get_block_header(name="", dtype=easygdf.GDF_NULL, single=True, array=False, group_begin=False, group_end=False,
size=0):
flag = dtype + single * GDF_SINGLE + array * GDF_ARRAY + group_begin * GDF_GROUP_BEGIN \
+ group_end * GDF_GROUP_END
def get_block_header(
name="", dtype=easygdf.GDF_NULL, single=True, array=False, group_begin=False, group_end=False, size=0
):
flag = dtype + single * GDF_SINGLE + array * GDF_ARRAY + group_begin * GDF_GROUP_BEGIN + group_end * GDF_GROUP_END
return struct.pack("16sii", bytes(name, "ascii"), flag, size)


@@ -318,23 +318,32 @@ def get_normalize_screen_floats():
:return: Bytes string representing file
"""
f = get_header()
f += get_block_header(name="position", dtype=GDF_DOUBLE, single=True, array=False,
size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1], group_begin=True)
f += get_block_header(
name="position",
dtype=GDF_DOUBLE,
single=True,
array=False,
size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1],
group_begin=True,
)
f += struct.pack(GDF_DTYPES_STRUCT[GDF_DOUBLE][0], 0.0)

for k in ['ID', 'x', 'y', 'z', 'Bx', 'By', 'Bz', 't', 'm', 'q', 'nmacro', 'rmacro', 'rxy', 'G']:
for k in ["ID", "x", "y", "z", "Bx", "By", "Bz", "t", "m", "q", "nmacro", "rmacro", "rxy", "G"]:
dtype = GDF_DOUBLE
f += get_block_header(name=k, dtype=dtype, single=False, array=True, size=6 * GDF_DTYPES_STRUCT[dtype][1])
d = GDF_DTYPES_STRUCT[dtype][0]
f += struct.pack(6 * d, 0, 1, 2, 3, 4, 5)
f += get_block_header(name="Particles", dtype=GDF_DOUBLE, single=True, array=False,
size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1])
f += get_block_header(
name="Particles", dtype=GDF_DOUBLE, single=True, array=False, size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1]
)
f += struct.pack(GDF_DTYPES_STRUCT[GDF_DOUBLE][0], 0.0)
f += get_block_header(name="pCentral", dtype=GDF_DOUBLE, single=True, array=False,
size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1])
f += get_block_header(
name="pCentral", dtype=GDF_DOUBLE, single=True, array=False, size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1]
)
f += struct.pack(GDF_DTYPES_STRUCT[GDF_DOUBLE][0], 0.0)
f += get_block_header(name="Charge", dtype=GDF_DOUBLE, single=True, array=False,
size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1])
f += get_block_header(
name="Charge", dtype=GDF_DOUBLE, single=True, array=False, size=GDF_DTYPES_STRUCT[GDF_DOUBLE][1]
)
f += struct.pack(GDF_DTYPES_STRUCT[GDF_DOUBLE][0], 0.0)
f += get_block_header(name="", group_end=True)
return f
@@ -351,7 +360,6 @@ def write_file(path, b):
data_files_path = "easygdf/tests/data"
if __name__ == "__main__":
write_file(os.path.join(data_files_path, "normalize_screen_floats.gdf"), get_normalize_screen_floats())
'''
write_file(os.path.join(data_files_path, "version_mismatch.gdf"), get_file_version_mismatch())
write_file(os.path.join(data_files_path, "wrong_magic_number.gdf"), get_file_wrong_magic_number())
write_file(os.path.join(data_files_path, "too_much_recursion.gdf"), get_file_too_much_recursion())
@@ -367,4 +375,3 @@ def write_file(path, b):
write_file(os.path.join(data_files_path, "invalid_size_array.gdf"), get_file_invalid_array_size())
write_file(os.path.join(data_files_path, "nested_groups.gdf"), get_file_nested_group())
write_file(os.path.join(data_files_path, "null_array.gdf"), get_file_null_array())
'''
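For orientation, here is a standalone sketch of the block-header layout the script packs with `struct`: a 16-byte ASCII name, a 32-bit flag word (data-type code plus the single/array/group bits), and a 32-bit payload size. The constants are copied from `src/easygdf/constants.py` below so the snippet runs on its own:

```python
import struct

# Flag bits, as defined in src/easygdf/constants.py
GDF_DOUBLE = 0x0003
GDF_SINGLE = 1024

# A scalar ("single") double block named "position" with an 8-byte payload.
flag = GDF_DOUBLE + GDF_SINGLE
header = struct.pack("16sii", b"position", flag, 8)
payload = struct.pack("d", 0.0)
print(len(header), len(payload))  # 24 8
```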
23 changes: 7 additions & 16 deletions scripts/trim_test_files.py
@@ -62,11 +62,11 @@ def round_sigfigs(x, sigfigs):
omags -= 1.0

else: # elif np.all(np.isreal( mantissas )):
fixmsk = mantissas < 1.0,
fixmsk = (mantissas < 1.0,)
mantissas[fixmsk] *= 10.0
omags[fixmsk] -= 1.0

result = xsgn * np.around(mantissas, decimals=sigfigs - 1) * 10.0 ** omags
result = xsgn * np.around(mantissas, decimals=sigfigs - 1) * 10.0**omags
if matrixflag:
result = np.matrix(result, copy=False)

@@ -103,17 +103,11 @@ def trim_screens_tout():
# Trim down the arrays to the correct number of particles
particle_blocks = []
for b in trimmed_blocks:
new_block = {
"name": b["name"],
"param": b["param"],
"children": []
}
new_block = {"name": b["name"], "param": b["param"], "children": []}
for c in b["children"]:
new_block["children"].append({
"name": c["name"],
"param": round_sigfigs(c["param"][:n_particles], 4),
"children": []
})
new_block["children"].append(
{"name": c["name"], "param": round_sigfigs(c["param"][:n_particles], 4), "children": []}
)
particle_blocks.append(new_block)
d["blocks"] = particle_blocks

@@ -150,10 +144,7 @@ def trim_initial_distribution():
trimmed_blocks = []
for b in d["blocks"]:
if isinstance(b["param"], np.ndarray):
trimmed_blocks.append({
"name": b["name"],
"param": round_sigfigs(b["param"][:n_particles], 4)
})
trimmed_blocks.append({"name": b["name"], "param": round_sigfigs(b["param"][:n_particles], 4)})
d["blocks"] = trimmed_blocks

# Save the file
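A condensed, self-contained sketch of the significant-figure rounding that `round_sigfigs` implements above (split each value into mantissa and order of magnitude, round the mantissa, recompose); the function name and the simplified handling of zeros are illustrative, not the script's actual code:

```python
import numpy as np


def round_sigfigs_sketch(x, sigfigs):
    # Decompose |x| as mantissa * 10**omag, round the mantissa to
    # sigfigs - 1 decimals, then recompose with the original sign.
    x = np.asarray(x, dtype=float)
    sign = np.sign(x)
    absx = np.where(x == 0.0, 1.0, np.abs(x))  # avoid log10(0)
    omag = np.floor(np.log10(absx))
    mantissa = absx / 10.0**omag
    return sign * np.around(mantissa, decimals=sigfigs - 1) * 10.0**omag


print(round_sigfigs_sketch([1234.5678, 0.00123456], 4))  # ~[1235.0, 0.001235]
```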
65 changes: 59 additions & 6 deletions src/easygdf/__init__.py
@@ -1,8 +1,61 @@
# This file is part of easygdf and is released under the BSD 3-clause license

from .easygdf import GDF_ASCII, GDF_DOUBLE, GDF_FLOAT, GDF_INT8, GDF_INT16, GDF_INT32, GDF_INT64, GDF_NULL, GDF_UINT8
from .easygdf import GDF_UINT16, GDF_UINT32, GDF_UINT64, GDF_UNDEFINED, GDF_NAME_LEN, GDF_MAGIC
from .easygdf import is_gdf, load, save
from .constants import (
GDF_ASCII,
GDF_DOUBLE,
GDF_FLOAT,
GDF_INT8,
GDF_INT16,
GDF_INT32,
GDF_INT64,
GDF_NULL,
GDF_UINT8,
GDF_UINT16,
GDF_UINT32,
GDF_UINT64,
GDF_UNDEFINED,
GDF_NAME_LEN,
GDF_MAGIC,
)
from .easygdf import (
load,
save,
)
from .initial_distribution import load_initial_distribution, save_initial_distribution
from .screens_touts import load_screens_touts, save_screens_touts
from .utils import get_example_screen_tout_filename, get_example_initial_distribution, GDFError, GDFIOError
from .utils import (
get_example_screen_tout_filename,
get_example_initial_distribution,
is_gdf,
)
from .exceptions import (
GDFError,
GDFIOError,
)

__all__ = [
"GDF_ASCII",
"GDF_DOUBLE",
"GDF_FLOAT",
"GDF_INT8",
"GDF_INT16",
"GDF_INT32",
"GDF_INT64",
"GDF_NULL",
"GDF_UINT8",
"GDF_UINT16",
"GDF_UINT32",
"GDF_UINT64",
"GDF_UNDEFINED",
"GDF_NAME_LEN",
"GDF_MAGIC",
"is_gdf",
"load",
"save",
"load_initial_distribution",
"save_initial_distribution",
"load_screens_touts",
"save_screens_touts",
"get_example_screen_tout_filename",
"get_example_initial_distribution",
"GDFError",
"GDFIOError",
]
88 changes: 88 additions & 0 deletions src/easygdf/constants.py
@@ -0,0 +1,88 @@
import numpy as np


# Define constants for the GDF specification
GDF_NAME_LEN = 16
GDF_MAGIC = 94325877


# The GDF data type identifiers
GDF_ASCII = 0x0001
GDF_DOUBLE = 0x0003
GDF_FLOAT = 0x0090
GDF_INT8 = 0x0030
GDF_INT16 = 0x0050
GDF_INT32 = 0x0002
GDF_INT64 = 0x0080
GDF_NULL = 0x0010
GDF_UINT8 = 0x0020
GDF_UINT16 = 0x0040
GDF_UINT32 = 0x0060
GDF_UINT64 = 0x0070
GDF_UNDEFINED = 0x0000


# Conversion from GDF types to information used by struct to convert into a python type. First element of the tuple is
# the identifier for conversion and the second element is the size required by struct (so we can double check the file)
GDF_DTYPES_STRUCT = {
GDF_DOUBLE: ("d", 8),
GDF_FLOAT: ("f", 4),
GDF_INT8: ("b", 1),
GDF_INT16: ("h", 2),
GDF_INT32: ("i", 4),
GDF_INT64: ("q", 8),
GDF_UINT8: ("B", 1),
GDF_UINT16: ("H", 2),
GDF_UINT32: ("I", 4),
GDF_UINT64: ("Q", 8),
}


# The same conversion, but for going to numpy data types
GDF_DTYPES_NUMPY = {
GDF_DOUBLE: (np.float64, 8),
GDF_FLOAT: (np.float32, 4),
GDF_INT8: (np.int8, 1),
GDF_INT16: (np.int16, 2),
GDF_INT32: (np.int32, 4),
GDF_INT64: (np.int64, 8),
GDF_UINT8: (np.uint8, 1),
GDF_UINT16: (np.uint16, 2),
GDF_UINT32: (np.uint32, 4),
GDF_UINT64: (np.uint64, 8),
}


# Going from numpy data types to GDF types
NUMPY_TO_GDF = {
"int8": GDF_INT8,
"int16": GDF_INT16,
"int32": GDF_INT32,
"int64": GDF_INT64,
"uint8": GDF_UINT8,
"uint16": GDF_UINT16,
"uint32": GDF_UINT32,
"uint64": GDF_UINT64,
"float_": GDF_DOUBLE,
"float32": GDF_FLOAT,
"float64": GDF_DOUBLE,
}


# Detect platform specific data types for numpy
for t in ["int_", "intc", "intp"]:
s = np.dtype(t).itemsize
if s == 4:
NUMPY_TO_GDF[t] = GDF_INT32
elif s == 8:
NUMPY_TO_GDF[t] = GDF_INT64
else:
raise ValueError('Unable to autodetect GDF flag for numpy data type "{0}" with size {1} bytes'.format(t, s))


# The bit masks for flags in the GDF header
GDF_DTYPE = 255
GDF_GROUP_BEGIN = 256
GDF_GROUP_END = 512
GDF_SINGLE = 1024
GDF_ARRAY = 2048
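The encoding side of these masks is shown in `scripts/generate_test_file.py` above (the flag is built by adding the bits to the data-type code); the matching decode, assuming the masks are applied with a bitwise AND, would look something like:

```python
from easygdf.constants import (
    GDF_ARRAY,
    GDF_DOUBLE,
    GDF_DTYPE,
    GDF_GROUP_BEGIN,
    GDF_GROUP_END,
    GDF_SINGLE,
)

# Decode a block-header flag word into its data-type code and structure bits.
flag = GDF_DOUBLE + GDF_SINGLE  # e.g. a scalar ("single") double block
dtype_code = flag & GDF_DTYPE   # -> GDF_DOUBLE
print(
    hex(dtype_code),
    bool(flag & GDF_SINGLE),
    bool(flag & GDF_ARRAY),
    bool(flag & GDF_GROUP_BEGIN),
    bool(flag & GDF_GROUP_END),
)
```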