Skip to content

Commit 4e722cc

Browse files
committed
Merge branch 'main' into run_examples_from_pals
2 parents 085fc2c + 7c45243 commit 4e722cc

6 files changed

Lines changed: 220 additions & 66 deletions

File tree

.github/workflows/codeql.yml

Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
# For most projects, this workflow file will not need changing; you simply need
2+
# to commit it to your repository.
3+
#
4+
# You may wish to alter this file to override the set of languages analyzed,
5+
# or to provide custom queries or build logic.
6+
#
7+
# ******** NOTE ********
8+
# We have attempted to detect the languages in your repository. Please check
9+
# the `language` matrix defined below to confirm you have the correct set of
10+
# supported CodeQL languages.
11+
#
12+
name: "CodeQL Advanced"
13+
14+
on:
15+
push:
16+
branches: [ "main" ]
17+
pull_request:
18+
branches: [ "main" ]
19+
20+
jobs:
21+
analyze:
22+
name: Analyze (${{ matrix.language }})
23+
# Runner size impacts CodeQL analysis time. To learn more, please see:
24+
# - https://gh.io/recommended-hardware-resources-for-running-codeql
25+
# - https://gh.io/supported-runners-and-hardware-resources
26+
# - https://gh.io/using-larger-runners (GitHub.com only)
27+
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
28+
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
29+
permissions:
30+
# required for all workflows
31+
security-events: write
32+
33+
# required to fetch internal or private CodeQL packs
34+
packages: read
35+
36+
# only required for workflows in private repositories
37+
actions: read
38+
contents: read
39+
40+
strategy:
41+
fail-fast: false
42+
matrix:
43+
include:
44+
- language: actions
45+
build-mode: none
46+
- language: python
47+
build-mode: none
48+
# CodeQL supports the following values for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'rust', 'swift'
49+
# Use `c-cpp` to analyze code written in C, C++ or both
50+
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
51+
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
52+
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
53+
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
54+
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
55+
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
56+
steps:
57+
- name: Checkout repository
58+
uses: actions/checkout@v4
59+
60+
# Add any setup steps before running the `github/codeql-action/init` action.
61+
# This includes steps like installing compilers or runtimes (`actions/setup-node`
62+
# or others). This is typically only required for manual builds.
63+
# - name: Setup runtime (example)
64+
# uses: actions/setup-example@v1
65+
66+
# Initializes the CodeQL tools for scanning.
67+
- name: Initialize CodeQL
68+
uses: github/codeql-action/init@v4
69+
with:
70+
languages: ${{ matrix.language }}
71+
build-mode: ${{ matrix.build-mode }}
72+
# If you wish to specify custom queries, you can do so here or in a config file.
73+
# By default, queries listed here will override any specified in a config file.
74+
# Prefix the list here with "+" to use these queries and those in the config file.
75+
76+
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
77+
# queries: security-extended,security-and-quality
78+
79+
# If the analyze step fails for one of the languages you are analyzing with
80+
# "We were unable to automatically build your code", modify the matrix above
81+
# to set the build mode to "manual" for that language. Then modify this step
82+
# to build your code.
83+
# ℹ️ Command-line programs to run using the OS shell.
84+
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
85+
- name: Run manual build steps
86+
if: matrix.build-mode == 'manual'
87+
shell: bash
88+
run: |
89+
echo 'If you are using a "manual" build mode for one or more of the' \
90+
'languages you are analyzing, replace this with the commands to build' \
91+
'your code, for example:'
92+
echo ' make bootstrap'
93+
echo ' make release'
94+
exit 1
95+
96+
- name: Perform CodeQL Analysis
97+
uses: github/codeql-action/analyze@v4
98+
with:
99+
category: "/language:${{matrix.language}}"

examples/fodo.py

Lines changed: 12 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,3 @@
1-
import json
2-
import yaml
3-
41
from pals import MagneticMultipoleParameters
52
from pals import Drift
63
from pals import Quadrupole
@@ -45,34 +42,24 @@ def main():
4542
drift3,
4643
],
4744
)
45+
4846
# Serialize to YAML
49-
yaml_data = yaml.dump(line.model_dump(), default_flow_style=False)
50-
print("Dumping YAML data...")
51-
print(f"{yaml_data}")
52-
# Write YAML data to file
53-
yaml_file = "examples_fodo.yaml"
54-
with open(yaml_file, "w") as file:
55-
file.write(yaml_data)
47+
yaml_file = "examples_fodo.pals.yaml"
48+
line.to_file(yaml_file)
49+
5650
# Read YAML data from file
57-
with open(yaml_file, "r") as file:
58-
yaml_data = yaml.safe_load(file)
59-
# Parse YAML data
60-
loaded_line = BeamLine(**yaml_data)
51+
loaded_line = BeamLine.from_file(yaml_file)
52+
6153
# Validate loaded data
6254
assert line == loaded_line
55+
6356
# Serialize to JSON
64-
json_data = json.dumps(line.model_dump(), sort_keys=True, indent=2)
65-
print("Dumping JSON data...")
66-
print(f"{json_data}")
67-
# Write JSON data to file
68-
json_file = "examples_fodo.json"
69-
with open(json_file, "w") as file:
70-
file.write(json_data)
57+
json_file = "examples_fodo.pals.json"
58+
line.to_file(json_file)
59+
7160
# Read JSON data from file
72-
with open(json_file, "r") as file:
73-
json_data = json.loads(file.read())
74-
# Parse JSON data
75-
loaded_line = BeamLine(**json_data)
61+
loaded_line = BeamLine.from_file(json_file)
62+
7663
# Validate loaded data
7764
assert line == loaded_line
7865

src/pals/functions.py

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
"""Public, free-standing functions for PALS."""
2+
3+
import os
4+
5+
6+
def inspect_file_extensions(filename: str):
7+
"""Attempt to strip two levels of file extensions to determine the schema.
8+
9+
filename examples: fodo.pals.yaml, fodo.pals.json, ...
10+
"""
11+
file_noext, extension = os.path.splitext(filename)
12+
file_noext_noext, extension_inner = os.path.splitext(file_noext)
13+
14+
if extension_inner != ".pals":
15+
raise RuntimeError(
16+
f"inspect_file_extensions: No support for file {filename} with extension {extension}. "
17+
f"PALS files must end in .pals.json or .pals.yaml or similar."
18+
)
19+
20+
return {
21+
"file_noext": file_noext,
22+
"extension": extension,
23+
"file_noext_noext": file_noext_noext,
24+
"extension_inner": extension_inner,
25+
}
26+
27+
28+
def load_file_to_dict(filename: str) -> dict:
29+
# Attempt to strip two levels of file extensions to determine the schema.
30+
# Examples: fodo.pals.yaml, fodo.pals.json, ...
31+
file_noext, extension, file_noext_noext, extension_inner = inspect_file_extensions(
32+
filename
33+
).values()
34+
35+
# examples: fodo.pals.yaml, fodo.pals.json
36+
with open(filename, "r") as file:
37+
if extension == ".json":
38+
import json
39+
40+
pals_data = json.loads(file.read())
41+
42+
elif extension == ".yaml":
43+
import yaml
44+
45+
pals_data = yaml.safe_load(file)
46+
47+
# TODO: toml, xml
48+
49+
else:
50+
raise RuntimeError(
51+
f"load_file_to_dict: No support for PALS file {filename} with extension {extension} yet."
52+
)
53+
54+
return pals_data
55+
56+
57+
def store_dict_to_file(filename: str, pals_dict: dict):
58+
file_noext, extension, file_noext_noext, extension_inner = inspect_file_extensions(
59+
filename
60+
).values()
61+
62+
# examples: fodo.pals.yaml, fodo.pals.json
63+
if extension == ".json":
64+
import json
65+
66+
json_data = json.dumps(pals_dict, sort_keys=True, indent=2)
67+
with open(filename, "w") as file:
68+
file.write(json_data)
69+
70+
elif extension == ".yaml":
71+
import yaml
72+
73+
yaml_data = yaml.dump(pals_dict, default_flow_style=False)
74+
with open(filename, "w") as file:
75+
file.write(yaml_data)
76+
77+
# TODO: toml, xml
78+
79+
else:
80+
raise RuntimeError(
81+
f"store_dict_to_file: No support for PALS file {filename} with extension {extension} yet."
82+
)

src/pals/kinds/BeamLine.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33

44
from .all_elements import get_all_elements_as_annotation
55
from .mixin import BaseElement
6+
from ..functions import load_file_to_dict, store_dict_to_file
67

78

89
class BeamLine(BaseElement):
@@ -25,3 +26,14 @@ def model_dump(self, *args, **kwargs):
2526
from pals.kinds.mixin.all_element_mixin import dump_element_list
2627

2728
return dump_element_list(self, "line", *args, **kwargs)
29+
30+
@staticmethod
31+
def from_file(filename: str) -> "BeamLine":
32+
"""Load a BeamLine from a text file"""
33+
pals_dict = load_file_to_dict(filename)
34+
return BeamLine(**pals_dict)
35+
36+
def to_file(self, filename: str):
37+
"""Save a BeamLine to a text file"""
38+
pals_dict = self.model_dump()
39+
store_dict_to_file(filename, pals_dict)

tests/test_elements.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def test_Quadrupole():
105105
assert element.ElectricMultipoleP.En2 == element_electric_multipole_En2
106106
assert element.ElectricMultipoleP.Es2 == element_electric_multipole_Es2
107107
assert element.ElectricMultipoleP.tilt2 == element_electric_multipole_tilt2
108-
# Serialize the BeamLine object to YAML
108+
# Serialize the element to YAML
109109
yaml_data = yaml.dump(element.model_dump(), default_flow_style=False)
110110
print(f"\n{yaml_data}")
111111

tests/test_serialization.py

Lines changed: 14 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
1-
import json
21
import os
3-
import yaml
42

53
import pals
64

@@ -13,17 +11,10 @@ def test_yaml():
1311
# Create line with both elements
1412
line = pals.BeamLine(name="line", line=[element1, element2])
1513
# Serialize the BeamLine object to YAML
16-
yaml_data = yaml.dump(line.model_dump(), default_flow_style=False)
17-
print(f"\n{yaml_data}")
18-
# Write the YAML data to a test file
19-
test_file = "line.yaml"
20-
with open(test_file, "w") as file:
21-
file.write(yaml_data)
14+
test_file = "line.pals.yaml"
15+
line.to_file(test_file)
2216
# Read the YAML data from the test file
23-
with open(test_file, "r") as file:
24-
yaml_data = yaml.safe_load(file)
25-
# Parse the YAML data back into a BeamLine object
26-
loaded_line = pals.BeamLine(**yaml_data)
17+
loaded_line = pals.BeamLine.from_file(test_file)
2718
# Remove the test file
2819
os.remove(test_file)
2920
# Validate loaded BeamLine object
@@ -38,17 +29,10 @@ def test_json():
3829
# Create line with both elements
3930
line = pals.BeamLine(name="line", line=[element1, element2])
4031
# Serialize the BeamLine object to JSON
41-
json_data = json.dumps(line.model_dump(), sort_keys=True, indent=2)
42-
print(f"\n{json_data}")
43-
# Write the JSON data to a test file
44-
test_file = "line.json"
45-
with open(test_file, "w") as file:
46-
file.write(json_data)
32+
test_file = "line.pals.json"
33+
line.to_file(test_file)
4734
# Read the JSON data from the test file
48-
with open(test_file, "r") as file:
49-
json_data = json.loads(file.read())
50-
# Parse the JSON data back into a BeamLine object
51-
loaded_line = pals.BeamLine(**json_data)
35+
loaded_line = pals.BeamLine.from_file(test_file)
5236
# Remove the test file
5337
os.remove(test_file)
5438
# Validate loaded BeamLine object
@@ -224,21 +208,16 @@ def test_comprehensive_lattice():
224208
],
225209
)
226210

227-
# Test serialization to YAML
228-
yaml_data = yaml.dump(lattice.model_dump(), default_flow_style=False)
229-
print(f"\nComprehensive lattice YAML:\n{yaml_data}")
230-
231211
# Write to temporary file
232-
yaml_file = "comprehensive_lattice.yaml"
233-
with open(yaml_file, "w") as file:
234-
file.write(yaml_data)
212+
yaml_file = "comprehensive_lattice.pals.yaml"
213+
lattice.to_file(yaml_file)
235214

236215
# Read back from file
237216
with open(yaml_file, "r") as file:
238-
loaded_yaml_data = yaml.safe_load(file)
217+
print(f"\nComprehensive lattice YAML:\n{file.read()}")
239218

240219
# Deserialize back to Python object using Pydantic model logic
241-
loaded_lattice = pals.BeamLine(**loaded_yaml_data)
220+
loaded_lattice = pals.BeamLine.from_file(yaml_file)
242221

243222
# Verify the loaded lattice has the correct structure and parameter groups
244223
assert len(loaded_lattice.line) == 31 # Should have 31 elements
@@ -284,21 +263,16 @@ def test_comprehensive_lattice():
284263
assert unionele_loaded.elements[1].kind == "Drift"
285264
assert unionele_loaded.elements[1].length == 0.1
286265

287-
# Test serialization to JSON
288-
json_data = json.dumps(lattice.model_dump(), sort_keys=True, indent=2)
289-
print(f"\nComprehensive lattice JSON:\n{json_data}")
290-
291266
# Write to temporary file
292-
json_file = "comprehensive_lattice.json"
293-
with open(json_file, "w") as file:
294-
file.write(json_data)
267+
json_file = "comprehensive_lattice.pals.json"
268+
lattice.to_file(json_file)
295269

296270
# Read back from file
297271
with open(json_file, "r") as file:
298-
loaded_json_data = json.loads(file.read())
272+
print(f"\nComprehensive lattice JSON:\n{file.read()}")
299273

300274
# Deserialize back to Python object using Pydantic model logic
301-
loaded_lattice_json = pals.BeamLine(**loaded_json_data)
275+
loaded_lattice_json = pals.BeamLine.from_file(json_file)
302276

303277
# Verify the loaded lattice has the correct structure and parameter groups
304278
assert len(loaded_lattice_json.line) == 31 # Should have 31 elements

0 commit comments

Comments
 (0)