MAD-X pattern for DOROS BPMs #445

Merged · 9 commits · Sep 2, 2024
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,11 @@
# OMC3 Changelog

#### 2024-08-14 - v0.15.3 - _jdilly_

- Fixed:
- Add DOROS BPMs to `twiss.dat`.
- Some Pandas `FutureWarning`s, `DeprecationWarning`s and `PerformanceWarning`s

#### 2024-08-14 - v0.15.2 - _fesoubel_, _jdilly_

- Fixed:
2 changes: 1 addition & 1 deletion omc3/__init__.py
@@ -11,7 +11,7 @@
__title__ = "omc3"
__description__ = "An accelerator physics tools package for the OMC team at CERN."
__url__ = "https://github.com/pylhc/omc3"
__version__ = "0.15.2"
__version__ = "0.15.3"
__author__ = "pylhc"
__author_email__ = "pylhc@github.com"
__license__ = "MIT"
34 changes: 22 additions & 12 deletions omc3/harpy/frequency.py
@@ -9,7 +9,6 @@
analysis.
Also searches for resonances in the calculated spectra.
"""
from collections import OrderedDict
from numbers import Number

import numpy as np
@@ -136,7 +135,7 @@ def find_resonances(tunes, nturns, plane, spectra, order_resonances):
"""
resonance_lines = _get_resonance_lines(order_resonances)

df = pd.DataFrame(index=spectra["FREQS"].index, columns=OrderedDict())
df = pd.DataFrame(index=spectra["FREQS"].index, dtype=pd.Float64Dtype())
resonances_freqs = _compute_resonances_with_freqs(plane, tunes, resonance_lines)
if tunes[2] > 0.0:
resonances_freqs.update(_compute_resonances_with_freqs("Z", tunes, resonance_lines))
@@ -145,10 +144,12 @@ def find_resonances(tunes, nturns, plane, spectra, order_resonances):
max_coefs, max_freqs = _search_highest_coefs(resonances_freqs[resonance], tolerance,
spectra["FREQS"], spectra["COEFFS"])
resstr = _get_resonance_suffix(resonance)
df[f"{COL_FREQ}{resstr}"], df[f"{COL_AMP}{resstr}"], df[f"{COL_PHASE}{resstr}"] = _get_freqs_amps_phases(
max_freqs, max_coefs, resonances_freqs[resonance])
columns = [f"{COL_FREQ}{resstr}", f"{COL_AMP}{resstr}", f"{COL_PHASE}{resstr}"]
df_resonance = _get_freqs_amps_phases(max_freqs, max_coefs, resonances_freqs[resonance])
df_resonance.columns = columns
df.loc[:, columns] = df_resonance

df[f"{COL_PHASE}{resstr}"] = _realign_phases(df.loc[:, f"{COL_PHASE}{resstr}"].to_numpy(),
df.loc[:, f"{COL_PHASE}{resstr}"] = _realign_phases(df.loc[:, f"{COL_PHASE}{resstr}"].to_numpy(),
df.loc[:, f"{COL_FREQ}{resstr}"].to_numpy(), nturns)

return df
@@ -161,25 +162,34 @@ def _get_main_resonances(tunes, spectra, plane, tolerance, df):
raise ValueError(f"No main {plane} resonances found, "
f"try to increase the tolerance or adjust the tunes")
bad_bpms_by_tune = spectra["COEFFS"].loc[max_coefs == 0.].index
df[f"{COL_TUNE}{plane}"], df[f"{COL_AMP}{plane}"], df[f"{COL_MU}{plane}"] = _get_freqs_amps_phases(
max_freqs, max_coefs, freq)
columns = [f"{COL_TUNE}{plane}", f"{COL_AMP}{plane}", f"{COL_MU}{plane}"]
df_main = _get_freqs_amps_phases(max_freqs, max_coefs, freq)
df_main.columns = columns
df.loc[:, columns] = df_main
if plane != "Z":
df = df.loc[df.index.difference(bad_bpms_by_tune)]
return df, bad_bpms_by_tune


def _calculate_natural_tunes(spectra, nattunes, tolerance, plane):
df = pd.DataFrame(index=spectra["FREQS"].index, columns=OrderedDict())
columns = [f"{COL_NATTUNE}{plane}", f"{COL_NATAMP}{plane}", f"{COL_NATMU}{plane}"]
x, y, _ = nattunes
freq = x % 1 if plane == "X" else y % 1
max_coefs, max_freqs = _search_highest_coefs(freq, tolerance, spectra["FREQS"], spectra["COEFFS"])
df[f"{COL_NATTUNE}{plane}"], df[f"{COL_NATAMP}{plane}"], df[f"{COL_NATMU}{plane}"] = _get_freqs_amps_phases(
max_freqs, max_coefs, freq)
df = _get_freqs_amps_phases(max_freqs, max_coefs, freq)
df.columns = columns
return df


def _get_freqs_amps_phases(max_freqs, max_coefs, freq):
return max_freqs, np.abs(max_coefs), np.sign(0.5 - freq) * np.angle(max_coefs) / PI2
def _get_freqs_amps_phases(max_freqs: pd.Series, max_coefs: pd.Series, freq: float) -> pd.DataFrame:
return pd.DataFrame(
{
f"{COL_FREQ}": max_freqs,
f"{COL_AMP}": np.abs(max_coefs),
f"{COL_PHASE}": np.sign(0.5 - freq) * np.angle(max_coefs) / PI2,
},
dtype=pd.Float64Dtype(),
)


def _realign_phases(phase_data, freq_data, nturns):
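With `_get_freqs_amps_phases` now returning a consistently typed `DataFrame`, the callers rename its columns and write all three at once instead of unpacking a tuple into separate column assignments. A minimal standalone sketch of that pattern (the BPM names, values and column suffix below are placeholders, not omc3 data):

```python
import pandas as pd

index = ["BPM.10L1.B1", "BPM.11L1.B1"]
df = pd.DataFrame(index=index, dtype=pd.Float64Dtype())

# the helper returns one small, consistently typed frame instead of a tuple of arrays
block = pd.DataFrame(
    {"FREQ": [0.31, 0.32], "AMP": [1.2, 0.9], "PHASE": [0.05, -0.10]},
    index=index,
    dtype=pd.Float64Dtype(),
)
columns = ["FREQX", "AMPX", "PHASEX"]  # attach the resonance/plane suffix, as in the PR
block.columns = columns
df.loc[:, columns] = block             # write all three columns in one assignment
print(df)
```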
44 changes: 25 additions & 19 deletions omc3/harpy/handler.py
@@ -184,29 +184,35 @@ def _get_output_path_without_suffix(output_dir, file_path):
return join(output_dir, basename(file_path))


def _rescale_amps_to_main_line_and_compute_noise(panda, plane):
def _rescale_amps_to_main_line_and_compute_noise(df: pd.DataFrame, plane: str) -> pd.DataFrame:
"""
TODO follows non-transpararent convention
TODO the consequent analysis has to be changed if removed
"""
cols = [col for col in panda.columns.to_numpy() if col.startswith(COL_AMP)]
cols = [col for col in df.columns.to_numpy() if col.startswith(COL_AMP)]
cols.remove(f"{COL_AMP}{plane}")
panda.loc[:, cols] = panda.loc[:, cols].div(panda.loc[:, f"{COL_AMP}{plane}"], axis="index")
amps = panda.loc[:, f"{COL_AMP}{plane}"].to_numpy()
df.loc[:, cols] = df.loc[:, cols].div(df.loc[:, f"{COL_AMP}{plane}"], axis="index")
amps = df.loc[:, f"{COL_AMP}{plane}"].to_numpy()
# Division by two for backwards compatibility with Drive, i.e. the unit is [2mm]
# TODO later remove
panda[f"{COL_AMP}{plane}"] = panda.loc[:, f"{COL_AMP}{plane}"].to_numpy() / 2
if f"{COL_NATAMP}{plane}" in panda.columns:
panda[f"{COL_NATAMP}{plane}"] = panda.loc[:, f"{COL_NATAMP}{plane}"].to_numpy() / 2

if np.max(panda.loc[:, 'NOISE'].to_numpy()) == 0.0:
return panda # Do not calculated errors when no noise was calculated
noise_scaled = panda.loc[:, 'NOISE'] / amps
panda.loc[:, "NOISE_SCALED"] = noise_scaled
panda.loc[:, f"{COL_ERR}{COL_AMP}{plane}"] = panda.loc[:, 'NOISE']
if f"{COL_NATTUNE}{plane}" in panda.columns:
panda.loc[:, f"{COL_ERR}{COL_NATAMP}{plane}"] = panda.loc[:, 'NOISE']
for col in cols:
this_amp = panda.loc[:, col]
panda.loc[:, f"{COL_ERR}{col}"] = noise_scaled * np.sqrt(1 + np.square(this_amp))
return panda
df[f"{COL_AMP}{plane}"] = df.loc[:, f"{COL_AMP}{plane}"].to_numpy() / 2
if f"{COL_NATAMP}{plane}" in df.columns:
df[f"{COL_NATAMP}{plane}"] = df.loc[:, f"{COL_NATAMP}{plane}"].to_numpy() / 2

if np.max(df.loc[:, 'NOISE'].to_numpy()) == 0.0:
return df # Do not calculated errors when no noise was calculated
noise_scaled = df.loc[:, 'NOISE'] / amps
df.loc[:, "NOISE_SCALED"] = noise_scaled
df.loc[:, f"{COL_ERR}{COL_AMP}{plane}"] = df.loc[:, 'NOISE']
if f"{COL_NATTUNE}{plane}" in df.columns:
df.loc[:, f"{COL_ERR}{COL_NATAMP}{plane}"] = df.loc[:, 'NOISE']

# Create dedicated dataframe with error columns to assign later (cleaner
# and faster than assigning individual columns)
df_amp = pd.DataFrame(
data={f"{COL_ERR}{col}": noise_scaled * np.sqrt(1 + np.square(df.loc[:, col])) for col in cols},
index=df.index,
dtype=pd.Float64Dtype()
)
df.loc[:, df_amp.columns] = df_amp
return df
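The error columns follow the same idea: build one dedicated frame and assign it with a single `.loc` call rather than growing the main frame column by column, which is a common source of pandas `PerformanceWarning`s about fragmentation. A rough sketch with made-up column names and noise values:

```python
import numpy as np
import pandas as pd

df = pd.DataFrame(
    {"AMP10": [0.10, 0.20], "AMP01": [0.30, 0.15], "NOISE": [0.01, 0.02]},
    index=["BPM.A", "BPM.B"],
)
noise_scaled = df["NOISE"] / df["AMP10"]

# dedicated frame with all error columns, built in one go
err_cols = pd.DataFrame(
    {f"ERR{col}": noise_scaled * np.sqrt(1 + np.square(df[col])) for col in ("AMP10", "AMP01")},
    index=df.index,
)
df.loc[:, err_cols.columns] = err_cols  # one assignment instead of one insert per column
print(df)
```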
1 change: 1 addition & 0 deletions omc3/model/madx_macros/general.macros.madx
@@ -21,6 +21,7 @@ select_monitors(): macro = {
k1l, k1sl, k2l, k3l, k4l, wx, wy, phix,
phiy, dmux, dmuy, keyword, dbx, dby,
r11, r12, r21, r22;
select, flag=twiss, pattern="_DOROS$";
}


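The added `select` statement keeps every element whose name ends in `_DOROS` in the TWISS output. A quick way to check the result from Python (the file name and index column below are the usual omc3 conventions, used here as assumptions):

```python
import tfs

df_twiss = tfs.read("twiss.dat", index="NAME")  # path is an assumption
doros_bpms = df_twiss[df_twiss.index.str.endswith("_DOROS")]
print(doros_bpms.index.tolist())
```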
2 changes: 1 addition & 1 deletion omc3/optics_measurements/beta_from_phase.py
@@ -321,7 +321,7 @@ def _assign_uncertainties(twiss_full, errordefspath):
"""
LOGGER.debug("Start creating uncertainty information")
errdefs = tfs.read(errordefspath)
twiss_full = twiss_full.assign(UNC=False, dK1=0, KdS=0, mKdS=0, dX=0, BPMdS=0)
twiss_full = twiss_full.assign(UNC=False, dK1=0.0, KdS=0.0, mKdS=0.0, dX=0.0, BPMdS=0.0)
# loop over uncertainty definitions, fill the respective columns, set UNC to true
for indx in errdefs.index:
patt = errdefs.loc[indx, "PATTERN"]
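The only change here is `0` to `0.0` for the numeric fill values, so the new uncertainty columns start out as floats; with integer literals they would be created as `int64`, and later float writes would trigger the dtype-casting `FutureWarning`s that newer pandas versions emit. A tiny illustration (assumed, not omc3 code):

```python
import pandas as pd

df = pd.DataFrame(index=["BPM.A", "BPM.B"])
print(df.assign(dK1=0)["dK1"].dtype)    # int64: later float writes would force a cast
print(df.assign(dK1=0.0)["dK1"].dtype)  # float64: ready to hold the error values directly
```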
6 changes: 3 additions & 3 deletions omc3/optics_measurements/kick.py
@@ -5,7 +5,6 @@
This module contains kick functionality of ``optics_measurements``.
It provides functions to compute kick actions.
"""
from collections import OrderedDict
from contextlib import suppress
from os.path import join

@@ -63,7 +62,8 @@ def _get_kick(measure_input, files, plane):
load_columns, calc_columns, column_types = _get_column_mapping(plane)
kick_frame = pd.DataFrame(data=0.,
index=range(len(files[plane])),
columns=list(load_columns.keys()) + calc_columns)
columns=list(column_types.keys()))
kick_frame = kick_frame.astype(column_types)

for i, df in enumerate(files[plane]):
# load data directly from file
@@ -131,7 +131,7 @@ def _get_model_arc_betas(measure_input, plane):

def _get_column_mapping(plane):
plane_number = PLANE_TO_NUM[plane]
load_columns = OrderedDict([
load_columns = dict([
(TIME, "TIME"),
(DPP, DPP),
(DPPAMP, DPPAMP),
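The kick frame is now created with every column listed in `column_types` and immediately cast, so each column carries an explicit dtype from the start instead of inheriting `float` from the `0.` fill. Sketch with an abbreviated, hypothetical column mapping:

```python
import pandas as pd

# hypothetical, abbreviated column mapping; the real one comes from _get_column_mapping
column_types = {"TIME": str, "DPP": float, "SQRT2JX": float}
kick_frame = pd.DataFrame(data=0.0, index=range(3), columns=list(column_types.keys()))
kick_frame = kick_frame.astype(column_types)
print(kick_frame.dtypes)  # TIME -> object (str), DPP/SQRT2JX -> float64
```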
13 changes: 6 additions & 7 deletions omc3/optics_measurements/phase.py
@@ -244,8 +244,7 @@ def write_special(meas_input, phase_advances, plane_tune, plane):
'BPM2',
f'BPM_PHASE{plane}',
f'BPM_{ERR}PHASE{plane}',]
special_phase_df = pd.DataFrame(columns=special_phase_columns)

to_concat_rows = []
for elem1, elem2 in accel.important_phase_advances():
mus1 = elements.loc[elem1, f"MU{plane}"] - elements.loc[:, f"MU{plane}"]
minmu1 = abs(mus1.loc[meas.index]).idxmin()
@@ -260,8 +259,7 @@ def write_special(meas_input, phase_advances, plane_tune, plane):
elems_to_bpms = -mus1.loc[minmu1] - mus2.loc[minmu2]
ph_result = ((bpm_phase_advance + elems_to_bpms) * bd)
model_value = (model_value * bd) % 1
new_row = pd.DataFrame(
dict(zip(special_phase_columns, [
new_row = pd.DataFrame([[
elem1,
elem2,
ph_result % 1,
@@ -274,11 +272,12 @@
minmu2,
bpm_phase_advance,
elems_to_bpms,
])),
index=[0]
]],
columns=special_phase_columns,
)
to_concat_rows.append(new_row)

special_phase_df = pd.concat([special_phase_df, new_row], axis="index", ignore_index=True)
special_phase_df = pd.concat(to_concat_rows, axis="index", ignore_index=True)

tfs.write(Path(meas_input.outputdir) / f"{SPECIAL_PHASE_NAME}{plane.lower()}{EXT}", special_phase_df)

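Instead of concatenating each new row onto an initially empty `special_phase_df` (which newer pandas versions warn about and which copies the frame on every iteration), the rows are collected in a list and concatenated once. The same pattern in isolation, with placeholder element names:

```python
import pandas as pd

special_phase_columns = ["ELEMENT1", "ELEMENT2", "PHASE"]
to_concat_rows = []
for elem1, elem2, phase in [("ELEM.A", "ELEM.B", 0.25), ("ELEM.C", "ELEM.D", 0.75)]:
    new_row = pd.DataFrame([[elem1, elem2, phase]], columns=special_phase_columns)
    to_concat_rows.append(new_row)
special_phase_df = pd.concat(to_concat_rows, axis="index", ignore_index=True)
print(special_phase_df)
```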
3 changes: 2 additions & 1 deletion omc3/optics_measurements/rdt.py
@@ -240,7 +240,8 @@ def fitting(x, f):
for i, bpm_rdt_data in enumerate(line_amp):
popt, pcov = curve_fit(fitting, kick_data, bpm_rdt_data, p0=guess[i])
amps[i] = popt[0]
err_amps[i] = np.sqrt(pcov)[0] if np.isfinite(np.sqrt(pcov)[0]) else 0. # if single file is used, the error is reported as Inf, which is then overwritten with 0
sqrt_pcov = np.sqrt(pcov).flat[0]
err_amps[i] = sqrt_pcov if np.isfinite(sqrt_pcov) else 0. # if single file is used, the error is reported as Inf, which is then overwritten with 0
return amps, err_amps


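`curve_fit` returns a 2-D covariance matrix, so the old `np.sqrt(pcov)[0]` picked out a row rather than a scalar; `.flat[0]` takes the first element itself. A small demonstration with invented values:

```python
import numpy as np

pcov = np.array([[4.0]])       # 1-parameter fit -> 1x1 covariance matrix
row = np.sqrt(pcov)[0]         # array([2.]) - a row, still not a scalar
err = np.sqrt(pcov).flat[0]    # 2.0 - the scalar standard error
print(row, err, np.isfinite(err))
```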
8 changes: 3 additions & 5 deletions omc3/tune_analysis/bbq_tools.py
@@ -144,20 +144,18 @@ def _get_interpolated_moving_average(data_series: pd.Series, clean_mask: Union[p

try:
# 'interpolate' fills nan based on index/values of neighbours
data = data.interpolate("index").fillna(method="bfill").fillna(method="ffill")
data = data.interpolate("index").bfill().ffill()
except TypeError as e:
raise TypeError("Interpolation failed. "
"Usually due to a dtype format that is not properly recognized.") from e

shift = -int((length-1)/2) # Shift average to middle value

# calculate mean and fill NaNs at the ends
data_mav = data.rolling(length).mean().shift(shift).fillna(
method="bfill").fillna(method="ffill")
data_mav = data.rolling(length).mean().shift(shift).bfill().ffill()

# calculate deviation to the moving average and fill NaNs at the ends
std_mav = np.sqrt(((data-data_mav)**2).rolling(length).mean().shift(shift).fillna(
method="bfill").fillna(method="ffill"))
std_mav = np.sqrt(((data-data_mav)**2).rolling(length).mean().shift(shift).bfill().ffill())
err_mav = std_mav / np.sqrt(length)

if is_datetime_index:
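The `fillna(method=...)` calls are replaced because that keyword is deprecated since pandas 2.1; `bfill()`/`ffill()` are the drop-in equivalents. For example:

```python
import pandas as pd

s = pd.Series([None, 1.0, None, 3.0, None])
old = s.fillna(method="bfill").fillna(method="ffill")  # FutureWarning on pandas >= 2.1
new = s.bfill().ffill()                                # same result, no warning
assert old.equals(new)
```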
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -109,5 +109,5 @@ markers = [
"cern_network: tests that require access to afs or the technical network",
]
# Helpful for pytest-debugging (leave commented out on commit):
# log_cli=true
# log_level=DEBUG
#log_cli = true
#log_cli_level = "DEBUG"
8 changes: 7 additions & 1 deletion tests/unit/test_model_creator.py
@@ -4,12 +4,13 @@
from pathlib import Path

import pytest
import tfs
from omc3.model.accelerators.accelerator import AcceleratorDefinitionError, AccExcitationMode
from omc3.model.constants import TWISS_AC_DAT, TWISS_ADT_DAT, TWISS_DAT, TWISS_ELEMENTS_DAT, PATHFETCHER
from omc3.model.manager import get_accelerator
from omc3.model.model_creators.lhc_model_creator import LhcBestKnowledgeCreator, LhcModelCreator
from omc3.model_creator import create_instance_and_model
from omc3.model.model_creators.lhc_model_creator import LhcModelCreator
from omc3.optics_measurements.constants import NAME

INPUTS = Path(__file__).parent.parent / "inputs"
LHC_30CM_MODIFIERS = [Path("R2023a_A30cmC30cmA10mL200cm.madx")]
@@ -156,6 +157,11 @@ def test_lhc_creation_nominal_driven(tmp_path, acc_models_lhc_2023):
)
check_accel_from_dir_vs_options(tmp_path, accel_opt, accel, required_keys=["beam", "year"])

# quick check for DOROS BPMs
for twiss_name in (TWISS_DAT, TWISS_ELEMENTS_DAT):
df_twiss = tfs.read(tmp_path / twiss_name, index=NAME)
assert any(df_twiss.index.str.match(r"BPM.+_DOROS$"))

# checks that should fail

with pytest.raises(AcceleratorDefinitionError) as excinfo:
8 changes: 0 additions & 8 deletions tests/unit/test_rdt_magnet_order.py
@@ -1,18 +1,10 @@
import os
import random
import string
import tempfile
import itertools

import numpy as np
import pandas as pd
import pytest
import tfs

from pathlib import Path

import turn_by_turn as tbt
from omc3.definitions.constants import PLANES
from omc3.hole_in_one import hole_in_one_entrypoint

INPUTS = Path(__file__).parent.parent / "inputs"
Expand Down