# Source code for toolbox_scs.detectors.bam_detectors

""" Beam Arrival Monitor related sub-routines

    Copyright (2021) SCS Team.

    (contributions preferably comply with pep8 code structure
    guidelines.)
"""

import logging
import xarray as xr
import numpy as np

from ..misc.bunch_pattern_external import is_pulse_at
from ..mnemonics_machinery import mnemonics_for_run
from ..constants import mnemonics as _mnemonics
from ..misc.bunch_pattern import (npulses_has_changed,
                                  get_unique_sase_pId, load_bpt)
from toolbox_scs.load import get_array

# Public API of this module, exposed via ``from ... import *``.
__all__ = [
    'get_bam',
    'get_bam_params',
]

# Module-level logger, named after the module per the logging convention.
log = logging.getLogger(__name__)


def get_bam(run, mnemonics=None, merge_with=None, bunchPattern='sase3',
            pulseIds=None):
    """
    Load beam arrival monitor (BAM) data and align their pulse ID
    according to the bunch pattern. Sources can be loaded on the fly
    via the mnemonics argument, or processed from an existing data set
    (merge_with).

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the bam data.
    mnemonics: str or list of str
        mnemonics for BAM, e.g. "BAM1932M" or ["BAM414", "BAM1932M"].
        The arrays are either taken from merge_with or loaded from the
        DataCollection run.
    merge_with: xarray Dataset
        If provided, the resulting Dataset will be merged with this one.
        If merge_with contains variables in mnemonics, they will be
        selected, aligned and merged.
    bunchPattern: str
        'sase1' or 'sase3' or 'scs_ppl', bunch pattern used to extract
        peaks. The pulse ID dimension will be named 'sa1_pId', 'sa3_pId'
        or 'ol_pId', respectively.
    pulseIds: list, 1D array
        Pulse Ids. If None, they are automatically loaded.

    Returns
    -------
    xarray Dataset with pulse-resolved BAM variables aligned,
    merged with Dataset *merge_with* if provided.

    Example
    -------
    >>> import toolbox_scs as tb
    >>> run = tb.open_run(2711, 303)
    >>> bam = tb.get_bam(run, 'BAM1932S')
    """
    # Keep only mnemonics that look like BAM devices (names containing
    # 'BAM4' or 'BAM1'); anything else is silently ignored.
    bam_mnemos = ['BAM4', 'BAM1']
    mnemonics = [mnemonics] if isinstance(mnemonics, str) else mnemonics
    m2 = []
    for m in mnemonics:
        if any([(k in m) for k in bam_mnemos]):
            m2.append(m)
    mnemonics = list(set(m2))
    if len(mnemonics) == 0:
        log.info('no BAM mnemonics to process. Skipping.')
        return merge_with
    # Prepare the dataset of non-BAM data to merge with
    if bool(merge_with):
        ds_mw = merge_with.drop(mnemonics, errors='ignore')
    else:
        ds_mw = xr.Dataset()
    dim_names = {'sase3': 'sa3_pId', 'sase1': 'sa1_pId',
                 'scs_ppl': 'ol_pId'}
    # Bunch pattern table: used to mask the pulse slots actually filled
    # for the requested SASE / laser pattern.
    bpt = load_bpt(run, ds_mw)
    if bpt is not None:
        mask = is_pulse_at(bpt, bunchPattern)
        mask = mask.rename({'pulse_slot': dim_names[bunchPattern]})
    ds = xr.Dataset()
    run_mnemonics = mnemonics_for_run(run)
    for m in mnemonics:
        if merge_with is not None and m in merge_with:
            da_bam = merge_with[m]
        else:
            da_bam = get_array(run, m)
        if len(da_bam.dims) == 2:
            if 'BAMbunchId' not in da_bam.dims:
                continue
            # BAM records data at 4.5 MHz; every second slot corresponds
            # to an XFEL pulse slot, hence the stride-2 selection.
            da_bam = da_bam.sel(BAMbunchId=slice(0, None, 2))
            # align the pulse Id
            if bpt is not None:
                n = mask.sizes[dim_names[bunchPattern]]
                da_bam = da_bam.isel(BAMbunchId=slice(0, n))
                da_bam = da_bam.assign_coords(BAMbunchId=np.arange(0, n))
                da_bam = da_bam.rename(BAMbunchId=dim_names[bunchPattern])
                da_bam = da_bam.where(mask, drop=True)
            # make sure unit is picosecond (fs -> ps for all keys except
            # the legacy low-charge arrival time, which is already in ps)
            if run_mnemonics[m]['key'] != 'data.lowChargeArrivalTime':
                da_bam *= 1e-3
        else:
            # The 1D values (mean, std dev...) are in fs, need to convert
            # to ps. Only scale if the array still holds raw run values
            # (i.e. it was not already converted upstream).
            mnemo = run_mnemonics[m]
            first_val = run[mnemo['source']][mnemo['key']].train_from_index(0)[1]
            if first_val == da_bam[0]:
                da_bam *= 1e-3
        ds = ds.merge(da_bam, join='inner')
    # merge with non-BAM dataset
    ds = ds_mw.merge(ds, join='inner')
    return ds
# NOTE(review): a large commented-out legacy implementation (get_bam_old)
# previously lived here as a module-level triple-quoted string. It had no
# runtime effect and is superseded by get_bam above; retrieve it from
# version control history if ever needed.
def get_bam_params(run, mnemo_or_source='BAM1932S'):
    """
    Extract the run values of bamStatus[1-3] and bamError.

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the bam data.
    mnemo_or_source: str
        mnemonic of the BAM, e.g. 'BAM414', or source name,
        e.g. 'SCS_ILH_LAS/DOOCS/BAM_414_B2'.

    Returns
    -------
    params: dict
        dictionary containing the extracted parameters.

    Note
    ----
    The extracted parameters are run values, they do not reflect any
    possible change during the run.
    """
    if mnemo_or_source in _mnemonics:
        # Resolve the mnemonic to its source; strip any ':channel' suffix.
        run_mnemonics = mnemonics_for_run(run)
        source = run_mnemonics[mnemo_or_source]['source'].split(':')[0]
    else:
        source = mnemo_or_source
    res = {}
    res['status1'] = run.get_run_value(source, 'bamStatus1.value')
    res['status2'] = run.get_run_value(source, 'bamStatus2.value')
    res['status3'] = run.get_run_value(source, 'bamStatus3.value')
    res['error'] = run.get_run_value(source, 'bamError.value')
    return res