139 changes: 109 additions & 30 deletions make_timeline.py
@@ -52,8 +52,12 @@
parser.add_argument('--test',
action='store_true',
help='Use test data')
parser.add_argument('--test-scenario',
type=int,
help='Number of a scenario (1-6) for testing missing P3 data')
args = parser.parse_args()

P3_BAD = -100000
AXES_LOC = [0.08, 0.15, 0.83, 0.6]

if args.test:
@@ -109,9 +113,11 @@ def get_avg_flux(filename):
lines = [line for line in open(filename, 'r')
if line.startswith('AVERAGE ')]
if len(lines) != 1:
raise ValueError('{} file contains {} lines that start with '
'AVERAGE (expect one)'.format(ACE_RATES_FILE, len(lines)))
p3_avg_flux = float(lines[0].split()[4])
print('WARNING: {} file contains {} lines that start with '
'AVERAGE (expect one)'.format(ACE_RATES_FILE, len(lines)))
p3_avg_flux = P3_BAD
else:
p3_avg_flux = float(lines[0].split()[4])
return p3_avg_flux
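
The reworked get_avg_flux above degrades gracefully: instead of raising when the MTA rates file does not contain exactly one AVERAGE line, it warns and returns the P3_BAD sentinel. A minimal standalone sketch of that fallback, assuming a made-up rates-file layout in which the P3 average is the fifth whitespace-delimited token (matching the lines[0].split()[4] indexing above):

```python
# Illustrative sketch only -- the column layout of the rates file is assumed,
# not taken from this diff.
P3_BAD = -100000

def parse_avg_p3(text):
    lines = [line for line in text.splitlines() if line.startswith('AVERAGE ')]
    if len(lines) != 1:
        # Missing or ambiguous average: return the sentinel so callers can
        # keep running and flag the P3 average as unavailable.
        return P3_BAD
    return float(lines[0].split()[4])

print(parse_avg_p3('AVERAGE  2024 123 0000 56.0\n'))  # -> 56.0
print(parse_avg_p3(''))                               # -> -100000
```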


@@ -187,11 +193,68 @@ def get_ace_p3(tstart, tstop):
h5 = tables.openFile(ACE_H5_FILE)
times = h5.root.data.col('time')
p3 = h5.root.data.col('p3')
ok = (tstart < times) & (times < tstop) & (p3 > 0)
ok = (tstart < times) & (times < tstop)
h5.close()
return times[ok], p3[ok]


def get_test_vals(scenario, p3_times, p3s, p3_avg, p3_fluence):
if scenario == 1:
# ACE unavailable for the last 8 hours. No P3 avg from MTA.
print('Running test scenario 1')
p3s[:] = 100.0
bad = p3_times[-1] - p3_times < 3600 * 8
p3s[bad] = P3_BAD
p3_avg = P3_BAD
p3_fluence = 0.3e9
elif scenario == 2:
# ACE data flow resumes but MTA does not report an average
# value. Expect ACE fluence to show as red because no P3 avg
# is available for fluence calculation.
print('Running test scenario 2')
p3s[:] = 5000
dt = (p3_times[-1] - p3_times) / 3600.0
bad = (dt < 8) & (dt > 2.5)
p3s[bad] = P3_BAD
p3_avg = P3_BAD
p3_fluence = 0.3e9
elif scenario == 3:
# ACE data flow resumes and MTA reports an average value
# and there is now enough data for a slope and fluence quantiles
print('Running test scenario 3')
p3s[:] = 5000
dt = (p3_times[-1] - p3_times) / 3600.0
bad = (dt < 8) & (dt > 2.5)
p3_avg = 5000
p3s[bad] = P3_BAD
p3_fluence = 0.0e9
elif scenario == 4:
# ACE data flow resumes and MTA reports an average value,
# but not enough data for a slope and fluence quantiles
print('Running test scenario 4')
p3s[:] = 5000
dt = (p3_times[-1] - p3_times) / 3600.0
bad = (dt < 8) & (dt > 0.5)
p3_avg = 5000
p3s[bad] = P3_BAD
p3_fluence = 0.0e9
elif scenario == 5:
# ACE completely unavailable during period. No P3 avg from MTA.
print('Running test scenario 5')
p3s[:] = P3_BAD
p3_avg = P3_BAD
p3_fluence = 0.3e9
elif scenario == 6:
# Random ACE outages
print('Running test scenario 6')
bad = np.random.uniform(size=len(p3s)) < 0.05
p3s[:] = 1000.0
p3s[bad] = P3_BAD
p3_avg = 1000.0
p3_fluence = 0.3e9
return p3s, p3_avg, p3_fluence
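
Each scenario above simulates an ACE outage by overwriting a window of recent P3 samples with the P3_BAD sentinel (selected at run time with --test --test-scenario N). The masking pattern, shown in isolation with made-up times and flux values:

```python
# Standalone illustration of the outage-window masking used in the scenarios
# above; the sample cadence and flux level here are made up.
import numpy as np

P3_BAD = -100000
p3_times = np.arange(0.0, 24 * 3600, 300.0)   # 24 hours of 5-minute samples (sec)
p3s = np.full_like(p3_times, 5000.0)          # nominal P3 flux

dt = (p3_times[-1] - p3_times) / 3600.0       # hours before the last sample
bad = (dt < 8) & (dt > 2.5)                   # outage from 8 to 2.5 hours ago
p3s[bad] = P3_BAD                             # flag the outage with the sentinel

print(np.sum(bad), 'samples flagged bad')
```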


def get_hrc(tstart, tstop):
"""
Get the historical HRC proxy rates and filter out bad values.
@@ -238,10 +301,15 @@ def get_p3_slope(p3_times, p3_vals):
Compute the slope (log10(p3) per hour) of the last 6 hours of ACE P3 values.
"""
ok = (p3_times[-1] - p3_times) < 6 * 3600 # Points within 6 hrs of last available data
x = (p3_times[ok] - p3_times[-1]) / 3600
y = np.log10(p3_vals[ok])
r = np.polyfit(x, y, 1)
return r[0]
ok = ok & (p3_vals > 0) # Good P3 values
slope = None
if np.sum(ok) > 4:
x = (p3_times[ok] - p3_times[-1]) / 3600
if x[-1] - x[0] > 2:
y = np.log10(p3_vals[ok])
slope = np.polyfit(x, y, 1)[0]

return slope
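
get_p3_slope now returns None unless there are more than four good (positive) P3 samples spanning at least two hours; otherwise it fits a straight line to log10(flux) versus hours. A quick self-contained check of that fit with synthetic data (not from the diff): a flux that doubles every hour should give a slope of log10(2) ≈ 0.30 per hour.

```python
# Synthetic sanity check of the log-slope fit in get_p3_slope.
import numpy as np

times = np.arange(-6 * 3600, 1.0, 300.0)   # last 6 hours at 5-minute cadence (sec)
p3 = 1000.0 * 2.0 ** (times / 3600.0)      # flux doubling every hour

x = (times - times[-1]) / 3600.0           # hours relative to the last sample
y = np.log10(p3)
slope = np.polyfit(x, y, 1)[0]
print(slope, np.log10(2.0))                # both ~0.301
```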


def main():
@@ -272,16 +340,21 @@ def main():
fluence_date = now
avg_flux = get_avg_flux(ACE_RATES_FILE)

# Get the realtime ACE P3 and HRC proxy values over the time range
p3_times, p3_vals = get_ace_p3(start.secs, now.secs)
hrc_times, hrc_vals = get_hrc(start.secs, now.secs)

# For testing: inject predefined values for different scenarios
if args.test_scenario:
p3_vals, avg_flux, fluence0 = get_test_vals(
args.test_scenario, p3_times, p3_vals, avg_flux, fluence0)

# Compute the predicted fluence based on the current 2hr average flux.
fluence_times = np.arange(fluence_date.secs, stop.secs, args.dt)
rates = np.ones_like(fluence_times) * avg_flux * args.dt
rates = np.ones_like(fluence_times) * max(avg_flux, 0.0) * args.dt
fluence = calc_fluence(fluence_times, fluence0, rates, states)
zero_fluence_at_radzone(fluence_times, fluence, radzones)

# Get the realtime ACE P3 and HRC proxy values over the time range
p3_times, p3_vals = get_ace_p3(start.secs, now.secs)
hrc_times, hrc_vals = get_hrc(start.secs, now.secs)
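
In the hunk above, the per-step rate is clamped with max(avg_flux, 0.0) so that a missing MTA average (the negative P3_BAD sentinel) predicts a flat fluence rather than a negative one. calc_fluence itself is not shown in this diff; a rough sketch, assuming it is essentially a running sum of the per-step rates on top of the starting fluence (the real function also folds in the commanded states):

```python
# Rough sketch only: calc_fluence and the states handling are assumed, not
# copied from this diff.
import numpy as np

dt = 300.0                      # step size in seconds (cf. args.dt)
avg_flux = -100000              # P3_BAD sentinel: no MTA 2-hr average available
fluence0 = 0.3e9                # starting fluence

fluence_times = np.arange(0.0, 12 * 3600, dt)
rates = np.ones_like(fluence_times) * max(avg_flux, 0.0) * dt
fluence = fluence0 + np.cumsum(rates)
print(fluence[0], fluence[-1])  # both 0.3e9: prediction stays flat when flux is bad
```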

# Initialize the main plot figure
plt.rc('legend', fontsize=10)
fig = plt.figure(1, figsize=(9, 5))
@@ -323,20 +396,21 @@ def main():
plot_multi_line(x, y, z, [0, 1, 2], ['k', 'r', 'c'], ax)

# Plot 10, 50, 90 percentiles of fluence
p3_fits, p3_samps, fluences = cfd.get_fluences(
os.path.join(args.data_dir, 'ACE_hourly_avg.npy'))
p3_slope = get_p3_slope(p3_times, p3_vals)
hrs, fl10, fl50, fl90 = cfd.get_fluence_percentiles(
avg_flux, p3_slope, p3_fits, p3_samps, fluences,
args.min_flux_samples, args.max_slope_samples)
fluence_hours = (fluence_times - fluence_times[0]) / 3600.0
for fl_y, linecolor in zip((fl10, fl50, fl90),
('-g', '-b', '-r')):
fl_y = Ska.Numpy.interpolate(fl_y, hrs, fluence_hours)
rates = np.diff(fl_y)
fl_y_atten = calc_fluence(fluence_times[:-1], fluence0, rates, states)
zero_fluence_at_radzone(fluence_times[:-1], fl_y_atten, radzones)
plt.plot(x0 + fluence_hours[:-1] / 24.0, fl_y_atten, linecolor)
if p3_slope is not None and avg_flux > 0:
p3_fits, p3_samps, fluences = cfd.get_fluences(
os.path.join(args.data_dir, 'ACE_hourly_avg.npy'))
hrs, fl10, fl50, fl90 = cfd.get_fluence_percentiles(
avg_flux, p3_slope, p3_fits, p3_samps, fluences,
args.min_flux_samples, args.max_slope_samples)
fluence_hours = (fluence_times - fluence_times[0]) / 3600.0
for fl_y, linecolor in zip((fl10, fl50, fl90),
('-g', '-b', '-r')):
fl_y = Ska.Numpy.interpolate(fl_y, hrs, fluence_hours)
rates = np.diff(fl_y)
fl_y_atten = calc_fluence(fluence_times[:-1], fluence0, rates, states)
zero_fluence_at_radzone(fluence_times[:-1], fl_y_atten, radzones)
plt.plot(x0 + fluence_hours[:-1] / 24.0, fl_y_atten, linecolor)

# Set x and y axis limits
x0, x1 = cxc2pd([start.secs, stop.secs])
@@ -543,6 +617,7 @@ def write_states_json(fn, fig, ax, states, start, stop, now,

# Iterate through each time step and create corresponding data structure
# with pre-formatted values for display in the output table.
NOT_AVAIL = 'N/A'
for time, pd, state_val, fluence, p3, hrc in izip(times, pds, state_vals,
fluences, p3s, hrcs):
out = {}
@@ -560,18 +635,18 @@
if time < now_secs:
now_idx += 1
out['fluence'] = '{:.2f}e9'.format(fluence_now)
out['p3'] = '{:.0f}'.format(p3)
out['p3'] = '{:.0f}'.format(p3) if p3 > 0 else NOT_AVAIL
out['hrc'] = '{:.0f}'.format(hrc)
else:
out['fluence'] = '{:.2f}e9'.format(fluence)
out['p3'] = '{:.0f}'.format(p3_now)
out['p3'] = '{:.0f}'.format(p3_now) if p3_now > 0 else NOT_AVAIL
out['hrc'] = '{:.0f}'.format(hrc_now)
outs.append(out)
data['states'] = outs
data['now_idx'] = now_idx
data['now_date'] = date_zulu(now)
data['p3_avg_now'] = '{:.0f}'.format(p3_avg)
data['p3_now'] = '{:.0f}'.format(p3_now)
data['p3_avg_now'] = '{:.0f}'.format(p3_avg) if p3_avg > 0 else NOT_AVAIL
data['p3_now'] = '{:.0f}'.format(p3_now) if p3_now > 0 else NOT_AVAIL
data['hrc_now'] = '{:.0f}'.format(hrc_now)
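
The table entries above fall back to a NOT_AVAIL string whenever a P3 value is non-positive (i.e. the P3_BAD sentinel), so the web page shows 'N/A' instead of a bogus number. The inline conditional, pulled out as a tiny helper purely for illustration (the module itself does not define such a helper):

```python
# Hypothetical helper equivalent to the inline conditionals above.
NOT_AVAIL = 'N/A'

def fmt_p3(value):
    return '{:.0f}'.format(value) if value > 0 else NOT_AVAIL

print(fmt_p3(5230.7))    # '5231'
print(fmt_p3(-100000))   # 'N/A'
```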

track = next_comm['track_local']['value']
@@ -613,6 +688,10 @@ def get_fmt_dt(t1, t0):


def log_scale(y):
if isinstance(y, np.ndarray):
bad = y < 0
y = y.copy()
y[bad] = 1e-10
return (np.log10(y) - 1.0) / 2.0
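
With bad samples now passed through to plotting, log_scale clamps negative (sentinel) values to 1e-10 before taking the log, so np.log10 never sees a negative number and the bad points land far below the visible axis range. A quick numeric check with a two-element array (not part of the diff):

```python
# Quick numeric check of log_scale's clamping of sentinel values.
import numpy as np

def log_scale(y):
    if isinstance(y, np.ndarray):
        bad = y < 0
        y = y.copy()
        y[bad] = 1e-10
    return (np.log10(y) - 1.0) / 2.0

print(log_scale(np.array([100.0, -100000.0])))   # [ 0.5 -5.5]
```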

if __name__ == '__main__':
2 changes: 1 addition & 1 deletion task_schedule.cfg
@@ -38,11 +38,11 @@ alert [email protected]
cron * * * * *
check_cron 00 06 * * *
exec 10 : get_iFOT_events.pl
exec 5 : get_web_content.pl
exec 5 : $ENV{SKA_SHARE}/arc/get_ace.py --h5=$ENV{SKA_DATA}/arc/ACE.h5
exec 5 : $ENV{SKA_SHARE}/arc/get_hrc.py --data-dir=$ENV{SKA_DATA}/arc
exec 5 : $ENV{SKA_SHARE}/arc/plot_hrc.py --h5=$ENV{SKA_DATA}/arc/hrc_shield.h5 --out=$ENV{SKA}/www/ASPECT/arc/hrc_shield.png
exec 5 : $ENV{SKA_SHARE}/arc/make_timeline.py --data-dir=$ENV{SKA_DATA}/arc
exec 5 : get_web_content.pl
exec 2 : arc.pl
exec 2 : arc.pl -config arc:arc_ops
exec 10 : $ENV{SKA_SHARE}/arc/arc_time_machine.pl
27 changes: 20 additions & 7 deletions timeline.js
@@ -22,13 +22,28 @@ function getXY(e) {
return [posx, posy]
}

function spanColor(val, color) {
return '<span style="font-weight:bold;color:' + color + '">' + val + '</span>'
}

function setNAToRed(val) {
if (val == 'N/A') {
val = spanColor(val, 'red')
}
return val
}

function setStateTable(idx) {
var state = data.states[idx]
var keys = ['date', 'now_dt', 'simpos', 'pitch', 'ra', 'dec', 'roll',
'pcad_mode', 'si_mode', 'power_cmd', 'ccd_fep', 'vid_clock',
'fluence', 'p3', 'hrc'];
for (var i=0; i<keys.length; i++) {
document.getElementById('tl_' + keys[i]).innerHTML = state[keys[i]];
document.getElementById('tl_' + keys[i]).innerHTML = setNAToRed(state[keys[i]]);
}

if (data['p3_avg_now'] == 'N/A') {
document.getElementById('tl_fluence').innerHTML = spanColor(state['fluence'], 'red')
}

document.getElementById('tl_obsid').innerHTML =
@@ -41,9 +56,7 @@ function setStateTable(idx) {
} else {
var color = 'red'
}
document.getElementById('tl_si').innerHTML =
'<span style="font-weight:bold; color:' + color + ';">' + state['si'] + '</span>'
//document.getElementById('tl_si').innerHTML = '<font weight="bold" color="' + color + '">' + state['si'] + '</font>'
document.getElementById('tl_si').innerHTML = spanColor(state['si'], color)

if (state['hetg'] == 'INSR') {
var grating = 'HETG';
@@ -86,14 +99,14 @@ function initTimeLineHandlers() {
document.onmousemove = null;
}
now_idx = data['now_idx']
setStateTable(now_idx);
setStateTable(now_idx, data);
document.getElementById('tl_now').innerHTML = data['now_date']
document.getElementById('tl_track_time').innerHTML = data['track_time']
document.getElementById('tl_track_dt').innerHTML = data['track_dt']
document.getElementById('tl_track_station').innerHTML = data['track_station']
document.getElementById('tl_track_activity').innerHTML = data['track_activity']
document.getElementById('tl_p3_avg_now').innerHTML = data['p3_avg_now']
document.getElementById('tl_p3_now').innerHTML = data['p3_now']
document.getElementById('tl_p3_avg_now').innerHTML = setNAToRed(data['p3_avg_now'])
document.getElementById('tl_p3_now').innerHTML = setNAToRed(data['p3_now'])
document.getElementById('tl_hrc_now').innerHTML = data['hrc_now']

}