Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
190 changes: 190 additions & 0 deletions lompe/data/sdarn_2010_to_2021.csv

Large diffs are not rendered by default.

596 changes: 596 additions & 0 deletions lompe/data/supermag_stations.csv

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions lompe/data_tools/README
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
Directory for scripts for data handling.

Currently handles the following data sets:
- DMSP SSUSI (EDR AURORA, netcdf format)
- DMSP SSUSI (EDR AURORA, netcdf format) (as of February 2025, SSUSI data is not supported on "jhuapl"; use the "cdaweb" option instead)
- DMSP SSIES (Madrigal)
- SuperDARN (gridmap)
- SuperDARN (gridmap) from Chartier, Alex T
- SuperMAG (netcdf format)
- Iridium (from AMPERE, netcdf format)
- CHAMP (cdf format)
- SWARM (using viresclient)

134 changes: 134 additions & 0 deletions lompe/data_tools/ampere.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
import os
import datetime as dt
import requests
from lompe.data_tools.dataloader import read_iridium


def ampere_parsestart(start):
    # DO NOT EDIT THIS FUNCTION
    """Normalize a start time into the 'YYYY-MM-DDTHH:MM' string the AMPERE API expects.

    Args:
        start: one of
            - list of at least five ints [year, month, day, hour, minute]
              (only the first five elements are used),
            - datetime.date / datetime.datetime,
            - str, which is passed through unchanged (assumed pre-formatted).

    Returns:
        str: timestamp string of the form 'YYYY-MM-DDTHH:MM'.
    """

    # internal helper function adapted from supermag_api.py

    if isinstance(start, list):
        # list input: format the first five fields (year, month, day, hour, minute)
        timestring = "%4.4d-%2.2d-%2.2dT%2.2d:%2.2d" % tuple(start[0:5])
    elif isinstance(start, dt.date):
        # good to go, TBD
        timestring = start.strftime("%Y-%m-%dT%H:%M")
    else:
        # is a string, reparse, TBD
        timestring = start

    return (timestring)


def ampere_coreurl(page, logon, start, extent):
    # DO NOT EDIT THIS FUNCTION
    """Build the AMPERE web-service URL for a data request.

    Args:
        page (str): service endpoint filename, e.g. 'data-rawdB.php'.
        logon (str): AMPERE-registered username passed as the 'logon' query parameter.
        start: start time (list, datetime, or string — see ampere_parsestart).
        extent (int): request duration in seconds (zero-padded to 12 digits in the URL).

    Returns:
        str: full request URL of the form
        'https://ampere.jhuapl.edu/services/<page>?&logon=...&start=...&extent=...'.
    """

    # internal helper function adapted from supermag_api.py
    baseurl = "https://ampere.jhuapl.edu/"

    mytime = ampere_parsestart(start)
    urlstr = baseurl + 'services/' + page + '?'
    urlstr += '&logon=' + logon
    urlstr += '&start=' + mytime

    urlstr += '&extent=' + ("%12.12d" % extent)

    return (urlstr)


def download_iridium_raw(event, basepath='./'):
    """Download raw Iridium dB data (netCDF) from the AMPERE (JHU/APL) service.

    The saved file is the raw input expected by the lompe ``read_iridium``
    function in data_tools. If a non-empty copy already exists at *basepath*,
    the download is skipped and the existing path is returned.

    Example usage:
        event = '2012-04-05'
        basepath = 'downloads'
        download_iridium_raw(event, basepath)

    Args:
        event (str): event date, format 'YYYY-MM-DD'.
        basepath (str, optional): directory where the raw netCDF file is
            saved. Defaults to './'.

    Returns:
        str: path to the netCDF file (``<YYYYMMDD>_iridium.nc``). Note the
        path is returned even if the HTTP request failed; in that case a
        message with the status code is printed and no file is written.

    Note:
        functions "ampere_parsestart" and "ampere_coreurl" are internal helper
        functions adapted from supermag_api.py; credit to the original author
        of those functions.
    """
    if not basepath.endswith('/'):
        basepath += '/'

    start = event + 'T00:00:00'
    duration = 86400  # requested extent in seconds (one full day)
    savefile = basepath + event.replace('-', '') + '_iridium.nc'

    # Skip the download when a non-empty copy already exists.
    if os.path.isfile(savefile) and os.path.getsize(savefile) > 0:
        print(f"File {savefile} already exists at {basepath}.")
        return savefile

    import certifi  # local import: only needed when actually downloading

    # URL to download data from (the 'lompe' username is already registered
    # with the AMPERE API).
    urlstr = ampere_coreurl('data-rawdB.php', 'lompe', start, duration)
    response = requests.get(urlstr, verify=certifi.where(), stream=True)

    # Save the downloaded data only on a successful request.
    if response.status_code == 200:
        with open(savefile, 'wb') as file:
            file.write(response.content)
    else:
        print(f"Failed to retrieve data: {response.status_code}")
    return savefile


def download_iridium(event, basepath='./', tempfile_path='./'):
    """Download and process Iridium data for a given event.

    Produces the processed HDF5 file consumed by lompe, downloading the raw
    AMPERE netCDF data first if needed. Returns early if the processed file
    already exists.

    Example usage:
        event = '2012-04-05'
        basepath = 'downloads'
        tempfile_path = 'downloads'
        download_iridium(event, basepath, tempfile_path)

    Args:
        event (str): event date, format 'YYYY-MM-DD'.
        basepath (str, optional): directory holding the raw netCDF file
            (``<YYYYMMDD>_iridium.nc``). Defaults to './'.
        tempfile_path (str, optional): directory where the processed HDF5
            file (``<YYYYMMDD>_iridium.h5``) is saved. Defaults to './'.

    Returns:
        str: path to the processed file, to be used with the lompe
        ``read_iridium`` function in data_tools.

    Note:
        functions "ampere_parsestart" and "ampere_coreurl" are internal helper
        functions adapted from supermag_api.py; credit to the original author
        of those functions.
    """
    if not basepath.endswith('/'):
        basepath += '/'
    if not tempfile_path.endswith('/'):
        tempfile_path += '/'

    savefile = tempfile_path + event.replace('-', '') + '_iridium.h5'
    raw_file_name = basepath + event.replace('-', '') + '_iridium.nc'

    # Already processed: nothing to do.
    if os.path.isfile(savefile) and os.path.getsize(savefile) > 0:
        print(f"File {savefile} already exists at {tempfile_path}.")
        return savefile

    # Raw data present: just convert it to the lompe HDF5 format.
    if os.path.isfile(raw_file_name) and os.path.getsize(raw_file_name) > 0:
        print(
            f"File {raw_file_name} exists and is converting to lompe data as {savefile}.")
        return read_iridium(event, basepath=basepath, tempfile_path=tempfile_path)

    # Neither exists: download the raw data, then convert.
    print(
        f"File {savefile} does not exist at {tempfile_path}. Downloading raw data and converting to lompe data as {savefile}.")
    _ = download_iridium_raw(
        event, basepath=basepath)
    return read_iridium(event, basepath=basepath, tempfile_path=tempfile_path)
82 changes: 82 additions & 0 deletions lompe/data_tools/champ.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import os
import numpy as np
import pandas as pd
import requests


def download_champ(event, basepath='./', tempfile_path='./'):
    """
    Download CHAMP data from the GFZ FTP server and process it into the lompe
    data format (HDF5).

    Note that CHAMP data is only available for the years between 2000 and 2010.

    Args:
        event (str): event date, format 'YYYY-MM-DD'.
        basepath (str, optional): kept for interface consistency with the
            other download helpers; currently unused. Defaults to './'.
        tempfile_path (str, optional): directory where the raw CDF file and
            the processed HDF5 file are stored. Defaults to './'.

    Returns:
        str or None: path of the processed file if successful, otherwise None.
    """
    # Normalize the directory path the same way as the ampere helpers do,
    # so 'downloads' and 'downloads/' behave identically.
    if not tempfile_path.endswith('/'):
        tempfile_path += '/'

    event_date = event.replace('-', '')
    year = event[:4]
    savefile = tempfile_path + f'CH_ME_MAG_LR_3_{event_date}_0102.cdf'
    processed_file = tempfile_path + f'{event_date}_champ.h5'

    # Check if the processed file already exists
    if os.path.isfile(processed_file):
        return processed_file

    # Download the raw CDF file if it is not already present
    if not os.path.isfile(savefile):
        from requests_ftp import ftp
        session = requests.Session()
        session.mount('ftp://', ftp.FTPAdapter())
        ftp_url = f"ftp://isdcftp.gfz-potsdam.de/champ/ME/Level3/MAG/V0102/{year}/CH_ME_MAG_LR_3_{event_date}_0102.cdf"
        try:
            # Downloading the file and checking if it was successful
            response = session.get(ftp_url)
            if response.status_code == 200:
                with open(savefile, "wb") as file:
                    file.write(response.content)
                print(f"Downloading {savefile} is successful!")
            else:
                print(
                    f"Failed to download the file. Status code: {response.status_code}")
                return None
        except Exception as e:
            print(f"No champ data in this period: {e}")
            return None

    # Process the downloaded CDF file to get the magnetic disturbance
    try:
        import cdflib
        import ppigrf
        cdf_file = cdflib.CDF(savefile)
        mag = cdf_file.varget('B_NEC')  # space magnetometer data

        # geocentric coords of CHAMP orbit
        theta = 90 - cdf_file.varget('Latitude')  # colatitude [deg]
        phi = cdf_file.varget('Longitude')        # longitude [deg]
        r = cdf_file.varget('Radius') / 1000      # radius, converted to km

        time = cdflib.cdfepoch.to_datetime(cdf_file.varget('Timestamp'))

        # using IGRF to calculate magnetic disturbance (dB) registered by CHAMP
        # NOTE(review): the IGRF main field is evaluated at time[0] only, i.e.
        # it is assumed constant over the one-day file — confirm this is intended.
        Br, Btheta, Bphi = ppigrf.igrf_gc(r, theta, phi, time[0])
        B0 = np.vstack((-Btheta.flatten(), Bphi.flatten(), -Br.flatten()))
        dB = mag.T - B0

        champ_df = pd.DataFrame({
            'Be': dB[1],   # eastward disturbance
            'Bn': dB[0],   # northward disturbance
            'Bu': -dB[2],  # upward disturbance
            'lon': phi,
            'lat': 90 - theta,
            'r': r
        }, index=time)
        champ_df.to_hdf(processed_file, key='df', mode='w')
        os.remove(savefile)  # remove the raw file after processing
        return processed_file
    except Exception as e:
        print(f"Failed to process the file: {e}")
        return None
134 changes: 134 additions & 0 deletions lompe/data_tools/data_tools_example.ipynb

Large diffs are not rendered by default.

Loading
Loading