
Commit 441660d7 authored by Qusai Al Shidi

FEAT: Add filecache to `web.get_omni_data()`

This will speed up the `get_omni_data()` function even more by keeping
a local cache of the omni data.
parent f8776a0b
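
For context, a minimal usage sketch of the cached call path (assuming `swmfpy` is installed and the OMNI servers are reachable; the time range here is only illustrative):

```python
import datetime as dt
from swmfpy.web import get_omni_data

start = dt.datetime(2016, 1, 2, 3)
end = dt.datetime(2016, 3, 2, 1)

# First call downloads the OMNI files and, with this commit, writes
# *.cache files in the current working directory (hence the new
# ignore entries in the diff below).
data = get_omni_data(start, end)

# A repeated call over the same time range is served from that cache
# instead of re-downloading the files.
data = get_omni_data(start, end)

print(data['times'][0])  # first timestamp in the returned dictionary
```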
@@ -105,3 +105,11 @@ venv.bak/

 # misc
 tags
+
+# cache from filecache
+*.cache
+
+# pytest created files
+*.fts
+*.fts.gz
+IMF.dat
@@ -19,6 +19,7 @@ import urllib
 import urllib.request
 import warnings
 from dateutil import rrule
+from filecache import filecache
 import numpy as np
 from .tools import _nearest, carrington_rotation_number
@@ -170,6 +171,11 @@ def get_omni_data(time_from, time_to, **kwargs):
                            time_to=storm_end,
                            resolution='low')
         ```
+
+    Note:
+        This function creates a filecache of omni data to speed up
+        repeated calls. The cache is stored in the directory where the
+        Python interpreter is run.
     """
     # Author: Qusai Al Shidi
     # Email: qusai@umich.edu
@@ -208,7 +214,7 @@ def get_omni_data(time_from, time_to, **kwargs):
     for url in omni['urls'](time_from, time_to):

         # Parse omni data
-        for line in list(urllib.request.urlopen(url)):
+        for line in _download_omni_data(url):
             cols = line.decode('ascii').split()
             time = omni['parsetime'](cols)
@@ -228,6 +234,13 @@ def get_omni_data(time_from, time_to, **kwargs):
     return return_data


+@filecache
+def _download_omni_data(url):
+    """Downloads omni data and returns as list
+    """
+    return list(urllib.request.urlopen(url))
+
+
 def _urls_omni_hires(time_from, time_to):
     """Returns hires omni urls from time_from to time_to
     """
@@ -484,7 +497,7 @@ def download_magnetogram_adapt(time, map_type='fixed', **kwargs):
     for filename in filenames:
         # Only try to download if the file does not exist
-        if os.path.isfile(directory+filename) == True:
+        if os.path.isfile(directory+filename):
             warnings.warn(f'{filename} exists, not downloading',
                           RuntimeWarning)
         else:
@@ -6,8 +6,13 @@ import datetime as dt
 from swmfpy.web import *


 TIME = dt.datetime(2016, 2, 3, 2, 1, 1)
+TIMES = (dt.datetime(2016, 1, 2, 3), dt.datetime(2016, 3, 2, 1))


 def test_download_magnetogram_adapt():
     download_magnetogram_adapt(TIME)
     assert isfile('adapt40311_03k012_201602030200_i00015600n1.fts')
+
+
+def test_get_omni_data():
+    data = get_omni_data(*TIMES)
+    assert isinstance(data, dict), 'Incorrect type for omni data'
+    assert data['times'][0] == TIMES[0], 'Starting time is incorrect'