Smoothing Jupyter Startup

Within:

C:\Users\&lt;username&gt;\.ipython\profile_default\startup

place whatever files you want executed automatically at IPython/Jupyter startup. They run in lexicographic order, so use a numeric-prefix naming convention such as:

00-global_imports.py
01-local_imports.py
02-global_magics.ipy

with contents like:

00-global_imports.py

#global imports
import matplotlib as mpl
mpl.use('agg')

import datetime          as dt
import matplotlib.cm     as cmap
import matplotlib.dates  as mdates
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy             as np
import os 
import re
import scipy             as sp
import scipy.interpolate as interp
import scipy.optimize    as opt
import scipy.special     as special
import spacepy.pycdf     as pycdf
import sys
02-global_magics.ipy

#magic happens
%load_ext autoreload
%autoreload 2
%matplotlib inline

File Server Scraping

Within Python, the best way to do file-server scraping (i.e., recursively searching a directory tree for files matching a pattern) is something like:

def find_OpPt_CDFs(obs, ver, targ_dir):
    """Recursively scan targ_dir for operating-point CDF files.

    Parameters
    ----------
    obs : str
        Observatory identifier interpolated at the start of the filename
        pattern (e.g. 'mms1').
    ver : str
        Version string interpolated at the end of the pattern. Note it is
        inserted into a regex, so escape any metacharacters (e.g. '.') if a
        literal match is required.
    targ_dir : str
        Root directory to walk.

    Returns
    -------
    numpy.ndarray of str
        Flattened 1-D array of full paths to matching files, with all
        backslashes normalized to forward slashes.
    """
    # Raw string so '\d' is a regex digit class rather than an invalid
    # string escape (a SyntaxWarning in modern Python); '\.' makes the dot
    # before the extension literal instead of matching any character.
    file_pattern = re.compile(r'%s_d[ei]s\d_engr_l1_sigthrsh_\d{14}_v%s\.cdf' % (obs, ver))

    # print() call (not the Python 2 print statement) so this runs on Python 3.
    print('scraping in %s' % targ_dir)

    filtered_files = []
    for root, dirs, fnames in os.walk(targ_dir):
        for fname in fnames:  # 'fname' avoids shadowing the builtin 'file'
            if file_pattern.match(fname):
                # Normalize Windows separators so downstream code sees one style.
                filtered_files.append((root + '/' + fname).replace('\\', '/'))
    return np.array(filtered_files).flatten()

Note that the regex compile step uses:

    [ei] - either 'e' or 'i'
    \d   - digit
    \d{14} - any 14 digits in a row