
Commit cb5d6e8

add some wrappers formerly in local BZ lib
1 parent d520331 commit cb5d6e8

File tree

6 files changed: +1142 −0 lines changed


wrappers/PickleRaw.py

+267
@@ -0,0 +1,267 @@
#!/usr/bin/env python
"""
Save/load pickles from Coupled Runs

Usage:
    PickleRaw.py PATH [options]

Examples:
    Create all:
        PickleRaw.py /Volumes/BB_4TB/Thesis/Results_03-19

    Create just infiltration:
        PickleRaw.py . --var inf

Arguments:
    PATH    Directory with list of Model Runs (starts with Results_)

Options:
    --fmt=BOOL    Run Formatting Script [default: 0]
    --var=STR     One SWMM Variable: inf, evap, run, heads, soil [default: 0]
    --help        Print this message

Notes:
    Created: 2017-03-06
    Updated: 2017-05-18
"""
from __future__ import print_function
import BB
import os
import os.path as op
import shutil

import time
import linecache
from collections import OrderedDict
import numpy as np
import pandas as pd

from components import bcpl
import PickleFmt
import swmmtoolbox as swmtbx
import flopy.utils.formattedfile as ff
import flopy.utils.binaryfile as bf

from docopt import docopt
from schema import Schema, Use, Or

from multiprocessing import Pool

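# Expected results layout (illustrative, inferred from the path parsing below):
#   Results_03-19/
#       SLR-0.0_03-19/                 one scenario dir per sea-level-rise value
#           SLR-0.0_03-19.out          SWMM binary output
#           SLR-0.0_03-19.fhd          MODFLOW formatted heads
#           SLR-0.0_03-19.uzfcb2.bin   UZF cell-by-cell budgets
#           ext/                       finf_<day>.ref / pet_<day>.ref arrays
#       Pickles/                       created below; all pickles land here
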
class pickle_base(object):
    def __init__(self, path_result):
        self.path = path_result
        self.path_picks = self._make_pick_dir()
        self._make_scenarios_slr()
        self._make_ts()

    def _make_pick_dir(self, verbose=0):
        """ Make Pickles Directory if it doesn't exist """
        path_pickle = op.join(self.path, 'Pickles')
        try:
            os.makedirs(path_pickle)
        except OSError:
            if verbose:
                print ('WARNING - Overwriting existing pickles in: \n\t{}'.format(path_pickle))
        return path_pickle

    def _make_scenarios_slr(self):
        """ Get Scenarios and SLR from Dirs """
        self.scenarios = [op.join(self.path, slr) for slr in os.listdir(self.path)
                          if slr.startswith('SLR')]
        self.slr = [op.basename(scenario).split('_')[0][-3:] for
                    scenario in self.scenarios]

    def _make_ts(self):
        """ Pull start/end time from any .out file """
        slr_name = 'SLR-{}_{}'.format(self.slr[0], self.scenarios[0][-5:])
        out_file = op.join(self.path, slr_name, slr_name + '.out')
        st_end = swmtbx.SwmmExtract(out_file).GetDates()  # returns a tuple
        self.ts_hr = pd.date_range(st_end[0], st_end[1], freq='H')
        self.ts_day = pd.date_range(st_end[0], st_end[1], freq='D')

class pickle_swmm(pickle_base):
    def __init__(self, path_result):
        pickle_base.__init__(self, path_result)

    def sys_out(self):
        """
        Create a DataFrame of SWMM system output variables.

        Careful with times: SWMM stops one hour into the new day
        (12-30 00:00), so each series is truncated to fit ts_hr; the last
        day is not updated, presumably because it carries no precip.
        path_result should include a date (e.g. Results_03-19).
        """
        # SWMM system variable codes, in the same order as varnames
        varnames = ['Precip', 'Pet', 'Flood', 'Vol_Stored', 'Infil', 'Runoff', 'Surf_Evap']
        variables = [1, 14, 10, 12, 3, 4, 13]
        sys_mat = np.zeros([len(self.ts_hr), len(varnames) * len(self.scenarios)])
        colnames = []
        for i, scenario in enumerate(self.scenarios):
            slr_name = op.basename(scenario)
            slr = slr_name[4:7]
            out_file = op.join(scenario, '{}.out'.format(slr_name))
            colnames.extend(['{}_{}'.format(var_name, slr) for var_name in varnames])
            for j, v in enumerate(variables):
                # pull and store in matrix; truncate last (empty) day to fit
                sys_mat[:, j+i*len(variables)] = (swmtbx.extract_arr(out_file,
                                                  'system,{},{}'.format(v, v))
                                                  [:len(self.ts_hr)])
        swmm_sys = pd.DataFrame(sys_mat, index=self.ts_hr, columns=colnames)
        path_res = op.join(self.path_picks, 'swmm_sys.df')
        swmm_sys.to_pickle(path_res)
        print ('SYS DataFrame pickled to: {}'.format(path_res))

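# Reading the system frame back later (illustrative):
#   swmm_sys = pd.read_pickle(op.join(path_picks, 'swmm_sys.df'))
# Columns are '<Var>_<slr>' (e.g. 'Precip_0.0'), indexed by the hourly ts_hr.
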
class pickle_uzf(pickle_base):
    def __init__(self, path_result):
        pickle_base.__init__(self, path_result)

    def uzf_arrays(self):
        """
        Make 3D numpy arrays of shape (len(ts_day), 74, 51), e.g. 549 daily steps
        """
        varnames = ['surf_leak', 'uzf_rch', 'uzf_et', 'uzf_run']
        variables = ['SURFACE LEAKAGE', 'UZF RECHARGE', 'GW ET', 'HORT+DUNN']
        for scenario in self.scenarios:
            slr_name = op.basename(scenario)
            slr = slr_name[4:7]
            uzf_file = op.join(scenario, '{}.uzfcb2.bin'.format(slr_name))
            try:
                uzfobj = bf.CellBudgetFile(uzf_file, precision='single')
            except Exception:
                # fall back to double precision
                uzfobj = bf.CellBudgetFile(uzf_file, precision='double')
            for i, variable in enumerate(variables):
                uzf_data = uzfobj.get_data(text=variable)
                sys_mat = np.zeros([len(self.ts_day), 74, 51])
                for j in range(len(self.ts_day)):
                    sys_mat[j, :, :] = uzf_data[j]
                # save each variable separately so it can be loaded independently (and faster)
                path_res = op.join(self.path_picks, '{}_{}.npy'.format(varnames[i], slr))
                np.save(path_res, sys_mat)
        print ('UZF arrays pickled to: {}'.format(self.path_picks))

class pickle_ext(pickle_base):
    def __init__(self, path_result):
        pickle_base.__init__(self, path_result)

    def ts_sums(self):
        """ Sum the daily FINF / GW ET arrays written to each scenario's ext dir """
        varnames = ['FINF', 'GW_ET']
        variables = ['finf', 'pet']
        sys_mat = np.zeros([len(self.ts_day), len(varnames) * len(self.scenarios)])
        colnames = []
        for i, scenario in enumerate(self.scenarios):
            slr_name = op.basename(scenario)
            slr = slr_name[4:7]
            ext_dir = op.join(self.path, scenario, 'ext')
            colnames.extend(['{}_{}'.format(var_name, slr) for var_name in varnames])
            for j in range(1, len(self.ts_day)+1):
                for k, v in enumerate(variables):
                    v_file = op.join(ext_dir, '{}_{}.ref'.format(v, j))
                    var = np.fromfile(v_file, sep=' ')
                    sys_mat[j-1, k+i*len(varnames)] = var.reshape(74, 51).sum()

        ext_sys = pd.DataFrame(sys_mat, index=self.ts_day, columns=colnames)
        path_res = op.join(self.path_picks, 'ext_sums.df')
        ext_sys.to_pickle(path_res)
        print ('EXT DataFrame pickled to: {}'.format(path_res))

### multiprocessing.Pool cannot pickle bound class methods,
### so the parallel workers are module-level functions
def _ts_heads(args):
    """ Pull heads from a .fhd file (run in parallel) """
    scenario, path_pickle = args
    slr_name = op.basename(scenario)
    slr = slr_name[4:7]
    head_file = op.join(scenario, op.basename(scenario) + '.fhd')
    try:
        hds = ff.FormattedHeadFile(head_file, precision='single')
    except Exception:
        hds = ff.FormattedHeadFile(head_file, precision='double')
    heads = hds.get_alldata(mflay=0)
    res_path = op.join(path_pickle, 'heads_{}.npy'.format(slr))
    np.save(res_path, heads)

def _sub_var(args):
    """
    All Subcatchments, All Times, One Variable.
    Pickle a npy for each scenario separately.
    Based on subs_rungw.
    """
    param_map = {'inf': 3, 'evap': 2, 'run': 4, 'heads': 6, 'soil': 7}
    scenario, varname, ts, path_pickle = args

    variables = [param_map[varname]]

    slr_name = op.basename(scenario)
    slr = slr_name[4:7]
    out_file = op.join(scenario, '{}.out'.format(slr_name))
    sub_names = [int(name) for name in swmtbx.listdetail(out_file, 'subcatchment')]
    sys_mat = np.zeros([len(ts), len(sub_names)*len(variables)])

    for i, sub in enumerate(sub_names):
        for j, var in enumerate(variables):
            sys_mat[:, j+i*len(variables)] = (swmtbx.extract_arr(out_file,
                                              'subcatchment,{},{}'.format(sub, var))
                                              [:len(ts)])

    path_arr = op.join(path_pickle, 'swmm_{}_{}.npy'.format(varname, slr))
    np.save(path_arr, sys_mat)

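# Each worker above writes one .npy per scenario: heads_<slr>.npy from
# _ts_heads, and swmm_<varname>_<slr>.npy of shape (len(ts), n_subcatchments)
# from _sub_var. The Pool.map calls in __main__ assemble the argument tuples.
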
def main(path_result):
    swmm_obj = pickle_swmm(path_result)
    swmm_obj.sys_out()
    pickle_uzf(path_result).uzf_arrays()
    pickle_ext(path_result).ts_sums()
    return swmm_obj

if __name__ == '__main__':
    start = time.time()
    arguments = docopt(__doc__)
    typecheck = Schema({'PATH' : os.path.exists, '--fmt' : Use(int),
                        '--var' : Or(Use(int), str)}, ignore_extra_keys=True)
    args = typecheck.validate(arguments)
    PATH_result = op.abspath(args['PATH'])

    ### 1 CPU: system, UZF, and ext pickles
    swmm_obj = main(PATH_result)
    scenarios, path_picks = swmm_obj.scenarios, swmm_obj.path_picks
    ts_hr = swmm_obj.ts_hr

    if args['--fmt']:
        print ('Formatting Pickles')
        PickleFmt.main(PATH_result)

    elif args['--var']:
        print ('Pickling SWMM {} to {} ... '.format(args['--var'], path_picks))
        pool = Pool(processes=len(scenarios))
        res = pool.map(_sub_var, zip(scenarios, [args['--var']]*len(scenarios),
                                     [ts_hr]*len(scenarios), [path_picks]*len(scenarios)))

    ### Multiprocessing: one process per scenario
    else:
        print ('Pickling FHD heads to: {}'.format(path_picks))
        pool = Pool(processes=len(scenarios))
        res = pool.map(_ts_heads, zip(scenarios, [path_picks] * len(scenarios)))

        # same pull for each SWMM variable; one Pool per variable
        for varname, label in [('heads', 'Heads'), ('run', 'Runoff'),
                               ('inf', 'Infil'), ('evap', 'Evap')]:
            print ('Pickling SWMM {} to {} ... '.format(label, path_picks))
            pool = Pool(processes=len(scenarios))
            res = pool.map(_sub_var, zip(scenarios, [varname]*len(scenarios),
                                         [ts_hr]*len(scenarios), [path_picks]*len(scenarios)))

        print ('\nFormatting Data ...\n')
        PickleFmt.main(PATH_result)

    end = time.time()
    print ('Pickles made in ~ {} min'.format(round((end-start)/60., 2)))

wrappers/TC_backup.py

+64
@@ -0,0 +1,64 @@
#!/usr/bin/env python

"""
Backup Pickles and Coupled Files to the Time Capsule

Usage:
    TC_backup.py PATH_DEST

Arguments:
    PATH_DEST   Time_Capsule/Thesis_Backups

Examples:
    Call from script:
        TC_backup.py /Volumes/BB_Cap/Thesis_Backups

    Call from terminal:
        TC_backup.py .

Notes:
    The path to the Time Capsule may not resolve when called from a script;
    better to call this from a terminal.
"""

from __future__ import print_function
import os
import os.path as op
import tarfile

from docopt import docopt
from schema import Schema, Use

def make_tarfile(output_dest, source_dir):
    """ Compress source_dir to <output_dest>.tar.gz """
    with tarfile.open('{}.tar.gz'.format(output_dest), "w:gz") as tar:
        tar.add(source_dir, arcname=os.path.basename(source_dir))

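# Illustrative call (paths assumed):
#   make_tarfile('/Volumes/BB_Cap/Thesis_Backups/Results_03-19',
#                '/Volumes/BB_4TB/Thesis/Results_03-19')
# writes Results_03-19.tar.gz under Thesis_Backups.
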
def main(path_dest):
    PATH_src = op.join('/', 'Volumes', 'BB_4TB', 'Thesis')
    if not op.exists(PATH_src):
        raise OSError('Path to Source Incorrect')

    # get list of folders that are already backed up
    exists_full = os.listdir(path_dest)
    # strip off the ending for comparison
    exists = [dirs.split('.')[0] for dirs in exists_full
              if dirs.endswith('tar.gz')]
    # archive only the directories that are not already backed up
    for d in os.listdir(PATH_src):
        d_full = op.join(PATH_src, d)
        # only copy over directories
        if not op.isdir(d_full):
            continue
        if d not in exists:
            make_tarfile(op.join(path_dest, d), d_full)
        else:
            print (d, 'already exists, skipping...')

if __name__ == '__main__':
    arguments = docopt(__doc__)
    typecheck = Schema({'PATH_DEST' : os.path.exists}, ignore_extra_keys=True)
    PATH_tcap = op.abspath(typecheck.validate(arguments)['PATH_DEST'])

    main(PATH_tcap)

wrappers/__init__.py

Whitespace-only changes.

0 commit comments
