#!/usr/bin/env python3
import functools
import os
import hashlib
import subprocess
import json
import re
import shutil
import sys
import gzip

VERSION = 5  # increase to force a SPIFFS re-flash

# Subprocess output is decoded as "mbcs" (the ANSI code page) on Windows, UTF-8 elsewhere.
PROCESS_ENC = "mbcs" if sys.platform.startswith("win") else "utf-8"

def autodisable_old_lib(base, env):
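    """Return True if this copy of the library should be skipped.

    Compares the newest mtime under this copy's web/ directory against the
    newest one seen so far (cached in .gridui_mtime in the project dir).
    The newest copy wins; strictly older duplicates report True so the
    caller can ignore them.
    """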
    web_dir = os.path.join(base, "web")
    if not os.path.isdir(web_dir):
        return False

    saved_mtime = 0
    our_mtime = 1
    for root, dirs, files in os.walk(web_dir):
        for name in files:
            our_mtime = max(os.path.getmtime(os.path.join(root, name)), our_mtime)
    our_mtime = int(our_mtime)

    mtime_path = os.path.join(env.get("PROJECT_DIR"), ".gridui_mtime")
    if os.path.exists(mtime_path):
        with open(mtime_path, "r") as f:
            saved_mtime = int(f.read())

    if our_mtime > saved_mtime:
        with open(mtime_path, "w") as f:
            f.write("%d" % our_mtime)
        return False
    return our_mtime != saved_mtime

def generate_amalgamations(source=None, target=None, env=None, base="."):
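    """Build the data/ directory from web/.

    Files named <number>_<name>.<ext> are concatenated, sorted by basename,
    into data/combined.<ext> (e.g. hypothetical 00_gridui.js and 10_app.js
    would become data/combined.js); everything else is copied over verbatim.
    Every resulting file also gets a pre-compressed .gz sibling.
    """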
    web_dir = os.path.join(base, "web")
    if not os.path.isdir(web_dir):
        return

    data_dir = env.get("PROJECT_DATA_DIR", env.get("PROJECTDATA_DIR"))

    # Probably unsafe from a library
    #if os.path.isdir(data_dir):
    #    shutil.rmtree(data_dir)

    try:
        os.mkdir(data_dir, 0o755)
    except OSError:
        pass

    fn_re = re.compile(r"^[0-9]+_.+\.([^.]+)$")

    to_combine = {}
    for root, dirs, files in os.walk(web_dir):
        for name in files:
            m = fn_re.match(name)
            path = os.path.join(root, name)
            if not m:
                dst_path = os.path.join(data_dir, os.path.relpath(path, web_dir))
                os.makedirs(os.path.dirname(dst_path), exist_ok=True)
                shutil.copy(path, dst_path)
            else:
                to_combine.setdefault(m.group(1), []).append(path)

    for ext, files in to_combine.items():
        files.sort(key=lambda p: os.path.basename(p))

        dst_path = os.path.join(data_dir, "combined." + ext)
        print("Combining to %s:\n    %s" % (dst_path, "\n    ".join(files)))
        with open(dst_path, "wb") as dst:
            for path in files:
                with open(path, "rb") as src:
                    shutil.copyfileobj(src, dst)
                dst.write(b"\n")

    for root, _, files in os.walk(data_dir):
        for fn in files:
            if fn.endswith(".gz"):
                continue
            path = os.path.join(root, fn)
            with open(path, "rb") as src:
                # mtime=0 keeps the .gz output byte-identical across builds
                dst = gzip.GzipFile(path + ".gz", "wb", 9, mtime=0)
                try:
                    shutil.copyfileobj(src, dst)
                finally:
                    dst.close()

def after_upload(source, target, env, base="."):
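    """Run "pio run -t uploadfs" after firmware upload, but only when needed.

    Hashes VERSION, the web/ tree, the partition table CSV and the connected
    serial devices' hwids; if the SHA-1 matches the one cached in
    .gridui_uploadfs_sha1, the SPIFFS image is assumed up to date and the
    filesystem upload is skipped.
    """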
    web_dir = os.path.join(base, "web")
    if not os.path.isdir(web_dir):
        return

    hasher = hashlib.sha1()
    hasher.update(str(VERSION).encode("utf-8"))

    # Walk in sorted order so the hash is stable across filesystems.
    for root, dirs, files in os.walk(web_dir):
        dirs.sort()
        for name in sorted(files):
            with open(os.path.join(root, name), "rb") as f:
                for chunk in iter(lambda: f.read(32768), b""):
                    hasher.update(chunk)

    try:
        with open(env.get("PARTITIONS_TABLE_CSV"), "rb") as f:
            hasher.update(f.read())
    except OSError:
        pass

    dev_list = subprocess.check_output(["pio", "device", "list", "--serial", "--json-output"], env=env["ENV"])
    dev_list = json.loads(dev_list.decode(PROCESS_ENC))
    for d in dev_list:
        hasher.update(d.get("hwid", "").encode("utf-8"))

    current_sha1 = hasher.hexdigest()

    state_path = os.path.join(env.get("PROJECT_DIR"), ".gridui_uploadfs_sha1")
    if os.path.exists(state_path):
        with open(state_path, "r") as f:
            if f.read() == current_sha1:
                print("SPIFFS data are the same.")
                return

    print("SPIFFS data changed, running uploadfs target!")
    env.Execute("pio run -t uploadfs")

    with open(state_path, "w") as f:
        f.write(current_sha1)
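
# When PlatformIO runs this file as an extra script, SCons injects Import()
# into the global namespace; when the file is executed directly, it falls
# back to the small CLI in the elif branch below.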
if "Import" in locals():
    #print(os.getcwd(), env.Dump())
    Import("env")

    OUR_SCRIPT_NAME = "rbgridui_post_extra_script.py"

    extra_scripts = env.GetProjectOption("extra_scripts", [])
    this_post_path = "post:" + os.path.abspath(OUR_SCRIPT_NAME)

    base = os.path.abspath(".")
    project_dir_abs = os.path.abspath(env.get("PROJECT_DIR"))

    if base != project_dir_abs and this_post_path not in extra_scripts:
        # The library.json extraScripts are only pre:, and we need a post: script,
        # so let's add one using PlatformIO's internal APIs - what's the worst that can happen, right?
        extra_scripts.append(this_post_path)

        cfg = env.GetProjectConfig()
        cfg.set("env:" + env["PIOENV"], "extra_scripts", extra_scripts)
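        # Illustrative effect (paths are hypothetical): the env now behaves as
        # if platformio.ini contained
        #   [env:esp32dev]
        #   extra_scripts =
        #       post:/home/user/project/.pio/libdeps/esp32dev/Esp32-RBGridUI/rbgridui_post_extra_script.py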
    else:
        # Since some version, pio executes this script in PROJECT_DIR instead of
        # the library's directory; this tries to work around that. Autodisabling
        # old versions probably does not work anymore, but is it still needed?
        if base == project_dir_abs:
            for ex in extra_scripts:
                if not ex.startswith("post:") or not ex.endswith("/" + OUR_SCRIPT_NAME):
                    continue
                base = os.path.dirname(ex[5:])
                break

        if base == project_dir_abs:
            print("Failed to figure out Esp32-RBGridUI path, skipping auto-upload")
        else:
            if autodisable_old_lib(base, env):
                print("Autodisabling %s" % base)
            else:
                print("Using %s" % base)
                env.AddPreAction("$BUILD_DIR/spiffs.bin", functools.partial(generate_amalgamations, base=base))
                env.AddPostAction("upload", functools.partial(after_upload, base=base))
elif __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="cmd")

    sub = subparsers.add_parser("generate", help="Generate amalgamations from web/ to data/")
    sub.add_argument("--base", help="Base dir of the library", default=os.path.dirname(__file__))
    sub.add_argument("--output", "-o", help="Output directory", default="data")

    args = parser.parse_args()

    env = {
        "PROJECTDATA_DIR": args.output,
    }

    if args.cmd == "generate":
        generate_amalgamations(env=env, base=args.base)
    else:
        parser.print_help()
        sys.exit(1)
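
# Standalone usage (illustrative invocation):
#   python3 rbgridui_post_extra_script.py generate --base /path/to/Esp32-RBGridUI -o data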