Skip to content

Commit be240d1

Browse files
Merge pull request #18 from 4ARMED/seleniumwire
Seleniumwire
2 parents b66c91a + c1c9629 commit be240d1

File tree

4 files changed

+23
-11
lines changed

4 files changed

+23
-11
lines changed

pyproject.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,10 @@ urls = {Homepage = "https://github.com/4armed/sri-check"}
1111
requires-python = ">=3.6"
1212
dependencies = [
1313
"beautifulsoup4>=4.0",
14+
"blinker==1.7.0", # Required for Seleniumwire
1415
"lxml>=4.8",
1516
"requests>=2.0",
16-
"selenium>=4.10",
17+
"selenium>=4.25",
1718
]
1819

1920
[project.readme]

requirements.txt

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
beautifulsoup4>=4.0
22
lxml>=4.8
33
requests>=2.0
4-
selenium>=4.10
4+
selenium>=4.25

sricheck/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "1.10.0"
1+
__version__ = "1.12.11"

sricheck/sricheck.py

+19-8
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import argparse
44
import base64
55
import hashlib
6+
import os
67
import re
78
import sys
89
import requests
@@ -91,10 +92,9 @@ def is_allowlisted(self, netloc):
9192

9293
def get_html(self):
9394
if self.browser:
94-
from selenium import webdriver
95-
from selenium.webdriver.chrome.options import Options
95+
from seleniumwire import webdriver
9696

97-
chrome_options = Options()
97+
chrome_options = webdriver.ChromeOptions()
9898
chrome_options.add_argument("--headless")
9999
chrome_options.add_argument("--no-sandbox")
100100
chrome_options.add_argument("--disable-dev-shm-usage")
@@ -104,20 +104,31 @@ def get_html(self):
104104
}
105105
}
106106

107-
browser = webdriver.Chrome(options=chrome_options)
107+
browser = webdriver.Chrome(
108+
options=chrome_options,
109+
seleniumwire_options={
110+
'proxy': {
111+
'http': os.environ.get("http_proxy"),
112+
'https': os.environ.get("https_proxy"),
113+
}
114+
}
115+
)
108116

109117
def interceptor(request):
110-
request.headers.update(self.headers)
118+
for key, value in self.headers.items():
119+
del request.headers[key]
120+
request.headers[key] = value
111121

112122
browser.request_interceptor = interceptor
113123
browser.get(self.url)
114-
return browser.execute_script("return document.documentElement.outerHTML;")
124+
content = browser.execute_script("return document.documentElement.outerHTML;")
125+
126+
browser.quit()
127+
return content
115128
else:
116129
# file deepcode ignore Ssrf: The purpose of the script is to parse remote URLs from the CLI
117-
118130
return requests.get(self.url, headers=self.headers).content
119131

120-
121132
def get_remote_resource_tags(self, html):
122133
soup = BeautifulSoup(html, 'lxml')
123134

0 commit comments

Comments (0)