Coverage for pesummary/tests/webpage_test.py: 89.0%

228 statements  

coverage.py v7.4.4, created at 2024-05-02 08:42 +0000

# Licensed under an MIT style license -- see LICENSE.md

import os
import socket
import shutil
from glob import glob
import numpy as np

from pesummary.core.webpage import webpage
from pesummary.utils.samples_dict import MultiAnalysisSamplesDict
from .base import data_dir

from bs4 import BeautifulSoup

import pytest
import tempfile

tmpdir = tempfile.TemporaryDirectory(prefix=".", dir=".").name
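# NOTE: only the *name* of this TemporaryDirectory is used; the directory
# itself is (re)created by each setup_method below and removed again in
# teardown_method, so the tests do not appear to rely on tempfile's own cleanup.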

__author__ = ["Charlie Hoy <charlie.hoy@ligo.org>"]


class TestWebpage(object):

    def setup_method(self):
        directory = tmpdir
        try:
            os.mkdir(directory)
        except OSError:
            shutil.rmtree(directory)
            os.mkdir(directory)

    def teardown_method(self):
        directory = tmpdir
        try:
            shutil.rmtree(directory)
        except OSError:
            pass

    def test_make_html(self):
        webdir = tmpdir
        assert not os.path.isfile("{}/home.html".format(tmpdir))
        webpage.make_html(webdir, pages=["home"])
        assert os.path.isfile("{}/home.html".format(tmpdir))

    def test_open_html(self):
        webdir = tmpdir
        baseurl = "https://example"
        open("{}/home.html".format(tmpdir), "a").close()
        f = webpage.open_html(webdir, baseurl, "home")
        assert isinstance(f, webpage.page)


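# For context (as exercised above): ``webpage.make_html(webdir, pages=[...])``
# appears to write a ``<page>.html`` file in ``webdir`` for each requested
# page, and ``webpage.open_html`` returns a ``webpage.page`` object wrapping
# that file; the TestPage class below exercises the content-building methods
# of that object.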

class TestPage(object):

    def setup_method(self):
        webdir = tmpdir
        try:
            os.mkdir(webdir)
        except OSError:
            shutil.rmtree(webdir)
            os.mkdir(webdir)
        os.mkdir("{}/css".format(tmpdir))
        f = open("{}/css/command_line.css".format(tmpdir), "w")
        f.close()
        baseurl = "https://example"
        webpage.make_html(webdir, pages=["home"])
        self.html = webpage.open_html(webdir, baseurl, "home")

    def teardown_method(self):
        directory = tmpdir
        try:
            shutil.rmtree(directory)
        except OSError:
            pass

    def open_and_read(self, path):
        with open(path) as f:
            return f.readlines()

    def test_add_content(self):
        content = "testing\n"
        self.html.add_content(content, indent=2)
        self.html.close()
        f = self.open_and_read("{}/home.html".format(tmpdir))
        assert any(elem == "  testing\n" for elem in f)

    def test_header(self):
        self.html.make_header(approximant="approx")
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            assert str(soup.h7.string) == 'None'

    def test_footer(self):
        self.html.make_footer()
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            assert "This page was produced by" in str(soup.p)
            assert soup.div["class"] == ['jumbotron']

    @pytest.mark.parametrize('links', [("other", "example")])
    def test_navbar(self, links):
        self.html.make_navbar(links)
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            all_links = soup.find_all("a", class_="nav-link")
            assert len(all_links) == 4
            assert all_links[0].text == "other"
            assert all_links[1].text == "example"

    @pytest.mark.parametrize('headings, contents', [(["column1", "column2"],
        [["entry1", "entry2"], ["entry3", "entry4"]]),])
    def test_table(self, headings, contents):
        self.html.make_table(headings=headings, contents=contents)
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            columns = soup.find_all("th")
            assert len(columns) == 2
            assert columns[0].text == "column1"
            assert columns[1].text == "column2"
            entries = soup.find_all("td")
            assert len(entries) == 4
            for num, i in enumerate(entries):
                assert i.text == "entry{}".format(num + 1)

    @pytest.mark.parametrize('language', [('ini'),])
    def test_code_block(self, language):
        with open(data_dir + "/example.ini", 'r') as f:
            contents = f.read()
        styles = self.html.make_code_block(language='ini', contents=contents)
        with open('{}/example_config.css'.format(tmpdir), 'w') as f:
            f.write(styles)
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            assert soup.div["class"] == ["highlight"]
            all_entries = soup.find_all("span")
            _text = [ii.text for ii in all_entries]
            for i in contents.split("\n"):
                try:
                    assert i in _text
                except AssertionError:
                    # the string 'example=' in the config file splits to
                    # 'example' and '=' in the html code block
                    assert i[:-1] in _text
                    assert i[-1] in _text

    def test_table_of_images(self):
        contents = [["image1.png"], ["image2.png"]]
        self.html.make_table_of_images(contents=contents)
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            all_images = soup.find_all("img")
            assert len(all_images) == 2
            assert all_images[0]["src"] == "image1.png"
            assert all_images[1]["src"] == "image2.png"

    def test_insert_image(self):
        path = "./path/to/image.png"
        self.html.insert_image(path)
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            all_images = soup.find_all("img")
            assert len(all_images) == 1
            assert all_images[0]["src"] == "./path/to/image.png"

    def test_accordian(self):
        headings = ["example"]
        content = ["./path/to/image.png"]
        self.html.make_accordian(headings=headings, content=content)
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            assert soup.img["src"] == "./path/to/image.png"
            assert soup.button["class"] == ["btn", "btn-link", "collapsed"]
            assert len(soup.find_all("div", class_="accordian")) == 1
            assert "example" in soup.button.text

    def test_search_bar(self):
        self.html.make_search_bar()
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            assert soup.find_all("button", class_="")[0].text == "Submit"

    def test_modal_carousel(self):
        images = ["./path/to/image.png"]
        self.html.make_modal_carousel(images=images)
        self.html.close()
        with open("{}/home.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
            images = soup.find_all("img")
            assert len(images) == 1
            assert images[0]["src"] == "./path/to/image.png"
            assert len(soup.find_all("div", class_="carousel-item")) == 1
            assert len(soup.find_all("div", class_="modal-lg")) == 1
            assert len(soup.find_all("div", class_="carousel")) == 1


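# The sketch below is illustrative only (its name does not start with "test",
# so pytest will not collect it); it simply strings together the calls
# exercised by TestPage above, with placeholder arguments.
def _example_page_sketch(webdir=tmpdir, baseurl="https://example"):
    """Build a small page using the methods exercised by TestPage."""
    webpage.make_html(webdir, pages=["home"])
    html = webpage.open_html(webdir, baseurl, "home")
    html.make_header(approximant="approx")
    html.make_navbar(("other", "example"))
    html.make_table(
        headings=["column1", "column2"],
        contents=[["entry1", "entry2"], ["entry3", "entry4"]]
    )
    html.insert_image("./path/to/image.png")
    html.make_footer()
    html.close()
    return html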

class TestWebpageGeneration(object):
    """Test the `_WebpageGeneration` class from `pesummary.gw.webpage.main`
    """
    def setup_method(self):
        """Generate webpages for two analyses of random samples
        """
        from pesummary.gw.webpage.main import _WebpageGeneration

        self.labels = ["one", "two"]
        self.samples = MultiAnalysisSamplesDict({
            label: {
                param: np.random.uniform(0.2, 1.0, 1000) for param in
                ["chirp_mass", "mass_ratio"]
            } for label in self.labels
        })
        self.webpage = _WebpageGeneration(
            webdir=tmpdir, labels=self.labels, samples=self.samples,
            pepredicates_probs={label: None for label in self.labels},
            same_parameters=["chirp_mass", "mass_ratio"]
        )
        self.webpage.generate_webpages()
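        # generate_webpages() is expected to write (amongst other pages) the
        # files read back by the tests below: {tmpdir}/html/one_one.html,
        # {tmpdir}/html/two_two.html and {tmpdir}/html/Comparison_chirp_mass.html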

    def test_comparison_stats(self):
        """Test that the Jensen-Shannon divergences quoted on the webpages
        match an independent calculation
        """
        from scipy.spatial.distance import jensenshannon
        from pesummary.utils.utils import jensen_shannon_divergence
        from pesummary.utils.bounded_1d_kde import ReflectionBoundedKDE

        comparison_stats = self.webpage.comparison_stats
        for param in ['chirp_mass', 'mass_ratio']:
            if param == "chirp_mass":
                xlow = 0.
                xhigh = None
            else:
                xlow = 0.
                xhigh = 1.
            js = comparison_stats[param][1][0][1]
            samples = [
                self.samples[label][param] for label in self.labels
            ]
            x = np.linspace(np.min(samples), np.max(samples), 100)
            _js = jensenshannon(
                ReflectionBoundedKDE(samples[0], xlow=xlow, xhigh=xhigh)(x),
                ReflectionBoundedKDE(samples[1], xlow=xlow, xhigh=xhigh)(x)
            )**2
            np.testing.assert_almost_equal(js, _js, 5)
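            # Note: scipy's jensenshannon returns the Jensen-Shannon
            # *distance*, i.e. the square root of the divergence, hence the
            # squaring above before comparing with the value quoted by
            # pesummary.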

    def test_displayed_label_summary_table(self):
        """Test that the summary table displayed on the webpages shows the
        correct information
        """
        for label in self.labels:
            with open("{}/html/{}_{}.html".format(tmpdir, label, label)) as fp:
                soup = BeautifulSoup(fp, features="html.parser")
            table = soup.find(lambda tag: tag.name == 'table')
            rows = table.find_all(lambda tag: tag.name == 'tr')
            _tags = ["th"] + ["td"] * (len(rows) - 1)
            data = [
                [
                    hh.string for hh in _row.find_all(
                        lambda tag: tag.name == _tags[num]
                    )
                ] for num, _row in enumerate(rows)
            ]
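            # data[0] holds the <th> column headers; data[1:] holds the <td>
            # rows, with the parameter name in the first cell of each row.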

            _samples = self.samples[label]
            _key_data = _samples.key_data
            for row in data[1:]:
                for num, header in enumerate(data[0][1:]):
                    try:
                        np.testing.assert_almost_equal(
                            _key_data[row[0]][header], float(row[num + 1]), 3
                        )
                    except ValueError:
                        assert _key_data[row[0]][header] is None
                        assert row[num + 1] == 'None'

    def test_displayed_comparison_parameter_summary_table(self):
        """Test that the comparison summary table displayed on the webpages
        shows the correct information
        """
        with open("{}/html/Comparison_chirp_mass.html".format(tmpdir)) as fp:
            soup = BeautifulSoup(fp, features="html.parser")
        table = soup.find(lambda tag: tag.name == 'table')
        rows = table.find_all(lambda tag: tag.name == 'tr')
        _tags = ["th"] + ["td"] * (len(rows) - 1)
        data = [
            [
                hh.string for hh in _row.find_all(
                    lambda tag: tag.name == _tags[num]
                )
            ] for num, _row in enumerate(rows)
        ]
        _key_data = {
            label: self.samples[label].key_data["chirp_mass"] for label in
            self.labels
        }
        for row in data[1:]:
            for num, header in enumerate(data[0][1:]):
                try:
                    np.testing.assert_almost_equal(
                        _key_data[row[0]][header], float(row[num + 1]), 3
                    )
                except ValueError:
                    assert _key_data[row[0]][header] is None
                    assert row[num + 1] == 'None'