@@ -46,7 +46,7 @@ def install_dependencies(dependencies_file):
46
46
DIRECTORY_KEEP = FILENAME_KEEP | set (['/' ])
47
47
EXTENSION_KEEP = set ('.' )
48
48
49
- scraper = cloudscraper .create_scraper ()
49
+ SCRAPER = cloudscraper .create_scraper ()
50
50
51
51
def set_args ():
52
52
global args
@@ -136,7 +136,7 @@ def download(filename, url, directory='.', extension='png', text='', ok=200):
136
136
separation = ' ' * (20 - len (text ))
137
137
print_colored (f'{ text } { separation } - Already exists' , Fore .YELLOW )
138
138
return False
139
- req = scraper .get (url )
139
+ req = SCRAPER .get (url )
140
140
if success (req , text , ok , print_ok = bool (text )):
141
141
data = req .content
142
142
write_file (path , data )
@@ -202,7 +202,7 @@ def parse_chapters_range(chapters, last):
202
202
def get_number(number):
    """Turn a chapter token into an int.

    The literal token 'last' maps to the most recent known chapter
    (``last`` from the enclosing scope); anything else is parsed as an
    integer (a ValueError propagates to the caller for bad input).
    """
    if number == 'last':
        return last
    return int(number)
204
204
205
- ranges = chapters .split (',' )
205
+ ranges = chapters .replace ( ' ' , '' ). split (',' )
206
206
try :
207
207
for r in ranges :
208
208
split = r .split ('..' )
@@ -213,10 +213,22 @@ def get_number(number):
213
213
except ValueError :
214
214
error ('Invalid chapters format' )
215
215
216
def filename_chapter_range(sorted_chapters):
    """Build a compact, filename-friendly description of the chapter set.

    Consecutive chapters are collapsed into 'start-end' spans, isolated
    chapters stay as plain numbers, and spans are joined with commas,
    e.g. [1, 2, 3, 5, 7, 8] -> '1-3,5,7-8'.

    Args:
        sorted_chapters: chapter numbers in ascending order (may be empty).

    Returns:
        The comma-separated range string; '' for an empty input.
    """
    spans = []  # list of (start, end) inclusive runs

    # Idiomatic truthiness check instead of len(...) > 0.
    if sorted_chapters:
        start = sorted_chapters[0]
        end = start

        for chapter in sorted_chapters:
            # A gap greater than 1 closes the current run and opens a new one.
            if chapter > end + 1:
                spans.append((start, end))
                start = chapter
            end = chapter

        spans.append((start, end))  # close the final run

    # Generator expression instead of map+lambda; the original lambda's
    # parameter shadowed the builtin 'range'.
    return ','.join(f'{a}-{b}' if a != b else str(a) for a, b in spans)
220
232
221
233
def split_rotate_2_pages(rotate):
    """Map the rotate flag to the '1'/'0' string the page-split option expects."""
    return '1' if rotate else '0'
@@ -292,7 +304,7 @@ def online_search():
292
304
}
293
305
294
306
# Alternative Search: https://inmanga.com/OnMangaQuickSearch/Source/QSMangaList.json
295
- search = scraper .post (SEARCH_URL , data = data , headers = headers )
307
+ search = SCRAPER .post (SEARCH_URL , data = data , headers = headers )
296
308
exit_if_fails (search )
297
309
298
310
return BeautifulSoup (search .content , 'html.parser' ).find_all ("a" , href = True , recursive = False )
@@ -367,7 +379,7 @@ def online_search():
367
379
if args .cache :
368
380
all_chapters = [int (chapter [0 ]) for chapter in folders (directory )]
369
381
else :
370
- chapters_json = scraper .get (CHAPTERS_WEBSITE + uuid )
382
+ chapters_json = SCRAPER .get (CHAPTERS_WEBSITE + uuid )
371
383
exit_if_fails (chapters_json )
372
384
chapters_full = load_json (chapters_json .content , 'data' , 'result' )
373
385
all_chapters = [int (chapter ['Number' ]) for chapter in chapters_full ]
@@ -377,10 +389,11 @@ def online_search():
377
389
print_colored (f'Last downloaded chapter: { last } ' , Fore .YELLOW , Style .BRIGHT );
378
390
379
391
if args .chapters :
380
- args .chapters = args .chapters .replace (' ' , '' )
381
392
parse_chapters_range (args .chapters , last )
382
393
else :
383
394
CHAPTERS .update (all_chapters )
395
+
396
+ CHAPTERS = sorted (CHAPTERS )
384
397
385
398
if args .cache :
386
399
for chapter in CHAPTERS :
@@ -394,7 +407,6 @@ def online_search():
394
407
if number in CHAPTERS :
395
408
uuids [number ] = chapter ['Identification' ]
396
409
397
- CHAPTERS = sorted (CHAPTERS )
398
410
print_dim (f'{ len (CHAPTERS )} chapter{ plural (len (CHAPTERS ))} will be downloaded - Cancel with Ctrl+C' )
399
411
400
412
if not args .cache :
@@ -410,7 +422,7 @@ def online_search():
410
422
url = CHAPTER_PAGES_WEBSITE + uuid
411
423
412
424
chapter_dir = chapter_directory (manga , chapter )
413
- page = scraper .get (url )
425
+ page = SCRAPER .get (url )
414
426
if success (page , print_ok = False ):
415
427
html = BeautifulSoup (page .content , 'html.parser' )
416
428
pages = html .find (id = 'PageList' ).find_all (True , recursive = False )
@@ -427,7 +439,7 @@ def online_search():
427
439
extension = f'.{ args .format .lower ()} '
428
440
args .format = args .format .upper ()
429
441
430
- chapter_range = args . chapters . replace ( 'last' , str ( last )) if args . chapters else filename_chapter_range ()
442
+ chapter_range = filename_chapter_range (CHAPTERS )
431
443
432
444
if args .format != 'PNG' :
433
445
print_colored (f'Converting to { args .format } ...' , Fore .BLUE , Style .BRIGHT )
@@ -480,4 +492,6 @@ def online_search():
480
492
os .rename (f'{ MANGA_DIR } /{ chapter } { extension } ' , path )
481
493
print_colored (f'DONE: { os .path .abspath (path )} ' , Fore .GREEN , Style .BRIGHT )
482
494
else :
495
+ if len (CHAPTERS ) == 1 :
496
+ directory = os .path .abspath (chapter_directory (manga , CHAPTERS [0 ]))
483
497
print_colored (f'DONE: { directory } ' , Fore .GREEN , Style .BRIGHT )
0 commit comments