Add a try loop around the downloads
parent 9f7a7f14f0
commit 2a5eb235e4
@@ -10,12 +10,23 @@ from os import mkdir, listdir
from re import findall
from time import sleep
from threading import Thread

'''
############## 4chan thread scraper ################
Here we look for particular threads on 4chan and download the images they contain.
This script is designed to look for specific words on 4chan boards and to download all images from the relevant threads.
Use of a VPN is recommended since 4chan is a shady place. Use at your own risk!


. \\
,` ( ` SquiP
( \' "
`-.__)_

'''



def getArgs():
    '''Gets all the arguments passed to the script and returns them in a parse_args()-type object.
    No args
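
The hunk cuts off before the body of getArgs(). For orientation only, here is a minimal sketch of an argparse-based getArgs() consistent with the docstring above; the option names and defaults are assumptions, not part of this commit:

from argparse import ArgumentParser

def getArgs():
    '''Returns a parse_args()-type object (sketch; hypothetical options).'''
    parser = ArgumentParser(description="4chan thread scraper")
    # All option names below are assumed for illustration.
    parser.add_argument("-u", "--url", required=True, help="board URL to watch")
    parser.add_argument("-k", "--keyword", required=True, help="word to look for in threads")
    parser.add_argument("-f", "--folder", default="downloads", help="folder to download images into")
    return parser.parse_args()
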
@@ -133,23 +144,25 @@ def constant_dl(folder, url):
     - folder: folder to dl into
     - url : board to watch
     '''
-    while True:
-        sleep(2)
-        soup = html_get(url)
-        hrefs = thread_finder(soup, keyword)
-        sources = scraper(soup)
-        #item_dl(sources, folder)
+    try:
+        while True:
+            sleep(2)
+            soup = html_get(url)
+            hrefs = thread_finder(soup, keyword)
+            sources = scraper(soup)
+            #item_dl(sources, folder)

-        #Dling all threads found
-
-        #oneshot
-        for href in hrefs:
-            print(f"going after {url}{href}")
-            subsoup = html_get(f"{url}{href}")
-            subsources = scraper(subsoup)
-            print(subsources)
-            item_dl(subsources, folder)
+            #Dling all threads found
+
+            #oneshot
+            for href in hrefs:
+                print(f"going after {url}{href}")
+                subsoup = html_get(f"{url}{href}")
+                subsources = scraper(subsoup)
+                print(subsources)
+                item_dl(subsources, folder)
+    except Exception as e:
+        print(f"Houston, we had a problem: \n{e}")
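
Given the from threading import Thread import shown in the first hunk, the watcher is presumably meant to run in background threads. A hypothetical launch, with the board list and folder name as assumptions:

from threading import Thread

boards = ["https://boards.4chan.org/wg/"]   # assumed example board
watchers = [Thread(target=constant_dl, args=("downloads", board), daemon=True) for board in boards]
for w in watchers:
    w.start()
for w in watchers:
    w.join()   # keep the main thread alive while the watchers run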