"""Script entry point: run the project's web crawler.

The original file had been collapsed onto a single line, which turned
everything after the first ``#`` (including the live ``crawler.crawl()``
call) into a comment. This restores the intended multi-line structure and
drops a commented-out draft of a ``do_it()`` page-listing loop that was
never enabled.
"""

from Crawler.Crawler import Crawler


def main() -> None:
    """Instantiate the crawler and run a full crawl (network I/O)."""
    crawler = Crawler()
    crawler.crawl()


if __name__ == "__main__":
    main()