Add file sorter

2018-05-24 03:50:40 +09:00
parent f32ff66d2f
commit 917894fcac
3 changed files with 36 additions and 16 deletions

Crawler/Crawler.py
View File

@@ -2,6 +2,7 @@ import requests
import urllib
import bs4
import re
import os
from .Setting import Setting
from .ProxyHandler import ProxyHandler
@@ -203,3 +204,19 @@ class Crawler:
        Logger.log('Crawling start')
        self.crawl_torrent()
        Logger.log('Crawling finished')

class Sorter:
    @staticmethod
    def move_files():
        setting = Setting()
        if 'file_download_path' not in setting.settings or 'file_move_path' not in setting.settings:
            return
        file_list = os.listdir(setting.settings['file_download_path'])
        for filename in file_list:
            for video in setting.settings['video']:
                if video['title'] in filename:
                    old_path = os.path.join(setting.settings['file_download_path'], filename)
                    # Each title gets its own subdirectory under the move path
                    new_path = os.path.join(setting.settings['file_move_path'], video['title'], filename)
                    os.makedirs(os.path.dirname(new_path), exist_ok=True)
                    os.rename(old_path, new_path)
                    break  # file has been moved; stop matching further titles
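
For reference, Sorter.move_files only runs when the settings expose both paths, and it matches files against each video's title. A hypothetical sketch of the settings shape it assumes (the key names come from the code above; the paths and titles are made up):

settings = {
    'file_download_path': '/downloads/',
    'file_move_path': '/sorted/',
    'video': [
        {'title': 'Example Show', 'keyword': 'Example Show 1080p'},
    ],
}

Note that os.rename only works within a single filesystem; shutil.move would be the drop-in replacement if the download and move paths can sit on different mounts.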

Crawler/Setting.py
View File

@@ -61,6 +61,20 @@ class Setting:
            Logger.log(e)
            exit()
        if 'file_download_path' in self.settings and self.settings['file_download_path'][-1] != '/':
            self.settings['file_download_path'] += '/'
        if 'file_move_path' in self.settings:
            if self.settings['file_move_path'][-1] != '/':
                self.settings['file_move_path'] += '/'
            # Create the move target up front so Sorter.move_files can rename into it
            if not os.path.exists(self.settings['file_move_path']):
                try:
                    os.makedirs(self.settings['file_move_path'])
                except Exception as e:
                    Logger.log(e)
                    exit()
            video['keyword'] += self.settings['keyword_append']

    def load_downloaded(self):
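
On Python 3.2+ the exists check and the makedirs call above can be folded into one step with exist_ok=True, which also closes the race between checking and creating. A minimal sketch of that variant, using the same names as the block above:

        if 'file_move_path' in self.settings:
            if self.settings['file_move_path'][-1] != '/':
                self.settings['file_move_path'] += '/'
            try:
                # exist_ok=True makes this a no-op when the directory already exists
                os.makedirs(self.settings['file_move_path'], exist_ok=True)
            except OSError as e:
                Logger.log(e)
                exit()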

Main.py
View File

@@ -1,21 +1,10 @@
from Crawler.Crawler import Crawler
from Crawler.Crawler import Sorter
if __name__ == '__main__':
    file_sorter = Sorter()
    file_sorter.move_files()
    crawler = Crawler()
    crawler.crawl()
# import requests
# import bs4
# resp = requests.get('http://www.gatherproxy.com')
# soup = bs4.BeautifulSoup(resp.text, 'lxml')
# # print(soup)
# table = soup.select('table#tblproxy')
# trs = table[0].select('tr')
# for tr in trs[2:5]:
# tds = tr.select('td')
# if len(tds) < 2:
# continue
# ip, port = tds[1].text, tds[2].text
# print('ip: {}, port: {}'.format(ip, port))