Files
clients/WebBasedCrawler/base/dbdata.py
2017-07-18 11:12:43 +09:00

52 lines
1.5 KiB
Python

from pymysql.connections import Connection
import datetime
from numbers import Number
class DataDBRow:
    """Mutable record mirroring one row of the crawler's ``data_<n>`` MySQL tables.

    Each attribute corresponds to a table column; ``get_insert_query``
    serializes the current attribute values into a single INSERT statement.
    """

    def __init__(self):
        # Platform (source site) identification.
        self.platform_name = None
        self.platform_form = None
        self.platform_title = None
        # Article payload and metadata.
        self.article_form = None
        self.article_parent = None
        self.article_id = None
        self.article_nickname = None
        self.article_title = None
        self.article_data = None
        self.article_url = None
        self.article_hit = 0
        self.article_date = None
        self.article_order = 0
        self.article_profile = None
        self.article_profileurl = None
        self.platform_id = None
        self.keyword_id = -1       # -1 marks "no keyword associated"
        self.reply_url = None
        self.etc = None

    def get_insert_query(self, conn, db_num):
        """Build an ``INSERT INTO data_<db_num>`` statement for this row.

        Parameters
        ----------
        conn :
            A pymysql connection (anything exposing ``escape``); used to
            SQL-escape non-numeric values (strings, None, datetimes, ...).
        db_num :
            Suffix selecting the sharded table, e.g. ``3`` -> ``data_3``.

        Returns
        -------
        str
            The complete INSERT statement with values inlined.
        """
        # Column names come from a pristine instance so the statement always
        # matches the table schema, even if extra attributes were later set
        # on self (such attributes are deliberately excluded).
        columns = list(DataDBRow().__dict__)
        rendered = []
        for name in columns:
            value = self.__dict__[name]
            if isinstance(value, Number):
                # Numeric literals are safe to inline verbatim.
                rendered.append(str(value))
            else:
                # Everything else is escaped/quoted by the driver
                # (pymysql renders None as NULL).
                rendered.append(conn.escape(value))
        return 'insert into data_{} ({}) values ({})'.format(
            db_num, ', '.join(columns), ', '.join(rendered))