csv出力のコード修正その2
This commit is contained in:
parent
8b7e7a2a65
commit
d640155e75
1 changed file with 13 additions and 5 deletions
18
dislocker.py
18
dislocker.py
|
@@ -6,6 +6,7 @@ import hashlib
|
|||
import string
|
||||
import random
|
||||
import csv
|
||||
from datetime import datetime
|
||||
|
||||
class DL():
|
||||
def __init__(self):
|
||||
|
@@ -190,23 +191,30 @@ class Bot(discord.Client):
|
|||
cursor.close()
|
||||
return result
|
||||
|
||||
def format_datetime(self, value):
    """Render *value* as 'YYYY-MM-DD HH:MM:SS' if it is a datetime.

    Any non-datetime value is returned unchanged, so this is safe to map
    over mixed database row fields when exporting to CSV.
    """
    if not isinstance(value, datetime):
        # Not a datetime (e.g. str, int, None) — pass through untouched.
        return value
    return value.strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
def report_export(self, **kwargs):
|
||||
try:
|
||||
csv_file_path = self.export_dir_path + "pc_usage_history.csv"
|
||||
main_table = "pc_usage_history"
|
||||
related_table = "club_member"
|
||||
cursor = dislocker.db.cursor()
|
||||
# メインテーブルの列情報を取得(user_idを除く)
|
||||
cursor.execute(sql.SQL("SELECT * FROM {} LIMIT 0").format(sql.Identifier(main_table)))
|
||||
cursor.execute(psycopg2.sql.SQL("SELECT * FROM {} LIMIT 0").format(psycopg2.sql.Identifier(main_table)))
|
||||
main_columns = [desc[0] for desc in cursor.description if desc[0] != 'member_id']
|
||||
|
||||
# クエリを作成(列名を明確に指定)
|
||||
query = sql.SQL("""
|
||||
query = psycopg2.sql.SQL("""
|
||||
SELECT {main_columns}, {related_table}.name
|
||||
FROM {main_table}
|
||||
LEFT JOIN {related_table} ON {main_table}.member_id = {related_table}.id
|
||||
""").format(
|
||||
main_columns=sql.SQL(', ').join([sql.SQL("{}.{}").format(sql.Identifier(main_table), sql.Identifier(col)) for col in main_columns]),
|
||||
main_table=sql.Identifier(main_table),
|
||||
related_table=sql.Identifier(related_table)
|
||||
main_columns=psycopg2.sql.SQL(', ').join([psycopg2.sql.SQL("{}.{}").format(psycopg2.sql.Identifier(main_table), psycopg2.sql.Identifier(col)) for col in main_columns]),
|
||||
main_table=psycopg2.sql.Identifier(main_table),
|
||||
related_table=psycopg2.sql.Identifier(related_table)
|
||||
)
|
||||
|
||||
cursor.execute(query)
|
||||
|
|
Loading…
Reference in a new issue