fix-os-error-after-completion
@@ -2,6 +2,7 @@
 """
 Core scaev module for Scaev Auctions
 """
+import os
 import sqlite3
 import asyncio
 import time
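The only change in the hunk above is the module-level `import os`. Presumably this is the "os error" named in the commit title: the `export_to_files` method added below relies on `os.path.dirname` and `os.path.join`, and a missing module-level import only surfaces when that code path actually runs, i.e. after scraping has completed. A minimal standalone sketch of that likely failure mode (not part of the commit; `export_dir` is a made-up name):

# Without "import os" at module level, defining the function still succeeds;
# the first call into os.path then raises NameError at run time.
def export_dir(db_path: str) -> str:      # hypothetical helper, for illustration only
    return os.path.dirname(db_path)       # NameError: name 'os' is not defined

export_dir('/data/scaev/cache.db')        # fails here, at export time, not at import time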
@@ -471,3 +472,62 @@ class TroostwijkScraper:
 
         await browser.close()
         return results
+
+    def export_to_files(self) -> Dict[str, str]:
+        """Export database to CSV/JSON files"""
+        import sqlite3
+        import json
+        import csv
+        from datetime import datetime
+
+        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+        output_dir = os.path.dirname(self.cache.db_path)
+
+        conn = sqlite3.connect(self.cache.db_path)
+        conn.row_factory = sqlite3.Row
+        cursor = conn.cursor()
+
+        files = {}
+
+        # Export auctions
+        cursor.execute("SELECT * FROM auctions")
+        auctions = [dict(row) for row in cursor.fetchall()]
+
+        auctions_csv = os.path.join(output_dir, f'auctions_{timestamp}.csv')
+        auctions_json = os.path.join(output_dir, f'auctions_{timestamp}.json')
+
+        if auctions:
+            with open(auctions_csv, 'w', newline='', encoding='utf-8') as f:
+                writer = csv.DictWriter(f, fieldnames=auctions[0].keys())
+                writer.writeheader()
+                writer.writerows(auctions)
+
+            with open(auctions_json, 'w', encoding='utf-8') as f:
+                json.dump(auctions, f, indent=2, ensure_ascii=False)
+
+            files['auctions_csv'] = auctions_csv
+            files['auctions_json'] = auctions_json
+            print(f" Exported {len(auctions)} auctions")
+
+        # Export lots
+        cursor.execute("SELECT * FROM lots")
+        lots = [dict(row) for row in cursor.fetchall()]
+
+        lots_csv = os.path.join(output_dir, f'lots_{timestamp}.csv')
+        lots_json = os.path.join(output_dir, f'lots_{timestamp}.json')
+
+        if lots:
+            with open(lots_csv, 'w', newline='', encoding='utf-8') as f:
+                writer = csv.DictWriter(f, fieldnames=lots[0].keys())
+                writer.writeheader()
+                writer.writerows(lots)
+
+            with open(lots_json, 'w', encoding='utf-8') as f:
+                json.dump(lots, f, indent=2, ensure_ascii=False)
+
+            files['lots_csv'] = lots_csv
+            files['lots_json'] = lots_json
+            print(f" Exported {len(lots)} lots")
+
+        conn.close()
+        return files