ViewVC Help
View File | Revision Log | Show Annotations | View Changeset | Root Listing
root/ns_dev/Python/NinoCode/Active_prgs/Gromulus/GromulusDatabaseUtilities.py
Revision: 990
Committed: Mon Mar 9 21:20:37 2026 UTC (2 weeks, 4 days ago) by nino.borges
Content type: text/x-python
File size: 15409 byte(s)
Log Message:
Gromulus v1.5: user metadata save workflow, schema expansion, and UI refactor

- Added user-editable game metadata workflow with explicit Save button (no autosave).
- Added dirty-state tracking and Save/Discard/Cancel prompts when navigating away or closing with unsaved changes.
- Added DB schema support for new metadata fields:
  - favorite_game
  - release_date, release_date_scraped
  - game_genre, game_genre_scraped
  - cooperative, cooperative_scraped
  - max_players, max_players_scraped
- Wired UI to user-first metadata precedence with scraped-value fallback for release date, genre, cooperative, max players, and description.
- Added release date display/storage conversion between the GUI display format and the DB storage format.
- Refactored main game info panel:
  - moved hash/file/No-Intro/TOSEC detail fields into a separate details modal
  - added compact TOSEC/NoIntro match indicators
  - added Favorite, Release Date, Genre, Cooperative, Max Players, Description, and User Notes controls
- Enhanced artwork previews from prior update:
  - larger preview boxes
  - aspect-ratio-preserving scaling
  - click-to-open full-size modal viewer
- Updated the schema/documentation files and the canonical DB utility schema/migration logic to stay aligned (the individual file names were lost from this log entry).

File Contents

# Content
1 """
2 This allows for some of the lower level db utilities, such as creating the inital Gromulus database, db backup, export, import for migration, etc. I'm assuming I'll use the create database once and then maybe each time I update the schema.
3 I'm not sure how I'll best do data migration manually.
4 """
5
6 import sqlite3
7 import os
8 import shutil
9 import json
10
11
class GromulusDatabaseUtilities:
    """
    Low-level Gromulus database utilities: creating the initial SQLite
    database, normalizing older databases to the canonical schema, backup,
    and JSON export/import for manual data migration.
    """

    def __init__(self, db_path: str):
        # Path to the SQLite database file managed by this instance.
        self.db_path = db_path
        # Canonical DDL keyed by table name.  Single source of truth for the
        # schema: create_database() executes these verbatim and
        # normalize_schema() creates any table that is missing.
        self._canonical_tables = {
            "main_app_system": """
                CREATE TABLE main_app_system(
                    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
                    name TEXT,
                    short_name TEXT,
                    relative_file_path TEXT
                )
            """,
            "main_app": """
                CREATE TABLE main_app(
                    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
                    game_name TEXT,
                    game_name_scraped TEXT,
                    container_file_name TEXT,
                    description TEXT,
                    description_scraped TEXT,
                    system_console INTEGER,
                    system_console_scraped TEXT,
                    path_to_screenshot_box TEXT,
                    path_to_screenshot_title TEXT,
                    path_to_screenshot_ingame TEXT,
                    path_to_video TEXT,
                    user_notes TEXT,
                    container_md5_hash TEXT,
                    version TEXT,
                    favorite_game INTEGER,
                    release_date TEXT,
                    release_date_scraped TEXT,
                    game_genre TEXT,
                    game_genre_scraped TEXT,
                    cooperative TEXT,
                    cooperative_scraped TEXT,
                    max_players TEXT,
                    max_players_scraped TEXT
                )
            """,
            "no_intro_system": """
                CREATE TABLE no_intro_system(
                    id INTEGER NOT NULL PRIMARY KEY,
                    name TEXT,
                    description TEXT,
                    dat_version TEXT
                )
            """,
            "no_intro_main": """
                CREATE TABLE no_intro_main(
                    game_name TEXT,
                    no_intro_id TEXT,
                    clone_of_id TEXT,
                    description TEXT,
                    rom_name TEXT,
                    crc TEXT,
                    md5 TEXT,
                    sha1 TEXT,
                    sha256 TEXT,
                    status TEXT,
                    no_intro_system_id INTEGER NOT NULL,
                    app_system_id INTEGER,
                    FOREIGN KEY(no_intro_system_id) REFERENCES no_intro_system(id)
                )
            """,
            # NOTE(review): tosec_main.system_id references no_intro_system(id).
            # This may be intentional (shared system lookup table) but looks
            # like a copy/paste from no_intro_main -- confirm before relying
            # on it.
            "tosec_main": """
                CREATE TABLE tosec_main(
                    game_name TEXT,
                    description TEXT,
                    rom_name TEXT,
                    crc TEXT,
                    md5 TEXT,
                    sha1 TEXT,
                    system_id INTEGER,
                    app_system_id INTEGER,
                    FOREIGN KEY(system_id) REFERENCES no_intro_system(id)
                )
            """,
            "dat_import_history": """
                CREATE TABLE dat_import_history(
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    app_system_id INTEGER NOT NULL,
                    source TEXT NOT NULL,
                    dat_name TEXT,
                    dat_description TEXT,
                    dat_version TEXT,
                    imported_at TEXT NOT NULL,
                    entry_count INTEGER NOT NULL DEFAULT 0,
                    UNIQUE(app_system_id, source),
                    FOREIGN KEY(app_system_id) REFERENCES main_app_system(id)
                )
            """,
        }

    def create_database(self):
        """Creates a fresh Gromulus SQLite database using the canonical schema.

        WARNING: destructive by design -- any existing file at ``db_path``
        is deleted first.
        """
        if os.path.exists(self.db_path):
            os.remove(self.db_path)

        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            for ddl in self._canonical_tables.values():
                cursor.execute(ddl)
            self._create_indexes(cursor)
            conn.commit()
        finally:
            # Close even on DDL failure so the file is not left locked.
            conn.close()
        print("Database created successfully at:", self.db_path)

    def normalize_schema(self):
        """
        Normalizes an existing DB to the canonical schema without dropping data.
        Adds missing tables/columns and backfills known old column names.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            # Create any table that does not exist yet.
            for table_name, ddl in self._canonical_tables.items():
                if not self._table_exists(cursor, table_name):
                    cursor.execute(ddl)

            # Add any canonical column that an older schema is missing.
            self._ensure_columns(
                cursor,
                "main_app",
                [
                    "game_name", "game_name_scraped", "container_file_name", "description",
                    "description_scraped", "system_console", "system_console_scraped",
                    "path_to_screenshot_box", "path_to_screenshot_title",
                    "path_to_screenshot_ingame", "path_to_video", "user_notes",
                    "container_md5_hash", "version", "favorite_game", "release_date",
                    "release_date_scraped", "game_genre", "game_genre_scraped",
                    "cooperative", "cooperative_scraped", "max_players", "max_players_scraped",
                ],
            )
            self._ensure_columns(cursor, "main_app_system", ["name", "short_name", "relative_file_path"])
            self._ensure_columns(cursor, "no_intro_system", ["name", "description", "dat_version"])
            self._ensure_columns(
                cursor,
                "no_intro_main",
                [
                    "game_name", "no_intro_id", "clone_of_id", "description", "rom_name",
                    "crc", "md5", "sha1", "sha256", "status", "no_intro_system_id", "app_system_id",
                ],
            )
            self._ensure_columns(
                cursor,
                "tosec_main",
                ["game_name", "description", "rom_name", "crc", "md5", "sha1", "system_id", "app_system_id"],
            )
            self._ensure_columns(
                cursor,
                "dat_import_history",
                ["app_system_id", "source", "dat_name", "dat_description", "dat_version", "imported_at", "entry_count"],
            )

            self._backfill_aliases(cursor)
            self._create_indexes(cursor)
            conn.commit()
        finally:
            conn.close()
        print("Schema normalized successfully at:", self.db_path)

    def _table_exists(self, cursor, table_name):
        """Return True if *table_name* exists in the database."""
        cursor.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
            (table_name,),
        )
        return cursor.fetchone() is not None

    def _ensure_columns(self, cursor, table_name, required_columns):
        """Add any of *required_columns* missing from *table_name*.

        Missing columns are always added as TEXT; SQLite's dynamic typing
        keeps this compatible even with columns the canonical DDL declares
        INTEGER.
        """
        cursor.execute(f"PRAGMA table_info({table_name})")
        # row[1] is the column name in PRAGMA table_info output.
        existing_columns = {row[1] for row in cursor.fetchall()}
        for column_name in required_columns:
            if column_name not in existing_columns:
                cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column_name} TEXT")

    def _column_exists(self, cursor, table_name, column_name):
        """Return True if *column_name* exists on *table_name*."""
        cursor.execute(f"PRAGMA table_info({table_name})")
        return any(row[1] == column_name for row in cursor.fetchall())

    def _backfill_aliases(self, cursor):
        """Copy data from known legacy column names into canonical columns.

        Each copy only fills canonical columns that are still NULL/empty, so
        re-running is safe and never clobbers data.
        """
        # main_app alias mapping
        if self._column_exists(cursor, "main_app", "file_name"):
            cursor.execute(
                "UPDATE main_app SET container_file_name=file_name "
                "WHERE (container_file_name IS NULL OR container_file_name='') "
                "AND file_name IS NOT NULL"
            )
        if self._column_exists(cursor, "main_app", "md5"):
            cursor.execute(
                "UPDATE main_app SET container_md5_hash=md5 "
                "WHERE (container_md5_hash IS NULL OR container_md5_hash='') AND md5 IS NOT NULL"
            )
        if self._column_exists(cursor, "main_app", "system_id"):
            cursor.execute(
                "UPDATE main_app SET system_console=system_id "
                "WHERE (system_console IS NULL OR system_console='') AND system_id IS NOT NULL"
            )
        if self._column_exists(cursor, "main_app", "rom_filename"):
            cursor.execute(
                "UPDATE main_app SET container_file_name=rom_filename "
                "WHERE (container_file_name IS NULL OR container_file_name='') "
                "AND rom_filename IS NOT NULL"
            )
        if self._column_exists(cursor, "main_app", "rom_hash"):
            cursor.execute(
                "UPDATE main_app SET container_md5_hash=rom_hash "
                "WHERE (container_md5_hash IS NULL OR container_md5_hash='') "
                "AND rom_hash IS NOT NULL"
            )
        if self._column_exists(cursor, "main_app", "notes"):
            cursor.execute(
                "UPDATE main_app SET user_notes=notes "
                "WHERE (user_notes IS NULL OR user_notes='') AND notes IS NOT NULL"
            )

        # system table alias mapping
        if self._column_exists(cursor, "no_intro_system", "system_name"):
            cursor.execute(
                "UPDATE no_intro_system SET name=system_name "
                "WHERE (name IS NULL OR name='') AND system_name IS NOT NULL"
            )
        if self._column_exists(cursor, "no_intro_system", "system_description"):
            cursor.execute(
                "UPDATE no_intro_system SET description=system_description "
                "WHERE (description IS NULL OR description='') AND system_description IS NOT NULL"
            )

        # no_intro_main alias mapping
        if self._column_exists(cursor, "no_intro_main", "rom_hash"):
            cursor.execute(
                "UPDATE no_intro_main SET md5=rom_hash "
                "WHERE (md5 IS NULL OR md5='') AND rom_hash IS NOT NULL"
            )
        if self._column_exists(cursor, "no_intro_main", "rom_filename"):
            cursor.execute(
                "UPDATE no_intro_main SET rom_name=rom_filename "
                "WHERE (rom_name IS NULL OR rom_name='') AND rom_filename IS NOT NULL"
            )
        if self._column_exists(cursor, "no_intro_main", "system_id"):
            cursor.execute(
                "UPDATE no_intro_main SET no_intro_system_id=system_id "
                "WHERE (no_intro_system_id IS NULL OR no_intro_system_id='') AND system_id IS NOT NULL"
            )
        # BUG FIX: these two backfills join against the legacy
        # main_app_file_hash table, which the canonical schema does not
        # create.  Previously they ran whenever app_system_id existed (it
        # always does after _ensure_columns), so normalize_schema() raised
        # "no such table: main_app_file_hash" on any canonical database.
        # Guard on the legacy table actually being present.
        if (
            self._column_exists(cursor, "no_intro_main", "app_system_id")
            and self._table_exists(cursor, "main_app_file_hash")
        ):
            cursor.execute(
                """
                UPDATE no_intro_main
                SET app_system_id = (
                    SELECT ma.system_console
                    FROM main_app_file_hash fh
                    JOIN main_app ma ON ma.id = fh.container_file_id
                    WHERE lower(fh.file_md5_hash) = lower(no_intro_main.md5)
                    GROUP BY ma.system_console
                    ORDER BY COUNT(*) DESC
                    LIMIT 1
                )
                WHERE app_system_id IS NULL
                """
            )

        # tosec_main alias mapping
        if self._column_exists(cursor, "tosec_main", "rom_hash"):
            cursor.execute(
                "UPDATE tosec_main SET md5=rom_hash "
                "WHERE (md5 IS NULL OR md5='') AND rom_hash IS NOT NULL"
            )
        if self._column_exists(cursor, "tosec_main", "rom_filename"):
            cursor.execute(
                "UPDATE tosec_main SET rom_name=rom_filename "
                "WHERE (rom_name IS NULL OR rom_name='') AND rom_filename IS NOT NULL"
            )
        if (
            self._column_exists(cursor, "tosec_main", "app_system_id")
            and self._table_exists(cursor, "main_app_file_hash")
        ):
            cursor.execute(
                """
                UPDATE tosec_main
                SET app_system_id = (
                    SELECT ma.system_console
                    FROM main_app_file_hash fh
                    JOIN main_app ma ON ma.id = fh.container_file_id
                    WHERE lower(fh.file_md5_hash) = lower(tosec_main.md5)
                    GROUP BY ma.system_console
                    ORDER BY COUNT(*) DESC
                    LIMIT 1
                )
                WHERE app_system_id IS NULL
                """
            )

    def _create_indexes(self, cursor):
        """Create the lookup indexes used by hash-matching queries (idempotent)."""
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_main_app_md5 ON main_app(container_md5_hash)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_main_app_system ON main_app(system_console)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_no_intro_md5 ON no_intro_main(md5)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_tosec_md5 ON tosec_main(md5)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_no_intro_system_md5 ON no_intro_main(app_system_id, md5)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_tosec_system_md5 ON tosec_main(app_system_id, md5)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_dat_import_system_source ON dat_import_history(app_system_id, source)")

    def backup_database(self, backup_path):
        """Copies the current database to a new location."""
        if not os.path.exists(self.db_path):
            raise FileNotFoundError("Database file not found.")
        # copy2 preserves file metadata (mtime etc.) alongside contents.
        shutil.copy2(self.db_path, backup_path)
        print(f"Database backed up to: {backup_path}")

    def export_data_to_json(self, export_path):
        """Exports all table data to a JSON file."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            data = {}
            for table in [
                "main_app_system", "main_app",
                "no_intro_system", "no_intro_main",
                "tosec_main", "dat_import_history",
            ]:
                # Skip tables that this (possibly older) database lacks.
                if not self._table_exists(cursor, table):
                    continue
                cursor.execute(f"SELECT * FROM {table}")
                columns = [description[0] for description in cursor.description]
                data[table] = [dict(zip(columns, row)) for row in cursor.fetchall()]

            with open(export_path, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2)
        finally:
            conn.close()
        print(f"Exported data to JSON: {export_path}")

    def import_data_from_json(self, import_path):
        """Imports table data from a JSON file (assumes schema is already created)."""
        if not os.path.exists(import_path):
            raise FileNotFoundError("Import file not found.")

        with open(import_path, "r", encoding="utf-8") as f:
            data = json.load(f)

        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            for table, rows in data.items():
                if not rows:
                    continue
                # Column order is taken from the first row; export writes
                # every row with the same keys.
                columns = list(rows[0].keys())
                placeholders = ", ".join(["?"] * len(columns))
                col_list = ", ".join(columns)
                # One prepared statement for all rows of the table.
                cursor.executemany(
                    f"INSERT INTO {table} ({col_list}) VALUES ({placeholders})",
                    [tuple(row[col] for col in columns) for row in rows],
                )
            conn.commit()
        finally:
            conn.close()
        print(f"Imported data from JSON: {import_path}")