forked from Bouni/kicad-jlcpcb-tools
-
Notifications
You must be signed in to change notification settings - Fork 0
/
library.py
662 lines (595 loc) · 24.6 KB
/
library.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
"""Handle the JLCPCB parts database."""
import contextlib
from enum import Enum
import logging
import os
from pathlib import Path
import sqlite3
from threading import Thread
import time
import requests # pylint: disable=import-error
import wx # pylint: disable=import-error
from .events import (
MessageEvent,
PopulateFootprintListEvent,
ResetGaugeEvent,
UpdateGaugeEvent,
)
from .helpers import PLUGIN_PATH, dict_factory, natural_sort_collation
from .unzip_parts import unzip_parts
class LibraryState(Enum):
    """Lifecycle states of the local JLCPCB parts library."""

    # Database files are present and ready for queries.
    INITIALIZED = 0
    # Parts database is missing or empty; a download is required.
    UPDATE_NEEDED = 1
    # A background download/import thread is currently running.
    DOWNLOAD_RUNNING = 2
class Library:
    """A storage class to get data from a sqlite database and write it back."""

    def __init__(self, parent):
        """Set up paths and defaults, then bootstrap the on-disk databases.

        parent: wx window that receives gauge/message/populate events.
        """
        self.logger = logging.getLogger(__name__)
        self.parent = parent
        # Default sort column/direction used by search(); toggled in set_order_by().
        self.order_by = "LCSC Part"
        self.order_dir = "ASC"
        # All database files live in the plugin's 'jlcpcb' data directory.
        self.datadir = os.path.join(PLUGIN_PATH, "jlcpcb")
        self.partsdb_file = os.path.join(self.datadir, "parts-fts5.db")
        self.rotationsdb_file = os.path.join(self.datadir, "rotations.db")
        self.mappingsdb_file = os.path.join(self.datadir, "mappings.db")
        self.state = None
        # Cache for the category -> subcategories map (see `categories` property).
        self.category_map = {}
        self.setup()  # ensure the data directory exists
        self.check_library()  # sets self.state, creates/migrates side databases
def setup(self):
"""Check if folders and database exist, setup if not."""
if not os.path.isdir(self.datadir):
self.logger.info(
"Data directory 'jlcpcb' does not exist and will be created."
)
Path(self.datadir).mkdir(parents=True, exist_ok=True)
def check_library(self):
    """Inspect the on-disk databases and bootstrap anything missing.

    Sets self.state to UPDATE_NEEDED when the parts database is absent
    or empty (INITIALIZED otherwise), and creates/migrates the rotations
    and mappings databases when they are absent or empty.
    """

    def missing_or_empty(path):
        # A zero-byte file is treated the same as a missing one.
        return not os.path.isfile(path) or os.path.getsize(path) == 0

    self.state = (
        LibraryState.UPDATE_NEEDED
        if missing_or_empty(self.partsdb_file)
        else LibraryState.INITIALIZED
    )
    if missing_or_empty(self.rotationsdb_file):
        self.create_rotation_table()
        self.migrate_rotations()
    if missing_or_empty(self.mappingsdb_file):
        self.create_mapping_table()
        self.migrate_mappings()
def set_order_by(self, n):
    """Select the sort column by index and toggle the sort direction.

    Selecting the already-active column while sorted ascending flips the
    direction to descending; any other selection sorts that column
    ascending.
    """
    column_names = (
        "LCSC Part",
        "MFR.Part",
        "Package",
        "Solder Joint",
        "Library Type",
        "Stock",
        "Manufacturer",
        "Description",
        "Price",
    )
    selected = column_names[n]
    if self.order_by == selected and self.order_dir == "ASC":
        self.order_dir = "DESC"
    else:
        self.order_by = selected
        self.order_dir = "ASC"
def search(self, parameters):
    """Search the parts database for parts matching *parameters*.

    Builds one SELECT that combines FTS5 MATCH terms (keywords of three
    or more characters plus exact-field filters), LIKE terms (keywords
    too short for FTS5) and plain SQL clauses. All user-supplied values
    are bound as query parameters instead of being interpolated into the
    SQL text, which fixes SQL injection and breakage on quotes in the
    previous version. Returns at most 1000 rows.
    """
    # skip searching if there are no keywords and the part number
    # field is empty as there are too many parts for the search
    # to reasonably show the desired part
    if parameters["keyword"] == "" and (
        "part_no" not in parameters or parameters["part_no"] == ""
    ):
        return []
    # Note: this must match the widget order in PartSelectorDialog init and
    # populate_part_list in parselector.py
    columns = [
        "LCSC Part",
        "MFR.Part",
        "Package",
        "Solder Joint",
        "Library Type",
        "Stock",
        "Manufacturer",
        "Description",
        "Price",
    ]
    s = ",".join(f'"{c}"' for c in columns)
    query = f"SELECT {s} FROM parts WHERE "
    # FTS5 (https://www.sqlite.org/fts5.html) does not match substrings
    # of fewer than 3 unicode characters, so short keywords are searched
    # with LIKE instead of MATCH.
    match_chunks = []  # terms combined into a single FTS5 MATCH expression
    like_chunks = []  # parameterized LIKE clauses for short keywords
    like_params = []  # bound values for like_chunks, in order
    query_chunks = []  # plain SQL clauses
    query_params = []  # bound values for query_chunks, in order
    if parameters["keyword"] != "":
        match_keywords_intermediate = []
        for w in parameters["keyword"].split(" "):
            # skip over empty keywords
            if w == "":
                continue
            if len(w) < 3:  # LIKE entry, value bound as a parameter
                like_chunks.append("description LIKE ?")
                like_params.append(f"%{w}%")
            else:  # MATCH entry
                match_keywords_intermediate.append(f'"{w}"')
        if match_keywords_intermediate:
            match_chunks.append(" AND ".join(match_keywords_intermediate))
    if "manufacturer" in parameters and parameters["manufacturer"] != "":
        p = parameters["manufacturer"]
        match_chunks.append(f'"Manufacturer":"{p}"')
    if "package" in parameters and parameters["package"] != "":
        p = parameters["package"]
        match_chunks.append(f'"Package":"{p}"')
    if (
        "category" in parameters
        and parameters["category"] != ""
        and parameters["category"] != "All"
    ):
        p = parameters["category"]
        match_chunks.append(f'"First Category":"{p}"')
    if "subcategory" in parameters and parameters["subcategory"] != "":
        p = parameters["subcategory"]
        match_chunks.append(f'"Second Category":"{p}"')
    if "part_no" in parameters and parameters["part_no"] != "":
        p = parameters["part_no"]
        match_chunks.append(f'"MFR.Part":"{p}"')
    if "solder_joints" in parameters and parameters["solder_joints"] != "":
        p = parameters["solder_joints"]
        match_chunks.append(f'"Solder Joint":"{p}"')
    library_types = []
    if parameters["basic"]:
        library_types.append("Basic")
    if parameters["extended"]:
        library_types.append("Extended")
    if library_types:
        placeholders = ",".join("?" * len(library_types))
        query_chunks.append(f'"Library Type" IN ({placeholders})')
        query_params.extend(library_types)
    if parameters["stock"]:
        # NOTE(review): compares the Stock column against the string "0"
        # (lexicographic) — kept as-is to preserve existing behavior.
        query_chunks.append('"Stock" > "0"')
    if not match_chunks and not like_chunks and not query_chunks:
        return []
    params = []
    if match_chunks:
        # The whole FTS5 expression is bound as one parameter.
        query += "parts MATCH ?"
        params.append(" AND ".join(match_chunks))
    if like_chunks:
        if match_chunks:
            query += " AND "
        query += " AND ".join(like_chunks)
        params.extend(like_params)
    if query_chunks:
        if match_chunks or like_chunks:
            query += " AND "
        query += " AND ".join(query_chunks)
        params.extend(query_params)
    # order_by / order_dir only ever hold values from the fixed lists in
    # set_order_by, so interpolating them here is safe.
    query += f' ORDER BY "{self.order_by}" COLLATE naturalsort {self.order_dir}'
    query += " LIMIT 1000"
    with contextlib.closing(sqlite3.connect(self.partsdb_file)) as con:
        con.create_collation("naturalsort", natural_sort_collation)
        with con as cur:
            return cur.execute(query, params).fetchall()
def delete_parts_table(self):
    """Drop the parts table from the parts database, if present."""
    with contextlib.closing(sqlite3.connect(self.partsdb_file)) as connection:
        with connection:
            connection.execute("DROP TABLE IF EXISTS parts")
def create_meta_table(self):
    """Create the meta table in the parts database if it is missing."""
    with contextlib.closing(sqlite3.connect(self.partsdb_file)) as connection:
        with connection:
            connection.execute(
                "CREATE TABLE IF NOT EXISTS meta ('filename', 'size', 'partcount', 'date', 'last_update')"
            )
def create_rotation_table(self):
    """Create the rotation table in the rotations database if missing."""
    self.logger.debug("Create SQLite table for rotations")
    with contextlib.closing(sqlite3.connect(self.rotationsdb_file)) as connection:
        with connection:
            connection.execute(
                "CREATE TABLE IF NOT EXISTS rotation ('regex', 'correction')"
            )
def get_correction_data(self, regex):
    """Return the (regex, correction) row for *regex*, or None if absent.

    The regex is bound as a query parameter; the previous version
    interpolated it into the SQL text, which broke on single quotes and
    was an SQL injection vector.
    """
    with contextlib.closing(sqlite3.connect(self.rotationsdb_file)) as con:
        with con as cur:
            return cur.execute(
                "SELECT * FROM rotation WHERE regex = ?", (regex,)
            ).fetchone()
def delete_correction_data(self, regex):
    """Delete the correction row matching *regex* from the database.

    Parameterized query; the previous version interpolated the regex
    into the SQL text (SQL injection / quoting breakage).
    """
    with contextlib.closing(sqlite3.connect(self.rotationsdb_file)) as con:
        with con as cur:
            cur.execute("DELETE FROM rotation WHERE regex = ?", (regex,))
            cur.commit()
def update_correction_data(self, regex, rotation):
    """Set the correction value for the row matching *regex*.

    Both values are bound as query parameters; the previous version
    interpolated them into the SQL text (SQL injection / quoting
    breakage).
    """
    with contextlib.closing(sqlite3.connect(self.rotationsdb_file)) as con:
        with con as cur:
            cur.execute(
                "UPDATE rotation SET correction = ? WHERE regex = ?",
                (rotation, regex),
            )
            cur.commit()
def insert_correction_data(self, regex, rotation):
    """Add a new (regex, correction) row to the rotations database."""
    with contextlib.closing(sqlite3.connect(self.rotationsdb_file)) as database:
        with database:
            database.execute(
                "INSERT INTO rotation VALUES (?, ?)",
                (regex, rotation),
            )
def get_all_correction_data(self):
    """Return all (regex, correction:int) pairs ordered by regex.

    Returns an empty list when the rotation table does not exist.
    """
    with contextlib.closing(sqlite3.connect(self.rotationsdb_file)) as con:
        with con as cur:
            try:
                rows = cur.execute(
                    "SELECT * FROM rotation ORDER BY regex ASC"
                ).fetchall()
                return [(row[0], int(row[1])) for row in rows]
            except sqlite3.OperationalError:
                return []
def create_mapping_table(self):
    """Create the mapping table in the mappings database if missing."""
    with contextlib.closing(sqlite3.connect(self.mappingsdb_file)) as connection:
        with connection:
            connection.execute(
                "CREATE TABLE IF NOT EXISTS mapping ('footprint', 'value', 'LCSC')"
            )
def get_mapping_data(self, footprint, value):
    """Return the mapping row for (footprint, value), or None if absent.

    Both lookup keys are bound as query parameters; the previous version
    interpolated them into the SQL text (SQL injection / quoting
    breakage on footprints containing quotes).
    """
    with contextlib.closing(sqlite3.connect(self.mappingsdb_file)) as con:
        with con as cur:
            return cur.execute(
                "SELECT * FROM mapping WHERE footprint = ? AND value = ?",
                (footprint, value),
            ).fetchone()
def delete_mapping_data(self, footprint, value):
    """Delete the mapping row for (footprint, value).

    Parameterized query; the previous version interpolated the values
    into the SQL text (SQL injection / quoting breakage).
    """
    with contextlib.closing(sqlite3.connect(self.mappingsdb_file)) as con:
        with con as cur:
            cur.execute(
                "DELETE FROM mapping WHERE footprint = ? AND value = ?",
                (footprint, value),
            )
            cur.commit()
def update_mapping_data(self, footprint, value, LCSC):
    """Set the LCSC number for the mapping row matching (footprint, value).

    All values are bound as query parameters; the previous version
    interpolated them into the SQL text (SQL injection / quoting
    breakage).
    """
    with contextlib.closing(sqlite3.connect(self.mappingsdb_file)) as con:
        with con as cur:
            cur.execute(
                "UPDATE mapping SET LCSC = ? WHERE footprint = ? AND value = ?",
                (LCSC, footprint, value),
            )
            cur.commit()
def insert_mapping_data(self, footprint, value, LCSC):
    """Add a new (footprint, value, LCSC) row to the mappings database."""
    with contextlib.closing(sqlite3.connect(self.mappingsdb_file)) as database:
        with database:
            database.execute(
                "INSERT INTO mapping VALUES (?, ?, ?)",
                (footprint, value, LCSC),
            )
def get_all_mapping_data(self):
    """Return every mapping row as a list, ordered by footprint."""
    with contextlib.closing(sqlite3.connect(self.mappingsdb_file)) as database:
        with database:
            rows = database.execute(
                "SELECT * FROM mapping ORDER BY footprint ASC"
            ).fetchall()
            return [list(row) for row in rows]
def create_parts_table(self, columns):
    """Create the parts table with the given column names, if missing."""
    with contextlib.closing(sqlite3.connect(self.partsdb_file)) as connection:
        with connection:
            # Column names come from the downloaded CSV header.
            column_spec = ",".join(f" '{col}'" for col in columns)
            connection.execute(f"CREATE TABLE IF NOT EXISTS parts ({column_spec})")
def get_part_details(self, number: str) -> dict:
    """Return lcsc/stock/type details for one LCSC part number.

    Uses an FTS5 MATCH for speed, then filters the hits for the exact
    part number; returns an empty dict when the part is not found.
    """
    with contextlib.closing(sqlite3.connect(self.partsdb_file)) as con:
        con.row_factory = dict_factory  # noqa: DC05
        cur = con.cursor()
        query = """SELECT "LCSC Part" AS lcsc, "Stock" AS stock, "Library Type" AS type FROM parts WHERE parts MATCH :number"""
        cur.execute(query, {"number": number})
        # MATCH may return superset hits; keep only the exact number.
        for candidate in cur.fetchall():
            if candidate["lcsc"] == number:
                return candidate
        return {}
def update(self):
    """Update the sqlite parts database from the JLCPCB CSV."""
    # Run the download/import in a background thread so the UI thread
    # stays responsive; download() posts progress events back to parent.
    Thread(target=self.download).start()
def download(self):
    """Actual worker thread that downloads and imports the parts data.

    Fetches the parts database as numbered zip chunks, resuming from a
    progress file when an earlier run was interrupted, then combines and
    extracts the chunks. Posts gauge/message events to the parent window
    and leaves self.state at INITIALIZED on both success and failure.
    """
    self.state = LibraryState.DOWNLOAD_RUNNING
    start = time.time()
    wx.PostEvent(self.parent, ResetGaugeEvent())
    # Define basic variables
    url_stub = "https://bouni.github.io/kicad-jlcpcb-tools/"
    cnt_file = "chunk_num_fts5.txt"
    progress_file = os.path.join(self.datadir, "progress.txt")
    chunk_file_stub = "parts-fts5.db.zip."
    completed_chunks = set()
    # Check if there is a progress file from a previously interrupted
    # download; it lists one completed chunk index per line.
    if os.path.exists(progress_file):
        with open(progress_file) as f:
            # Read completed chunk indices from the progress file
            completed_chunks = {int(line.strip()) for line in f.readlines()}
    # Get the total number of chunks to download
    try:
        r = requests.get(
            url_stub + cnt_file, allow_redirects=True, stream=True, timeout=300
        )
        if r.status_code != requests.codes.ok:
            wx.PostEvent(
                self.parent,
                MessageEvent(
                    title="HTTP GET Error",
                    text=f"Failed to fetch count of database parts, error code {r.status_code}\n"
                    + "URL was:\n"
                    f"'{url_stub + cnt_file}'",
                    style="error",
                ),
            )
            self.state = LibraryState.INITIALIZED
            return
        total_chunks = int(r.text)
    except Exception as e:
        wx.PostEvent(
            self.parent,
            MessageEvent(
                title="Download Error",
                text=f"Failed to fetch database chunk count, {e}",
                style="error",
            ),
        )
        self.state = LibraryState.INITIALIZED
        return
    # Re-download incomplete or missing chunks
    for i in range(total_chunks):
        chunk_index = i + 1  # chunk files are numbered from 1
        chunk_file = chunk_file_stub + f"{chunk_index:03}"
        chunk_path = os.path.join(self.datadir, chunk_file)
        # Check if the chunk is logged as completed but the file might be incomplete
        if chunk_index in completed_chunks:
            if os.path.exists(chunk_path):
                # Validate the size of the chunk file against the server's
                # Content-Length before trusting the progress file.
                try:
                    expected_size = int(
                        requests.head(
                            url_stub + chunk_file, timeout=300
                        ).headers.get("Content-Length", 0)
                    )
                    actual_size = os.path.getsize(chunk_path)
                    if actual_size == expected_size:
                        self.logger.debug(
                            "Skipping already downloaded and validated chunk %d.",
                            chunk_index,
                        )
                        continue
                    else:
                        self.logger.warning(
                            "Chunk %d is incomplete, re-downloading.", chunk_index
                        )
                except Exception as e:
                    self.logger.warning(
                        "Unable to validate chunk %d, re-downloading. Error: %s",
                        chunk_index,
                        e,
                    )
            else:
                self.logger.warning(
                    "Chunk %d marked as completed but file is missing, re-downloading.",
                    chunk_index,
                )
        # Download the chunk
        try:
            with open(chunk_path, "wb") as f:
                r = requests.get(
                    url_stub + chunk_file,
                    allow_redirects=True,
                    stream=True,
                    timeout=300,
                )
                if r.status_code != requests.codes.ok:
                    wx.PostEvent(
                        self.parent,
                        MessageEvent(
                            title="Download Error",
                            text=f"Failed to download chunk {chunk_index}, error code {r.status_code}\n"
                            + "URL was:\n"
                            f"'{url_stub + chunk_file}'",
                            style="error",
                        ),
                    )
                    self.state = LibraryState.INITIALIZED
                    return
                size = int(r.headers.get("Content-Length", 0))
                self.logger.debug(
                    "Downloading chunk %d/%d (%.2f MB)",
                    chunk_index,
                    total_chunks,
                    size / 1024 / 1024,
                )
                # NOTE(review): if the server omits Content-Length, `size`
                # is 0 and the progress division below raises
                # ZeroDivisionError (caught by the except clause below as
                # a download failure) — confirm the server always sends
                # the header.
                for data in r.iter_content(chunk_size=4096):
                    f.write(data)
                    progress = f.tell() / size * 100
                    wx.PostEvent(self.parent, UpdateGaugeEvent(value=progress))
                self.logger.debug("Chunk %d downloaded successfully.", chunk_index)
            # Update progress file after successful download
            with open(progress_file, "a") as f:
                f.write(f"{chunk_index}\n")
        except Exception as e:
            wx.PostEvent(
                self.parent,
                MessageEvent(
                    title="Download Error",
                    text=f"Failed to download chunk {chunk_index}, {e}",
                    style="error",
                ),
            )
            self.state = LibraryState.INITIALIZED
            return
    # Delete progress file to indicate the download is complete
    if os.path.exists(progress_file):
        os.remove(progress_file)
    # Combine and extract downloaded files
    self.logger.debug("Combining and extracting zip part files...")
    try:
        unzip_parts(self.datadir)
    except Exception as e:
        wx.PostEvent(
            self.parent,
            MessageEvent(
                title="Extract Error",
                text=f"Failed to combine and extract the JLCPCB database, {e}",
                style="error",
            ),
        )
        self.state = LibraryState.INITIALIZED
        return
    # Check if the database file was successfully extracted
    if not os.path.exists(self.partsdb_file):
        wx.PostEvent(
            self.parent,
            MessageEvent(
                title="Download Error",
                text="Failed to extract the database file from the downloaded zip.",
                style="error",
            ),
        )
        self.state = LibraryState.INITIALIZED
        return
    wx.PostEvent(self.parent, ResetGaugeEvent())
    end = time.time()
    wx.PostEvent(self.parent, PopulateFootprintListEvent())
    wx.PostEvent(
        self.parent,
        MessageEvent(
            title="Success",
            text=f"Successfully downloaded and imported the JLCPCB database in {end - start:.2f} seconds!",
            style="info",
        ),
    )
    self.state = LibraryState.INITIALIZED
def create_tables(self, headers):
    """Create all tables.

    headers: column names for the freshly imported parts table.
    """
    self.create_meta_table()
    # Drop the old parts table first so it is rebuilt from scratch with
    # the supplied headers.
    self.delete_parts_table()
    self.create_parts_table(headers)
    self.create_rotation_table()
    self.create_mapping_table()
@property
def categories(self):
    """All primary categories in the database, with "All" prepended.

    Caching the relatively small category/subcategory map gives a
    noticeable speed-up over re-reading it from the on-disk database on
    every access.
    """
    if not self.category_map:
        # Seed the cache with the empty category so it is non-empty even
        # for a database without a categories table entry.
        self.category_map.setdefault("", "")
        with contextlib.closing(sqlite3.connect(self.partsdb_file)) as con:
            with con as cur:
                rows = cur.execute(
                    'SELECT * from categories ORDER BY UPPER("First Category"), UPPER("Second Category")'
                )
                for row in rows:
                    self.category_map.setdefault(row[0], []).append(row[1])
    return ["All", *self.category_map.keys()]
def get_subcategories(self, category):
    """Return the cached subcategory list for *category*.

    Raises KeyError for a category that is not in the cache.
    """
    mapping = self.category_map
    return mapping[category]
def migrate_rotations(self):
    """Migrate existing rotations from the parts db to the rotations db.

    Older releases stored the rotation table inside the parts database;
    copy any rows found there into the dedicated rotations database and
    drop the legacy table. Does nothing when the legacy table is absent.
    Also fixes the typos in the previous log messages ("sepetrate",
    "Droped").
    """
    with contextlib.closing(
        sqlite3.connect(self.partsdb_file)
    ) as pdb, contextlib.closing(
        sqlite3.connect(self.rotationsdb_file)
    ) as rdb, pdb as pcur, rdb as rcur:
        try:
            result = pcur.execute(
                "SELECT * FROM rotation ORDER BY regex ASC"
            ).fetchall()
            if not result:
                return
            rcur.executemany(
                "INSERT INTO rotation VALUES (?, ?)",
                ((r[0], r[1]) for r in result),
            )
            rcur.commit()
            self.logger.debug(
                "Migrated %d rotations to separate database.", len(result)
            )
            # Drop the legacy table only after the copy committed.
            pcur.execute("DROP TABLE IF EXISTS rotation")
            pcur.commit()
            self.logger.debug("Dropped rotations table from parts database.")
        except sqlite3.OperationalError:
            # Legacy table does not exist: nothing to migrate.
            return
def migrate_mappings(self):
    """Migrate existing mappings from the parts db to the mappings db.

    Bug fix: the previous version inserted only two values (r[0], r[1])
    into the three-column mapping table; the resulting OperationalError
    was swallowed by the except clause, so the migration silently lost
    every mapping. All three columns (footprint, value, LCSC) are copied
    now. Also fixes the typos in the log messages.
    """
    with contextlib.closing(
        sqlite3.connect(self.partsdb_file)
    ) as pdb, contextlib.closing(
        sqlite3.connect(self.mappingsdb_file)
    ) as mdb, pdb as pcur, mdb as mcur:
        try:
            result = pcur.execute(
                "SELECT * FROM mapping ORDER BY footprint ASC"
            ).fetchall()
            if not result:
                return
            mcur.executemany(
                "INSERT INTO mapping VALUES (?, ?, ?)",
                ((r[0], r[1], r[2]) for r in result),
            )
            mcur.commit()
            self.logger.debug(
                "Migrated %d mappings to separate database.", len(result)
            )
            # Drop the legacy table only after the copy committed.
            pcur.execute("DROP TABLE IF EXISTS mapping")
            pcur.commit()
            self.logger.debug("Dropped mappings table from parts database.")
        except sqlite3.OperationalError:
            # Legacy table does not exist: nothing to migrate.
            return
def get_last_update(self) -> str:
    """Return the 'last_update' value from the meta table, or "" when
    the table is missing or empty."""
    with contextlib.closing(sqlite3.connect(self.partsdb_file)) as con:
        with con as cur:
            try:
                row = cur.execute("SELECT last_update FROM meta").fetchone()
            except sqlite3.OperationalError:
                return ""
            return row[0] if row else ""