diff --git a/.gdb/qt5prettyprinters/__pycache__/qt.cpython-311.pyc b/.gdb/qt5prettyprinters/__pycache__/qt.cpython-311.pyc index c4f1065..8d57c4e 100644 Binary files a/.gdb/qt5prettyprinters/__pycache__/qt.cpython-311.pyc and b/.gdb/qt5prettyprinters/__pycache__/qt.cpython-311.pyc differ diff --git a/.gdb/qt5prettyprinters/qt.py b/.gdb/qt5prettyprinters/qt.py index 7693192..f0e2a83 100644 --- a/.gdb/qt5prettyprinters/qt.py +++ b/.gdb/qt5prettyprinters/qt.py @@ -48,6 +48,7 @@ class QByteArrayPrinter: def __init__(self, val): self.val = val + self.size = self.val['d']['size'] # Qt4 has 'data', Qt5 doesn't self.isQt4 = has_field(self.val['d'], 'data') # Qt6 has d.ptr, Qt5 doesn't @@ -78,11 +79,11 @@ def stringData(self): return self.val['d'].cast(gdb.lookup_type("char").const().pointer()) + self.val['d']['offset'] def children(self): - return self._iterator(self.stringData(), self.val['d']['size']) + return self._iterator(self.stringData(), self.size) def to_string(self): #todo: handle charset correctly - return self.stringData() + return self.stringData().string(length = self.size) def display_hint (self): return 'string' @@ -755,7 +756,7 @@ def build_dictionary (): pretty_printers_dict[re.compile('^QByteArray$')] = lambda val: QByteArrayPrinter(val) pretty_printers_dict[re.compile('^QList<.*>$')] = lambda val: QListPrinter(val, 'QList', None) pretty_printers_dict[re.compile('^QStringList$')] = lambda val: QListPrinter(val, 'QStringList', 'QString') - pretty_printers_dict[re.compile('^QQueue')] = lambda val: QListPrinter(val, 'QQueue', None) + pretty_printers_dict[re.compile('^QQueue<.*>$')] = lambda val: QListPrinter(val, 'QQueue', None) pretty_printers_dict[re.compile('^QVector<.*>$')] = lambda val: QVectorPrinter(val, 'QVector') pretty_printers_dict[re.compile('^QStack<.*>$')] = lambda val: QVectorPrinter(val, 'QStack') pretty_printers_dict[re.compile('^QLinkedList<.*>$')] = lambda val: QLinkedListPrinter(val) @@ -769,8 +770,8 @@ def build_dictionary (): 
pretty_printers_dict[re.compile('^QUrl$')] = lambda val: QUrlPrinter(val) pretty_printers_dict[re.compile('^QSet<.*>$')] = lambda val: QSetPrinter(val) pretty_printers_dict[re.compile('^QChar$')] = lambda val: QCharPrinter(val) - pretty_printers_dict[re.compile('^QUuid')] = lambda val: QUuidPrinter(val) - pretty_printers_dict[re.compile('^QVariant')] = lambda val: QVariantPrinter(val) + pretty_printers_dict[re.compile('^QUuid$')] = lambda val: QUuidPrinter(val) + pretty_printers_dict[re.compile('^QVariant$')] = lambda val: QVariantPrinter(val) build_dictionary () diff --git a/.gitignore b/.gitignore index c3a1b1f..ff994d8 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ Makefile build-Debug build .cache +.gdb/qt5prettyprinters/__pycache__ diff --git a/CMakeLists.txt b/CMakeLists.txt index 44d00fc..2e619c6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -79,7 +79,7 @@ target_link_libraries(${PROJECT_NAME} PUBLIC # ${CURL_LIBRARY} #) -target_include_directories(${PROJECT_NAME} PRIVATE ${CURL_INCLUDE_DIR} libs/miniz ) +target_include_directories(${PROJECT_NAME} PRIVATE ${CURL_INCLUDE_DIR} libs/miniz src ) set_target_properties(${PROJECT_NAME} PROPERTIES CXX_STANDARD 17 diff --git a/README.md b/README.md index d92506f..55ebd73 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ manipulation configuration files. Bash for launching with Steam or Lutris or jus * checksums * download maps/mods, and install them * add a filter for html in database +* make sure the rest of the GUI still updates while downloading # trle.net mods the launcher will download and play with wine-tkg that I tested * [Calypsis Jungle - Part One](https://www.trle.net/sc/levelfeatures.php?lid=3500) @@ -36,6 +37,19 @@ You need those, should be installed an desktop linux * curl * Qt5 +To ensure that our application works properly right now, you need to download the +required certificates from Firefox and add them to your system’s certificate store. 
+Unfortunately, I cannot share these certificates directly. + +``` +/etc/ssl/certs/trle-net-chain.pem +/etc/ssl/certs/trle-net.pem +``` +Something like that should work on your system +``` +sudo update-ca-certificates +``` + ```shell cmake -DCMAKE_INSTALL_PREFIX=~/.local . make install diff --git a/src/FileManager.cpp b/src/FileManager.cpp index 8bb0060..685955d 100644 --- a/src/FileManager.cpp +++ b/src/FileManager.cpp @@ -440,52 +440,51 @@ bool FileManager::moveFilesToDirectory(const QString& fromLevelDirectory, const bool FileManager::moveFilesToParentDirectory(const QString& levelDirectory) { - const QString& directoryPath = levelDir_m.absolutePath() + QDir::separator() + levelDirectory; - QRegExp regex("/[^/]+$"); // Matches the last component of the path starting with a slash - int indexOfRegex = regex.indexIn(levelDirectory); + QDir dir(levelDir_m.absolutePath() + QDir::separator() + levelDirectory); - QString tmp = levelDirectory; - if (indexOfRegex == -1) { + // Kontrollera om katalogen finns + if (!dir.exists()) { + qWarning() << "Directory does not exist:" << dir.absolutePath(); return false; } - const QString& parentDirectory = tmp.remove(indexOfRegex, tmp.length() - indexOfRegex); - const QString& directoryParentPath = levelDir_m.absolutePath() + QDir::separator() + parentDirectory; - QDir dir(directoryPath); - - if(!moveFilesToDirectory(levelDirectory, parentDirectory)) - { + // Hämta föräldrakatalogen + QDir parentDir = dir; + if (!parentDir.cdUp()) { + qWarning() << "Failed to access parent directory of:" << dir.absolutePath(); return false; } - // Get list of all entries (files and directories) excluding '.' and '..' - QStringList entryDirList = dir.entryList(QDir::Dirs | QDir::NoDotAndDotDot); + // Lista alla filer och kataloger (exklusive '.' 
och '..') + QFileInfoList fileList = dir.entryInfoList(QDir::Files | QDir::Dirs | QDir::NoDotAndDotDot); - //TODO: make it look recursively and delete empty dir + // Flytta alla filer och kataloger till föräldrakatalogen + for (const QFileInfo& fileInfo : fileList) { + QString srcPath = fileInfo.absoluteFilePath(); + QString destPath = parentDir.absolutePath() + QDir::separator() + fileInfo.fileName(); - // Move files and recursively move directories - for (const QString& entry : entryDirList) - { - QString entryPath = directoryPath + QDir::separator() + entry; - createDirectory(parentDirectory + QDir::separator() + entry, false); - if (!moveFilesToDirectory(levelDirectory + QDir::separator() + entry, parentDirectory + QDir::separator() + entry)) - { - qWarning() << "Failed to move directory:" << entryPath; - return false; + if (fileInfo.isDir()) { + // Flytta katalogen rekursivt + QDir srcDir(srcPath); + if (!srcDir.rename(srcPath, destPath)) { + qWarning() << "Failed to move directory:" << srcPath; + return false; + } + } else { + // Flytta filen + if (!QFile::rename(srcPath, destPath)) { + qWarning() << "Failed to move file:" << srcPath; + return false; + } } } - - // Remove the directory if it's not the root directory - /* - if (dir != levelDir_m) - { - if (!dir.rmdir(".")) - { - qWarning() << "Failed to remove directory:" << directoryPath; - return false; - } + + // Ta bort den ursprungliga (nu tomma) katalogen + if (!dir.rmdir(".")) { + qWarning() << "Failed to remove directory:" << dir.absolutePath(); + return false; } - */ + return true; } diff --git a/src/Model.cpp b/src/Model.cpp index e41d1ba..7a71307 100644 --- a/src/Model.cpp +++ b/src/Model.cpp @@ -1,6 +1,7 @@ #include #include "Model.h" +// Those lambda should be in another header file Model::Model(QObject *parent) : QObject(parent) { instructionManager.addInstruction(4, [this](int id) { @@ -111,7 +112,7 @@ bool Model::setUpOg(int id) const size_t sm = list[1].size(); if (s!=sm) { - qDebug() << 
"Corrupt List"; + qDebug() << "Corrupt list, there seems to bee more or less checksums for the files\n"; return false; } const QString& sd = "/Original.TR" + QString::number(id) +"/"; diff --git a/src/TombRaiderLinuxLauncher.cpp b/src/TombRaiderLinuxLauncher.cpp index 37147f1..8cef82e 100644 --- a/src/TombRaiderLinuxLauncher.cpp +++ b/src/TombRaiderLinuxLauncher.cpp @@ -86,6 +86,7 @@ int TombRaiderLinuxLauncher::testallGames(int id){ void TombRaiderLinuxLauncher::checkCommonFiles() { + testallGames(2); testallGames(3); testallGames(4); generateList(); @@ -107,6 +108,22 @@ void TombRaiderLinuxLauncher::generateList() foreach (const QFileInfo &file, enteryInfoList) { QString Ending = file.fileName().right(4); + if (Ending == ".TR2") + { + if(file.fileName() == "Original.TR2") + { + QListWidgetItem *wi = new QListWidgetItem(QIcon(pictures+"Tomb_Raider_II.jpg"),"Tomb Raider II Original"); + wi->setData(Qt::UserRole, QVariant(-2)); + ui->listWidgetModds->addItem(wi); + } + else + { + qDebug() << "No link or id implementation"; + //read some json file + //QListWidgetItem *wi = new QListWidgetItem(QIcon(pictures+"Tomb_Raider_II_unkown.jpg"),file.fileName()); + //ui->listWidgetModds->addItem(wi); + } + } if (Ending == ".TR3") { if(file.fileName() == "Original.TR3") diff --git a/src/TombRaiderLinuxLauncher.ui b/src/TombRaiderLinuxLauncher.ui index 0f83dde..939b282 100644 --- a/src/TombRaiderLinuxLauncher.ui +++ b/src/TombRaiderLinuxLauncher.ui @@ -386,8 +386,20 @@ Filter/Sort - - + + 0 + + + 0 + + + 0 + + + 0 + + + 1 @@ -396,21 +408,24 @@ - 640 - 640 + 525 + 675 - - - 1280 - 1280 - + + + + + - + + + + Qt::PlainText - :/pictures/pictures/Lara2.jpg + :/pictures/pictures/Lara2.jpg true @@ -418,19 +433,37 @@ - + 1 1 - + + Qt::LeftToRight + + + + QLayout::SetDefaultConstraint + + + 0 + + + 0 + + + 0 + + + 0 + - + <html><head/><body><p align="center"><span style=" font-size:12pt;">Filter the level list</span></p></body></html> @@ -488,7 +521,7 @@ - + @@ -511,6 +544,11 @@ + + + - 
All - + + Alien/Space @@ -713,6 +751,11 @@ + + + - All - + + easy @@ -750,6 +793,11 @@ + + + - All - + + TR1 @@ -797,6 +845,11 @@ + + + - All - + + short @@ -823,12 +876,7 @@ - - - - - - + @@ -841,10 +889,10 @@ - + - + 12 @@ -856,7 +904,7 @@ - + 12 @@ -868,7 +916,7 @@ - + 12 @@ -882,54 +930,54 @@ - - + + 12 - Type + Class - - + + 12 - Level name - - - true + Author - - + + 12 - Class + Type - - + + 12 - User name + Level name + + + true @@ -937,7 +985,7 @@ - + Qt::Vertical @@ -957,13 +1005,10 @@ Qt::Horizontal - - QSizePolicy::MinimumExpanding - - 0 - 0 + 60 + 20 @@ -1269,6 +1314,8 @@ p, li { white-space: pre-wrap; }
QtWebEngineWidgets/QWebEngineView
- + + + diff --git a/src/pictures/Lara2.jpg b/src/pictures/Lara2.jpg index dc95ef8..13360ee 100644 Binary files a/src/pictures/Lara2.jpg and b/src/pictures/Lara2.jpg differ diff --git a/src/pictures/Lara2.xcf b/src/pictures/Lara2.xcf new file mode 100644 index 0000000..d2fb931 Binary files /dev/null and b/src/pictures/Lara2.xcf differ diff --git a/utils/README b/utils/README index 300492a..fdbf378 100644 --- a/utils/README +++ b/utils/README @@ -35,3 +35,6 @@ installing it. but that one got me stuck where a door was supposed to open for a key. +6.The Hidden Dagger II - Dragon's Legacy + https://www.trle.net/sc/levelfeatures.php?lid=3621 + Starts the game and first map, seems to be no problems diff --git a/utils/addData.py b/utils/addData.py index 9fa6234..83c48ab 100644 --- a/utils/addData.py +++ b/utils/addData.py @@ -11,9 +11,34 @@ import fcntl import time import json +import logging + +# Set up logging +logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s:%(message)s') +logging.getLogger("requests").setLevel(logging.DEBUG) +logging.getLogger("urllib3").setLevel(logging.DEBUG) + +def download_file(url, cert, file_name): + response = requests.get(url, stream=True, verify=cert) + response.raise_for_status() + + total_size = int(response.headers.get('content-length', 0)) + block_size = 1024 # 1 Kilobyte + wrote = 0 + + with open(file_name, 'wb') as file: + for data in tqdm(response.iter_content(block_size), total=total_size // block_size, unit='KB', unit_scale=True): + wrote += len(data) + file.write(data) + + if total_size != 0 and wrote != total_size: + logging.error("ERROR, something went wrong with the download") + else: + logging.info(f"Downloaded {file_name} successfully") + if __name__ == "__main__": if len(sys.argv) != 2: - print("Usage: python3 addData.py FILE.json") + logging.error("Usage: python3 addData.py FILE.json") sys.exit(1) else: file = sys.argv[1] @@ -26,7 +51,7 @@ lock_fd = open(lock_file, 'w') fcntl.flock(lock_fd, 
fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError: - print("Another instance is already running") + logging.error("Another instance is already running") sys.exit(1) time.sleep(2) @@ -35,13 +60,17 @@ file_info = json.load(json_file) zip_url = file_info.get('download_url') -response = requests.get(zip_url) +cert = '/home/noisecode3/mySecretVirusFolder/trle-net-chain.pem' + +response = requests.get(zip_url, verify=cert) zip_content = response.content md5_hash = hashlib.md5(zip_content).hexdigest() zip_md5 = file_info.get('zipFileMd5') if md5_hash != zip_md5: + logging.error("MD5 checksum does not match") sys.exit(1) + conn = sqlite3.connect('tombll.db') c = conn.cursor() @@ -62,7 +91,6 @@ info_difficulty = file_info.get('difficulty') info_duration = file_info.get('duration') -# Retrieve the corresponding IDs from other tables c.execute("SELECT InfoDifficultyID FROM InfoDifficulty WHERE value = ?", (info_difficulty,)) difficulty_id = c.fetchone()[0] @@ -75,7 +103,6 @@ c.execute("SELECT InfoClassID FROM InfoClass WHERE value = ?", (info_class,)) class_id = c.fetchone()[0] -# Insert data into the Info table using the retrieved IDs c.execute(''' INSERT INTO Info (title, author, release, difficulty, duration, type, class) VALUES (?,?,?,?,?,?,?) 
@@ -99,14 +126,14 @@ c.execute("INSERT INTO Level (body, walkthrough, zipID, infoID) VALUES (?, ?, ?, ?)", (level_body, level_walkthrough, zip_id, info_id)) except sqlite3.Error as e: - print(f"SQLite error: {e}") + logging.error(f"SQLite error: {e}") c.execute("SELECT MAX(LevelID) FROM Level") level_id = c.fetchone()[0] -print("Current level_id:", level_id) +logging.info(f"Current level_id: {level_id}") screen_url = file_info.get('screen') -screen_response = requests.get(screen_url) +screen_response = requests.get(screen_url, verify=cert) screen_content = screen_response.content screen_file_name = os.path.basename(screen_url) c.execute("INSERT INTO Picture (data) VALUES (?)", (screen_content,)) @@ -115,7 +142,7 @@ c.execute("INSERT INTO Screens (pictureID, levelID) VALUES (?, ?)", (screen_id, level_id)) for screen_large in file_info.get("screensLarge", []): - response = requests.get(screen_large) + response = requests.get(screen_large, verify=cert) screen_content = response.content file_name = os.path.basename(screen_large) c.execute("INSERT INTO Picture (data) VALUES (?)", (screen_content,)) @@ -123,7 +150,6 @@ screen_id = c.fetchone()[0] c.execute("INSERT INTO Screens (pictureID, levelID) VALUES (?, ?)", (screen_id, level_id)) - with open(zip_name, 'wb') as zip_file: zip_file.write(zip_content) @@ -138,36 +164,29 @@ file_content = f.read() file_md5 = hashlib.md5(file_content).hexdigest() - # Check if the file with the same md5sum already exists in Files table c.execute("SELECT FileID FROM Files WHERE md5sum = ? AND path = ?", (file_md5, relative_path)) existing_file = c.fetchone() if existing_file: - # File already exists, use the existing FileID file_id = existing_file[0] - print(f"File with md5sum {file_md5} and path {relative_path} already exists. Using existing FileID: {file_id}") + logging.info(f"File with md5sum {file_md5} and path {relative_path} already exists. 
Using existing FileID: {file_id}") else: - # File doesn't exist, insert it into Files table c.execute("INSERT INTO Files (md5sum, path) VALUES (?, ?)", (file_md5, relative_path)) file_id = c.lastrowid - print(f"Inserted new file with md5sum {file_md5}. New FileID: {file_id}") + logging.info(f"Inserted new file with md5sum {file_md5}. New FileID: {file_id}") try: - # Check if the combination of fileID and levelID already exists in LevelFileList c.execute("SELECT 1 FROM LevelFileList WHERE fileID = ? AND levelID = ?", (file_id, level_id)) existing_combination = c.fetchone() if not existing_combination: - # Combination doesn't exist, insert it into LevelFileList c.execute("INSERT INTO LevelFileList (fileID, levelID) VALUES (?, ?)", (file_id, level_id)) else: - # Combination already exists, print a message or handle it as needed - print(f"Combination of FileID {file_id} and LevelID {level_id} already exists in LevelFileList. Skipping insertion.") + logging.info(f"Combination of FileID {file_id} and LevelID {level_id} already exists in LevelFileList. 
Skipping insertion.") except sqlite3.IntegrityError as e: - # Print more details about the uniqueness violation - print(f"Uniqueness violation in LevelFileList: {e}") - print(f"FileID: {file_id}, LevelID: {level_id}") + logging.error(f"Uniqueness violation in LevelFileList: {e}") + logging.error(f"FileID: {file_id}, LevelID: {level_id}") conn.commit() conn.close() diff --git a/utils/getData.py b/utils/getData.py index 15068f8..cd93b97 100644 --- a/utils/getData.py +++ b/utils/getData.py @@ -8,33 +8,47 @@ import fcntl import hashlib import requests +import logging from bs4 import BeautifulSoup + +# Set up logging +logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s:%(message)s') +logging.getLogger("requests").setLevel(logging.DEBUG) +logging.getLogger("urllib3").setLevel(logging.DEBUG) + if __name__ == "__main__": if len(sys.argv) != 2: - print("Usage: python3 getData.py URL") + logging.error("Usage: python3 getData.py URL") sys.exit(1) else: url = sys.argv[1] + lock_file = '/tmp/TRLE.lock' try: lock_fd = open(lock_file, 'w') fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError: - print("Another instance is already running") + logging.error("Another instance is already running") sys.exit(1) time.sleep(2) # test url # url = 'https://www.trle.net/sc/levelfeatures.php?lid=3573' -response = requests.get(url) +cert = '/home/noisecode3/mySecretVirusFolder/trle-net-chain.pem' + +try: + response = requests.get(url, verify=cert) + response.raise_for_status() +except requests.exceptions.RequestException as e: + logging.error(f"Error fetching URL {url}: {e}") + sys.exit(1) if response.status_code == 200: soup = BeautifulSoup(response.text, 'html.parser') - # print(f'response.text: {response.text}') + logging.debug(f'response.text: {response.text}') title_span = soup.find('span', class_='subHeader') - if title_span: title = title_span.get_text(strip=True) br_tag = title_span.find('br') @@ -43,23 +57,15 @@ else: title = "missing" - author = 
soup.find('a', class_='linkl').get_text(strip=True) \ - or "missing" - type = soup.find('td', string='file type:').find_next('td').get_text(strip=True) \ - or "missing" - class_ = soup.find('td', string='class:').find_next('td').get_text(strip=True) \ - or "missing" - releaseDate = soup.find('td', string='release date:').find_next('td').get_text(strip=True) \ - or "missing" - difficulty = soup.find('td', string='difficulty:').find_next('td').get_text(strip=True) \ - or "missing" - duration = soup.find('td', string='duration:').find_next('td').get_text(strip=True) \ - or "missing" + author = soup.find('a', class_='linkl').get_text(strip=True) or "missing" + type = soup.find('td', string='file type:').find_next('td').get_text(strip=True) or "missing" + class_ = soup.find('td', string='class:').find_next('td').get_text(strip=True) or "missing" + releaseDate = soup.find('td', string='release date:').find_next('td').get_text(strip=True) or "missing" + difficulty = soup.find('td', string='difficulty:').find_next('td').get_text(strip=True) or "missing" + duration = soup.find('td', string='duration:').find_next('td').get_text(strip=True) or "missing" + specific_tags = soup.find_all('td', class_='medGText', align='left', valign='top') - if len(specific_tags) >= 2: - body = specific_tags[1] - else: - body = "missing" + body = specific_tags[1] if len(specific_tags) >= 2 else "missing" zipFileSize = float( soup.find('td', string='file size:') @@ -67,58 +73,52 @@ .get_text(strip=True) .replace(',', '') .replace('MB', '') - ) or 0.0 + ) or 0.0 download_link = soup.find('a', string='Download') if download_link: url = download_link['href'] time.sleep(2) - response2 = requests.head(url, allow_redirects=True) - - if response2.status_code == 200: + try: + response2 = requests.head(url, verify=cert, allow_redirects=True) + response2.raise_for_status() download_url = response2.url - # Extract the file name from the URL file_name = response2.url.split('/')[-1] zipFileName = file_name - 
- md5_checksum = hashlib.md5(requests.get(url).content).hexdigest() + md5_checksum = hashlib.md5(requests.get(url, verify=cert).content).hexdigest() zipFileMd5 = md5_checksum - else: - print(f'Failed to retrieve file information. Status code: {response2.status_code}') + except requests.exceptions.RequestException as e: + logging.error(f"Failed to retrieve file information from {url}: {e}") walkthrough_link = soup.find('a', string='Walkthrough') if walkthrough_link: url = 'https://www.trle.net/sc/' + walkthrough_link['href'] time.sleep(2) - response3 = requests.get(url) - - if response3.status_code == 200: + try: + response3 = requests.get(url, verify=cert) + response3.raise_for_status() soup2 = BeautifulSoup(response3.text, 'html.parser') iframe_tag = soup2.find('iframe') iframe_src = iframe_tag['src'] url = "https://www.trle.net" + iframe_src - response4 = requests.get(url) + response4 = requests.get(url, verify=cert) if response4.status_code == 200: walkthrough = response4.text else: - print(f'Failed to retrieve file information. Status code: {response3.status_code}') - else: - print(f'Failed to retrieve file information. Status code: {response3.status_code}') + logging.error(f'Failed to retrieve iframe content. 
Status code: {response4.status_code}') + except requests.exceptions.RequestException as e: + logging.error(f"Failed to retrieve Walkthrough from {url}: {e}") - # Find all the onmouseover links onmouseover_links = soup.find_all(lambda tag: tag.name == 'a' and 'onmouseover' in tag.attrs) - - # Extract the href attribute from the onmouseover links hrefs = [link['href'] for link in onmouseover_links] screensLarge = hrefs image_tag = soup.find('img', class_='border') - screen = 'https://www.trle.net' + image_tag['src'] + def get_var(var_name): return globals().get(var_name, "") - # Create a dictionary with your variables data = { "title": title, "author": author, @@ -143,12 +143,11 @@ def get_var(var_name): data["walkthrough"] = str(walkthrough) except NameError: data["walkthrough"] = "" - # Write the dictionary to a JSON file + with open('data.json', 'w') as json_file: json.dump(data, json_file) else: - print(f'Failed to retrieve content. Status code: {response.status_code}') + logging.error(f'Failed to retrieve content. 
Status code: {response.status_code}') lock_fd.close() os.remove(lock_file) - diff --git a/utils/getData2.py b/utils/getData2.py new file mode 100644 index 0000000..4b503ca --- /dev/null +++ b/utils/getData2.py @@ -0,0 +1,175 @@ +""" +Grab raw data from trle.net and put it in a data.json file +""" +import os +import sys +import time +import json +import fcntl +import hashlib +import requests +import logging +from bs4 import BeautifulSoup +from tqdm import tqdm + +# Set up logging +logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s:%(message)s') +logging.getLogger("requests").setLevel(logging.DEBUG) +logging.getLogger("urllib3").setLevel(logging.DEBUG) + +def download_file(url, cert, file_name): + response = requests.get(url, stream=True, verify=cert) + response.raise_for_status() + + total_size = int(response.headers.get('content-length', 0)) + block_size = 1024 # 1 Kilobyte + wrote = 0 + + with open(file_name, 'wb') as file: + for data in tqdm(response.iter_content(block_size), total=total_size // block_size, unit='KB', unit_scale=True): + wrote = wrote + len(data) + file.write(data) + + if total_size != 0 and wrote != total_size: + logging.error("ERROR, something went wrong with the download") + else: + logging.info(f"Downloaded {file_name} successfully") + +if __name__ == "__main__": + if len(sys.argv) != 2: + logging.error("Usage: python3 getData.py URL") + sys.exit(1) + else: + url = sys.argv[1] + +lock_file = '/tmp/TRLE.lock' +try: + lock_fd = open(lock_file, 'w') + fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB) +except IOError: + logging.error("Another instance is already running") + sys.exit(1) + +time.sleep(2) +# test url +# url = 'https://www.trle.net/sc/levelfeatures.php?lid=3573' + +cert = '/home/noisecode3/mySecretVirusFolder/trle-net-chain.pem' + +try: + response = requests.get(url, verify=cert) + response.raise_for_status() +except requests.exceptions.RequestException as e: + logging.error(f"Error fetching URL {url}: {e}") 
+ sys.exit(1) + +if response.status_code == 200: + soup = BeautifulSoup(response.text, 'html.parser') + logging.debug(f'response.text: {response.text}') + + title_span = soup.find('span', class_='subHeader') + if title_span: + title = title_span.get_text(strip=True) + br_tag = title_span.find('br') + if br_tag: + title = title_span.contents[0].strip() + else: + title = "missing" + + author = soup.find('a', class_='linkl').get_text(strip=True) or "missing" + type = soup.find('td', string='file type:').find_next('td').get_text(strip=True) or "missing" + class_ = soup.find('td', string='class:').find_next('td').get_text(strip=True) or "missing" + releaseDate = soup.find('td', string='release date:').find_next('td').get_text(strip=True) or "missing" + difficulty = soup.find('td', string='difficulty:').find_next('td').get_text(strip=True) or "missing" + duration = soup.find('td', string='duration:').find_next('td').get_text(strip=True) or "missing" + + specific_tags = soup.find_all('td', class_='medGText', align='left', valign='top') + body = specific_tags[1] if len(specific_tags) >= 2 else "missing" + + zipFileSize = float( + soup.find('td', string='file size:') + .find_next('td') + .get_text(strip=True) + .replace(',', '') + .replace('MB', '') + ) or 0.0 + + download_link = soup.find('a', string='Download') + if download_link: + url = download_link['href'] + time.sleep(2) + try: + response2 = requests.head(url, verify=cert, allow_redirects=True) + response2.raise_for_status() + download_url = response2.url + file_name = response2.url.split('/')[-1] + zipFileName = file_name + + download_file(url, cert, file_name) + + md5_checksum = hashlib.md5(open(file_name, 'rb').read()).hexdigest() + zipFileMd5 = md5_checksum + except requests.exceptions.RequestException as e: + logging.error(f"Failed to retrieve file information from {url}: {e}") + + walkthrough_link = soup.find('a', string='Walkthrough') + if walkthrough_link: + url = 'https://www.trle.net/sc/' + 
walkthrough_link['href'] + time.sleep(2) + try: + response3 = requests.get(url, verify=cert) + response3.raise_for_status() + soup2 = BeautifulSoup(response3.text, 'html.parser') + iframe_tag = soup2.find('iframe') + iframe_src = iframe_tag['src'] + url = "https://www.trle.net" + iframe_src + response4 = requests.get(url, verify=cert) + if response4.status_code == 200: + walkthrough = response4.text + else: + logging.error(f'Failed to retrieve iframe content. Status code: {response4.status_code}') + except requests.exceptions.RequestException as e: + logging.error(f"Failed to retrieve Walkthrough from {url}: {e}") + + onmouseover_links = soup.find_all(lambda tag: tag.name == 'a' and 'onmouseover' in tag.attrs) + hrefs = [link['href'] for link in onmouseover_links] + screensLarge = hrefs + + image_tag = soup.find('img', class_='border') + screen = 'https://www.trle.net' + image_tag['src'] + + def get_var(var_name): + return globals().get(var_name, "") + + data = { + "title": title, + "author": author, + "type": type, + "class_": class_, + "releaseDate": releaseDate, + "difficulty": difficulty, + "duration": duration, + "screen": screen, + "screensLarge": screensLarge, + "zipFileSize": zipFileSize, + "zipFileName": zipFileName, + "zipFileMd5": zipFileMd5, + "body": body, + "walkthrough": get_var("walkthrough"), + "download_url": download_url, + } + if body: + data["body"] = str(body) + try: + if walkthrough: + data["walkthrough"] = str(walkthrough) + except NameError: + data["walkthrough"] = "" + + with open('data.json', 'w') as json_file: + json.dump(data, json_file) +else: + logging.error(f'Failed to retrieve content. Status code: {response.status_code}') + +lock_fd.close() +os.remove(lock_file) diff --git a/utils/tombll.db b/utils/tombll.db index 13de8f6..29bd576 100644 Binary files a/utils/tombll.db and b/utils/tombll.db differ