diff --git a/build.sh b/build.sh
index 2de0769..3ba802c 100755
--- a/build.sh
+++ b/build.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-cd "$(dirname $0)"
+cd "$(dirname "$0")"
 rm -fr build
 mkdir build
 cd build
diff --git a/clean_nvim_lsp.sh b/clean_nvim_lsp.sh
index 5c3cb44..32f1d99 100755
--- a/clean_nvim_lsp.sh
+++ b/clean_nvim_lsp.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-cd "$(dirname $0)"
+cd "$(dirname "$0")"
 # clean up command
 rm -fr CMakeCache.txt CMakeFiles Makefile cmake_install.cmake \
     compile_commands.json TombRaiderLinuxLauncher \
diff --git a/setup_nvim_lsp.sh b/setup_nvim_lsp.sh
index b98ec34..27f2a2f 100755
--- a/setup_nvim_lsp.sh
+++ b/setup_nvim_lsp.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-cd "$(dirname $0)"
+cd "$(dirname "$0")"
 # don't forget that you need to build it one time
 # and look at .neovim how to use debuger
 mkdir -p .gdb/qt5prettyprinters/
diff --git a/utils/getData.py b/utils/getData.py
index 9b3cccf..54b29af 100644
--- a/utils/getData.py
+++ b/utils/getData.py
@@ -11,20 +11,43 @@
 import logging
 from bs4 import BeautifulSoup
 from tqdm import tqdm
+from urllib.parse import urlparse
 
 # Set up logging
 logging.basicConfig(level=logging.DEBUG,
                     format='%(asctime)s %(levelname)s:%(message)s')
 logging.getLogger("requests").setLevel(logging.DEBUG)
 
+def validate_url(url):
+    # Check that the URL has a valid format
+    parsed_url = urlparse(url)
+
+    # Check that the URL has a valid scheme and network location (domain)
+    if not all([parsed_url.scheme, parsed_url.netloc]):
+        logging.error("Invalid URL format.")
+        sys.exit(1)
+
+    # Check that the protocol is HTTPS
+    if parsed_url.scheme != "https":
+        logging.error("Only HTTPS URLs are allowed.")
+        sys.exit(1)
+
+    # Check that the domain is trle.net or a subdomain of trle.net
+    if not parsed_url.netloc.endswith("trle.net"):
+        logging.error("URL must belong to the domain 'trle.net'.")
+        sys.exit(1)
+
+    # All checks passed
+    return True
+
 def calculate_md5(url, cert):
     try:
         # Stream the response to handle large files
         response = requests.get(url, verify=cert, stream=True, timeout=10)
         response.raise_for_status()
-
+
         # Get the total length of the file for the progress bar
         total_length = int(response.headers.get('content-length', 0))
-
+
         # Initialize the MD5 hash object
         md5_hash = hashlib.md5()
@@ -34,19 +57,22 @@ def calculate_md5(url, cert):
                 if chunk:  # filter out keep-alive new chunks
                     md5_hash.update(chunk)
                     progress_bar.update(len(chunk))
-
+
         # Return the hex digest of the MD5 hash
         return md5_hash.hexdigest()
     except requests.exceptions.RequestException as e:
         logging.error(f"Failed to download {url}: {e}")
         return None
 
+url = None
+
 if __name__ == "__main__":
     if len(sys.argv) != 2:
         logging.error("Usage: python3 getData.py URL")
         sys.exit(1)
     else:
         url = sys.argv[1]
+        validate_url(url)
 
     lock_file = '/tmp/TRLE.lock'
     try:
@@ -171,9 +197,6 @@ def calculate_md5(url, cert):
         image_tag = soup.find('img', class_='border')
         screen = 'https://www.trle.net' + image_tag['src']
 
-        def get_var(var_name):
-            return globals().get(var_name, "")
-
         data = {
             "title": title,
             "author": author,
@@ -188,7 +211,7 @@ def get_var(var_name):
             "zipFileName": zipFileName,
             "zipFileMd5": zipFileMd5,
             "body": body,
-            "walkthrough": get_var("walkthrough"),
+            "walkthrough": walkthrough,
             "download_url": download_url,
         }
         if body:
diff --git a/utils/makeDatabase.py b/utils/makeDatabase.py
index 07c95a3..eb5375c 100644
--- a/utils/makeDatabase.py
+++ b/utils/makeDatabase.py
@@ -175,7 +175,7 @@ class INT NOT NULL,
 # Add Game File data
 for i in range(1, 6):
     file = f'fileList-TR{i}.json'
-
+
     # Load data from JSON file
     with open(file, 'r') as json_file:
         file_info = json.load(json_file)
@@ -225,3 +225,4 @@ class INT NOT NULL,
 
 conn.commit()
 conn.close()
+
diff --git a/utils/updateDB_1.0.0.py b/utils/updateDB_1.0.0.py
index 6fe0f25..fe951a7 100644
--- a/utils/updateDB_1.0.0.py
+++ b/utils/updateDB_1.0.0.py
@@ -68,3 +68,4 @@ class INT NOT NULL,
 
 # Usage
 update_table_schema_and_data(arg_path)
+
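
Reviewer note, not part of the patch: a minimal pytest sketch of how the new validate_url guard in utils/getData.py is expected to behave. It assumes pytest is available and that getData.py is importable as a module (e.g. pytest run from utils/ with requests, bs4 and tqdm installed); the test names and example URLs are illustrative only.

# Illustrative only -- not part of the patch. Assumes getData.py is on the
# import path (e.g. run pytest from utils/) and its dependencies are installed.
import pytest
from getData import validate_url


def test_accepts_https_trle_url():
    # An HTTPS URL on trle.net passes all three checks and returns True
    assert validate_url("https://www.trle.net/") is True


@pytest.mark.parametrize("bad_url", [
    "http://www.trle.net/",   # not HTTPS
    "https://example.com/",   # outside trle.net
    "not-a-url",              # no scheme or netloc
])
def test_rejects_invalid_urls(bad_url):
    # validate_url() calls sys.exit(1) on failure, which raises SystemExit
    with pytest.raises(SystemExit):
        validate_url(bad_url)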