diff --git a/submissions/final-submission/final_submission.ipynb b/submissions/Final Submission/Building and Training Mini Network/Mini Network.ipynb
similarity index 100%
rename from submissions/final-submission/final_submission.ipynb
rename to submissions/Final Submission/Building and Training Mini Network/Mini Network.ipynb
diff --git a/submissions/Final Submission/Building and Training Mini Network/Mini Network.pdf b/submissions/Final Submission/Building and Training Mini Network/Mini Network.pdf
new file mode 100644
index 0000000..81c4079
Binary files /dev/null and b/submissions/Final Submission/Building and Training Mini Network/Mini Network.pdf differ
diff --git a/submissions/Final Submission/Data Collection and Preprocessing/Update_1.ipynb b/submissions/Final Submission/Data Collection and Preprocessing/Update_1.ipynb
new file mode 100644
index 0000000..1b2d358
--- /dev/null
+++ b/submissions/Final Submission/Data Collection and Preprocessing/Update_1.ipynb
@@ -0,0 +1,1568 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "view-in-github",
+ "colab_type": "text"
+ },
+ "source": [
+ ""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Download the Repository\n",
+ "\n",
+ "[Repository Link](https://github.com/balnarendrasapa/road-detection)\n",
+ "\n",
+ "- This is our team's repository. This repository contains all the necessary code that we worked on and it also contains the dataset that we annotated.\n",
+ "\n",
+ "- You do not need to do anything like uploading and adjusting the paths. Just run the cells sequentially.\n",
+ "\n",
+ "- All the necessary commands are written in this notebook itself"
+ ],
+ "metadata": {
+ "id": "JzycIPSy2AKH"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 0
+ },
+ "id": "dyznWPpKmNIs",
+ "outputId": "de527f56-f2a7-4593-c4c6-ab12b4646ca6"
+ },
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Cloning into 'road-detection'...\n",
+ "remote: Enumerating objects: 324, done.\u001b[K\n",
+ "remote: Counting objects: 100% (65/65), done.\u001b[K\n",
+ "remote: Compressing objects: 100% (52/52), done.\u001b[K\n",
+ "remote: Total 324 (delta 12), reused 20 (delta 12), pack-reused 259\u001b[K\n",
+ "Receiving objects: 100% (324/324), 199.88 MiB | 31.94 MiB/s, done.\n",
+ "Resolving deltas: 100% (105/105), done.\n"
+ ]
+ }
+ ],
+ "source": [
+ "!git clone https://github.com/balnarendrasapa/road-detection.git"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Install the Requirements\n",
+ "\n",
+ "- Install all the python dependencies\n",
+ "- After Installing dependencies, Restart the runtime. If you do not restart the runtime, the python will throw \"module not found error\""
+ ],
+ "metadata": {
+ "id": "AVXcandz2wFA"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 2320
+ },
+ "id": "298SpxZcDf1R",
+ "outputId": "c2a60646-eca6-47d0-d56c-61bc3fa7f08d"
+ },
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Requirement already satisfied: certifi==2023.7.22 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 1)) (2023.7.22)\n",
+ "Requirement already satisfied: charset-normalizer==3.3.2 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 2)) (3.3.2)\n",
+ "Collecting colorama==0.4.6 (from -r road-detection/TwinLiteNet/requirements.txt (line 3))\n",
+ " Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\n",
+ "Requirement already satisfied: contourpy==1.2.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 4)) (1.2.0)\n",
+ "Requirement already satisfied: cycler==0.12.1 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 5)) (0.12.1)\n",
+ "Collecting dnspython==2.4.2 (from -r road-detection/TwinLiteNet/requirements.txt (line 6))\n",
+ " Downloading dnspython-2.4.2-py3-none-any.whl (300 kB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m300.4/300.4 kB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hCollecting elephant==0.12.0 (from -r road-detection/TwinLiteNet/requirements.txt (line 7))\n",
+ " Downloading elephant-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m9.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: filelock==3.13.1 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 8)) (3.13.1)\n",
+ "Requirement already satisfied: fonttools==4.44.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 9)) (4.44.0)\n",
+ "Collecting fsspec==2023.10.0 (from -r road-detection/TwinLiteNet/requirements.txt (line 10))\n",
+ " Downloading fsspec-2023.10.0-py3-none-any.whl (166 kB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m166.4/166.4 kB\u001b[0m \u001b[31m11.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: idna==3.4 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 11)) (3.4)\n",
+ "Requirement already satisfied: Jinja2==3.1.2 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 12)) (3.1.2)\n",
+ "Collecting joblib==1.2.0 (from -r road-detection/TwinLiteNet/requirements.txt (line 13))\n",
+ " Downloading joblib-1.2.0-py3-none-any.whl (297 kB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m298.0/298.0 kB\u001b[0m \u001b[31m15.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: kiwisolver==1.4.5 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 14)) (1.4.5)\n",
+ "Requirement already satisfied: MarkupSafe==2.1.3 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 15)) (2.1.3)\n",
+ "Requirement already satisfied: matplotlib==3.7.1 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 16)) (3.7.1)\n",
+ "Requirement already satisfied: mpmath==1.3.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 17)) (1.3.0)\n",
+ "Collecting neo==0.12.0 (from -r road-detection/TwinLiteNet/requirements.txt (line 18))\n",
+ " Downloading neo-0.12.0-py3-none-any.whl (586 kB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m586.9/586.9 kB\u001b[0m \u001b[31m15.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: networkx==3.2.1 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 19)) (3.2.1)\n",
+ "Collecting numpy==1.24.3 (from -r road-detection/TwinLiteNet/requirements.txt (line 20))\n",
+ " Downloading numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.3 MB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m17.3/17.3 MB\u001b[0m \u001b[31m53.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hCollecting opencv-python==4.7.0.72 (from -r road-detection/TwinLiteNet/requirements.txt (line 21))\n",
+ " Downloading opencv_python-4.7.0.72-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (61.8 MB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m61.8/61.8 MB\u001b[0m \u001b[31m10.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: packaging==23.2 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 22)) (23.2)\n",
+ "Collecting Pillow==9.5.0 (from -r road-detection/TwinLiteNet/requirements.txt (line 23))\n",
+ " Downloading Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl (3.4 MB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m58.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: pyparsing==3.1.1 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 24)) (3.1.1)\n",
+ "Requirement already satisfied: python-dateutil==2.8.2 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 25)) (2.8.2)\n",
+ "Collecting python-etcd==0.4.5 (from -r road-detection/TwinLiteNet/requirements.txt (line 26))\n",
+ " Downloading python-etcd-0.4.5.tar.gz (37 kB)\n",
+ " Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+ "Requirement already satisfied: PyYAML==6.0.1 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 27)) (6.0.1)\n",
+ "Collecting quantities==0.14.1 (from -r road-detection/TwinLiteNet/requirements.txt (line 28))\n",
+ " Downloading quantities-0.14.1-py3-none-any.whl (87 kB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m87.9/87.9 kB\u001b[0m \u001b[31m11.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: requests==2.31.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 29)) (2.31.0)\n",
+ "Collecting scikit-learn==1.3.2 (from -r road-detection/TwinLiteNet/requirements.txt (line 30))\n",
+ " Downloading scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (10.8 MB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m10.8/10.8 MB\u001b[0m \u001b[31m65.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hCollecting scipy==1.10.1 (from -r road-detection/TwinLiteNet/requirements.txt (line 31))\n",
+ " Downloading scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (34.4 MB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m34.4/34.4 MB\u001b[0m \u001b[31m29.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: six==1.16.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 32)) (1.16.0)\n",
+ "Requirement already satisfied: sympy==1.12 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 33)) (1.12)\n",
+ "Requirement already satisfied: threadpoolctl==3.2.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 34)) (3.2.0)\n",
+ "Requirement already satisfied: torch==2.1.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 35)) (2.1.0+cu118)\n",
+ "Requirement already satisfied: torchdata==0.7.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 36)) (0.7.0)\n",
+ "Collecting torchelastic==0.2.2 (from -r road-detection/TwinLiteNet/requirements.txt (line 37))\n",
+ " Downloading torchelastic-0.2.2-py3-none-any.whl (111 kB)\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m111.5/111.5 kB\u001b[0m \u001b[31m15.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25hRequirement already satisfied: torchtext==0.16.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 38)) (0.16.0)\n",
+ "Requirement already satisfied: torchvision==0.16.0 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 39)) (0.16.0+cu118)\n",
+ "Requirement already satisfied: tqdm==4.66.1 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 40)) (4.66.1)\n",
+ "Collecting typing_extensions==4.8.0 (from -r road-detection/TwinLiteNet/requirements.txt (line 41))\n",
+ " Downloading typing_extensions-4.8.0-py3-none-any.whl (31 kB)\n",
+ "Requirement already satisfied: urllib3==2.0.7 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 42)) (2.0.7)\n",
+ "Requirement already satisfied: webcolors==1.13 in /usr/local/lib/python3.10/dist-packages (from -r road-detection/TwinLiteNet/requirements.txt (line 43)) (1.13)\n",
+ "Collecting yacs==0.1.8 (from -r road-detection/TwinLiteNet/requirements.txt (line 44))\n",
+ " Downloading yacs-0.1.8-py3-none-any.whl (14 kB)\n",
+ "Collecting zipp==3.15.0 (from -r road-detection/TwinLiteNet/requirements.txt (line 45))\n",
+ " Downloading zipp-3.15.0-py3-none-any.whl (6.8 kB)\n",
+ "Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch==2.1.0->-r road-detection/TwinLiteNet/requirements.txt (line 35)) (2.1.0)\n",
+ "Building wheels for collected packages: python-etcd\n",
+ " Building wheel for python-etcd (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+ " Created wheel for python-etcd: filename=python_etcd-0.4.5-py3-none-any.whl size=38481 sha256=9db474052e1f4012c68d40d82fff1be4d4bf213aa023bb4722617e1b64390a78\n",
+ " Stored in directory: /root/.cache/pip/wheels/93/5f/1b/056db07a0ab1c0b7efe175928d2a10b614e0e00d7bab0b6496\n",
+ "Successfully built python-etcd\n",
+ "Installing collected packages: zipp, yacs, typing_extensions, Pillow, numpy, joblib, fsspec, dnspython, colorama, scipy, quantities, python-etcd, opencv-python, torchelastic, scikit-learn, neo, elephant\n",
+ " Attempting uninstall: zipp\n",
+ " Found existing installation: zipp 3.17.0\n",
+ " Uninstalling zipp-3.17.0:\n",
+ " Successfully uninstalled zipp-3.17.0\n",
+ " Attempting uninstall: typing_extensions\n",
+ " Found existing installation: typing_extensions 4.5.0\n",
+ " Uninstalling typing_extensions-4.5.0:\n",
+ " Successfully uninstalled typing_extensions-4.5.0\n",
+ " Attempting uninstall: Pillow\n",
+ " Found existing installation: Pillow 9.4.0\n",
+ " Uninstalling Pillow-9.4.0:\n",
+ " Successfully uninstalled Pillow-9.4.0\n",
+ " Attempting uninstall: numpy\n",
+ " Found existing installation: numpy 1.23.5\n",
+ " Uninstalling numpy-1.23.5:\n",
+ " Successfully uninstalled numpy-1.23.5\n",
+ " Attempting uninstall: joblib\n",
+ " Found existing installation: joblib 1.3.2\n",
+ " Uninstalling joblib-1.3.2:\n",
+ " Successfully uninstalled joblib-1.3.2\n",
+ " Attempting uninstall: fsspec\n",
+ " Found existing installation: fsspec 2023.6.0\n",
+ " Uninstalling fsspec-2023.6.0:\n",
+ " Successfully uninstalled fsspec-2023.6.0\n",
+ " Attempting uninstall: scipy\n",
+ " Found existing installation: scipy 1.11.3\n",
+ " Uninstalling scipy-1.11.3:\n",
+ " Successfully uninstalled scipy-1.11.3\n",
+ " Attempting uninstall: opencv-python\n",
+ " Found existing installation: opencv-python 4.8.0.76\n",
+ " Uninstalling opencv-python-4.8.0.76:\n",
+ " Successfully uninstalled opencv-python-4.8.0.76\n",
+ " Attempting uninstall: scikit-learn\n",
+ " Found existing installation: scikit-learn 1.2.2\n",
+ " Uninstalling scikit-learn-1.2.2:\n",
+ " Successfully uninstalled scikit-learn-1.2.2\n",
+ "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
+ "lida 0.0.10 requires fastapi, which is not installed.\n",
+ "lida 0.0.10 requires kaleido, which is not installed.\n",
+ "lida 0.0.10 requires python-multipart, which is not installed.\n",
+ "lida 0.0.10 requires uvicorn, which is not installed.\n",
+ "gcsfs 2023.6.0 requires fsspec==2023.6.0, but you have fsspec 2023.10.0 which is incompatible.\n",
+ "tensorflow-probability 0.22.0 requires typing-extensions<4.6.0, but you have typing-extensions 4.8.0 which is incompatible.\u001b[0m\u001b[31m\n",
+ "\u001b[0mSuccessfully installed Pillow-9.5.0 colorama-0.4.6 dnspython-2.4.2 elephant-0.12.0 fsspec-2023.10.0 joblib-1.2.0 neo-0.12.0 numpy-1.24.3 opencv-python-4.7.0.72 python-etcd-0.4.5 quantities-0.14.1 scikit-learn-1.3.2 scipy-1.10.1 torchelastic-0.2.2 typing_extensions-4.8.0 yacs-0.1.8 zipp-3.15.0\n"
+ ]
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "application/vnd.colab-display-data+json": {
+ "pip_warning": {
+ "packages": [
+ "PIL",
+ "numpy"
+ ]
+ }
+ }
+ },
+ "metadata": {}
+ }
+ ],
+ "source": [
+ "!pip install -r road-detection/TwinLiteNet/requirements.txt"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Copy Dataset from Repository\n",
+ "\n",
+ "- Our repository contains dataset.zip in datasets folder in the repository. copy that zip file to root"
+ ],
+ "metadata": {
+ "id": "WtYxavR2503Q"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!cp road-detection/datasets/dataset.zip ./"
+ ],
+ "metadata": {
+ "id": "ihjXltFR1OQI"
+ },
+ "execution_count": 1,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Unzip the file"
+ ],
+ "metadata": {
+ "id": "ferlFJ_76GBA"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!unzip dataset.zip"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 0
+ },
+ "id": "w7AUZJZ0f491",
+ "outputId": "c3185efa-27a4-487d-d336-3e8960a81d58"
+ },
+ "execution_count": 2,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Archive: dataset.zip\n",
+ " creating: dataset/test/\n",
+ " creating: dataset/test/images/\n",
+ " inflating: dataset/test/images/road_image_160.png \n",
+ " inflating: dataset/test/images/road_image_161.png \n",
+ " inflating: dataset/test/images/road_image_162.png \n",
+ " inflating: dataset/test/images/road_image_163.png \n",
+ " inflating: dataset/test/images/road_image_164.png \n",
+ " inflating: dataset/test/images/road_image_165.png \n",
+ " inflating: dataset/test/images/road_image_166.png \n",
+ " inflating: dataset/test/images/road_image_167.png \n",
+ " inflating: dataset/test/images/road_image_168.png \n",
+ " inflating: dataset/test/images/road_image_169.png \n",
+ " inflating: dataset/test/images/road_image_170.png \n",
+ " inflating: dataset/test/images/road_image_171.png \n",
+ " inflating: dataset/test/images/road_image_172.png \n",
+ " inflating: dataset/test/images/road_image_173.png \n",
+ " inflating: dataset/test/images/road_image_174.png \n",
+ " inflating: dataset/test/images/road_image_175.png \n",
+ " inflating: dataset/test/images/road_image_176.png \n",
+ " inflating: dataset/test/images/road_image_177.png \n",
+ " inflating: dataset/test/images/road_image_178.png \n",
+ " inflating: dataset/test/images/road_image_179.png \n",
+ " creating: dataset/test/lane/\n",
+ " inflating: dataset/test/lane/road_image_160.png \n",
+ " inflating: dataset/test/lane/road_image_161.png \n",
+ " inflating: dataset/test/lane/road_image_162.png \n",
+ " inflating: dataset/test/lane/road_image_163.png \n",
+ " inflating: dataset/test/lane/road_image_164.png \n",
+ " inflating: dataset/test/lane/road_image_165.png \n",
+ " inflating: dataset/test/lane/road_image_166.png \n",
+ " inflating: dataset/test/lane/road_image_167.png \n",
+ " inflating: dataset/test/lane/road_image_168.png \n",
+ " inflating: dataset/test/lane/road_image_169.png \n",
+ " inflating: dataset/test/lane/road_image_170.png \n",
+ " inflating: dataset/test/lane/road_image_171.png \n",
+ " inflating: dataset/test/lane/road_image_172.png \n",
+ " inflating: dataset/test/lane/road_image_173.png \n",
+ " inflating: dataset/test/lane/road_image_174.png \n",
+ " inflating: dataset/test/lane/road_image_175.png \n",
+ " inflating: dataset/test/lane/road_image_176.png \n",
+ " inflating: dataset/test/lane/road_image_177.png \n",
+ " inflating: dataset/test/lane/road_image_178.png \n",
+ " inflating: dataset/test/lane/road_image_179.png \n",
+ " creating: dataset/test/segments/\n",
+ " inflating: dataset/test/segments/road_image_160.png \n",
+ " inflating: dataset/test/segments/road_image_161.png \n",
+ " inflating: dataset/test/segments/road_image_162.png \n",
+ " inflating: dataset/test/segments/road_image_163.png \n",
+ " inflating: dataset/test/segments/road_image_164.png \n",
+ " inflating: dataset/test/segments/road_image_165.png \n",
+ " inflating: dataset/test/segments/road_image_166.png \n",
+ " inflating: dataset/test/segments/road_image_167.png \n",
+ " inflating: dataset/test/segments/road_image_168.png \n",
+ " inflating: dataset/test/segments/road_image_169.png \n",
+ " inflating: dataset/test/segments/road_image_170.png \n",
+ " inflating: dataset/test/segments/road_image_171.png \n",
+ " inflating: dataset/test/segments/road_image_172.png \n",
+ " inflating: dataset/test/segments/road_image_173.png \n",
+ " inflating: dataset/test/segments/road_image_174.png \n",
+ " inflating: dataset/test/segments/road_image_175.png \n",
+ " inflating: dataset/test/segments/road_image_176.png \n",
+ " inflating: dataset/test/segments/road_image_177.png \n",
+ " inflating: dataset/test/segments/road_image_178.png \n",
+ " inflating: dataset/test/segments/road_image_179.png \n",
+ " creating: dataset/train/\n",
+ " creating: dataset/train/images/\n",
+ " inflating: dataset/train/images/road_image_0.png \n",
+ " inflating: dataset/train/images/road_image_1.png \n",
+ " inflating: dataset/train/images/road_image_10.png \n",
+ " inflating: dataset/train/images/road_image_100.png \n",
+ " inflating: dataset/train/images/road_image_101.png \n",
+ " inflating: dataset/train/images/road_image_102.png \n",
+ " inflating: dataset/train/images/road_image_103.png \n",
+ " inflating: dataset/train/images/road_image_104.png \n",
+ " inflating: dataset/train/images/road_image_105.png \n",
+ " inflating: dataset/train/images/road_image_106.png \n",
+ " inflating: dataset/train/images/road_image_107.png \n",
+ " inflating: dataset/train/images/road_image_108.png \n",
+ " inflating: dataset/train/images/road_image_109.png \n",
+ " inflating: dataset/train/images/road_image_11.png \n",
+ " inflating: dataset/train/images/road_image_110.png \n",
+ " inflating: dataset/train/images/road_image_111.png \n",
+ " inflating: dataset/train/images/road_image_112.png \n",
+ " inflating: dataset/train/images/road_image_113.png \n",
+ " inflating: dataset/train/images/road_image_114.png \n",
+ " inflating: dataset/train/images/road_image_115.png \n",
+ " inflating: dataset/train/images/road_image_116.png \n",
+ " inflating: dataset/train/images/road_image_117.png \n",
+ " inflating: dataset/train/images/road_image_118.png \n",
+ " inflating: dataset/train/images/road_image_119.png \n",
+ " inflating: dataset/train/images/road_image_12.png \n",
+ " inflating: dataset/train/images/road_image_120.png \n",
+ " inflating: dataset/train/images/road_image_121.png \n",
+ " inflating: dataset/train/images/road_image_122.png \n",
+ " inflating: dataset/train/images/road_image_123.png \n",
+ " inflating: dataset/train/images/road_image_124.png \n",
+ " inflating: dataset/train/images/road_image_125.png \n",
+ " inflating: dataset/train/images/road_image_126.png \n",
+ " inflating: dataset/train/images/road_image_127.png \n",
+ " inflating: dataset/train/images/road_image_128.png \n",
+ " inflating: dataset/train/images/road_image_129.png \n",
+ " inflating: dataset/train/images/road_image_13.png \n",
+ " inflating: dataset/train/images/road_image_130.png \n",
+ " inflating: dataset/train/images/road_image_131.png \n",
+ " inflating: dataset/train/images/road_image_132.png \n",
+ " inflating: dataset/train/images/road_image_133.png \n",
+ " inflating: dataset/train/images/road_image_134.png \n",
+ " inflating: dataset/train/images/road_image_135.png \n",
+ " inflating: dataset/train/images/road_image_136.png \n",
+ " inflating: dataset/train/images/road_image_137.png \n",
+ " inflating: dataset/train/images/road_image_138.png \n",
+ " inflating: dataset/train/images/road_image_139.png \n",
+ " inflating: dataset/train/images/road_image_14.png \n",
+ " inflating: dataset/train/images/road_image_140.png \n",
+ " inflating: dataset/train/images/road_image_141.png \n",
+ " inflating: dataset/train/images/road_image_142.png \n",
+ " inflating: dataset/train/images/road_image_143.png \n",
+ " inflating: dataset/train/images/road_image_144.png \n",
+ " inflating: dataset/train/images/road_image_145.png \n",
+ " inflating: dataset/train/images/road_image_146.png \n",
+ " inflating: dataset/train/images/road_image_147.png \n",
+ " inflating: dataset/train/images/road_image_148.png \n",
+ " inflating: dataset/train/images/road_image_149.png \n",
+ " inflating: dataset/train/images/road_image_15.png \n",
+ " inflating: dataset/train/images/road_image_150.png \n",
+ " inflating: dataset/train/images/road_image_151.png \n",
+ " inflating: dataset/train/images/road_image_152.png \n",
+ " inflating: dataset/train/images/road_image_153.png \n",
+ " inflating: dataset/train/images/road_image_154.png \n",
+ " inflating: dataset/train/images/road_image_155.png \n",
+ " inflating: dataset/train/images/road_image_156.png \n",
+ " inflating: dataset/train/images/road_image_157.png \n",
+ " inflating: dataset/train/images/road_image_158.png \n",
+ " inflating: dataset/train/images/road_image_159.png \n",
+ " inflating: dataset/train/images/road_image_16.png \n",
+ " inflating: dataset/train/images/road_image_17.png \n",
+ " inflating: dataset/train/images/road_image_18.png \n",
+ " inflating: dataset/train/images/road_image_19.png \n",
+ " inflating: dataset/train/images/road_image_2.png \n",
+ " inflating: dataset/train/images/road_image_20.png \n",
+ " inflating: dataset/train/images/road_image_21.png \n",
+ " inflating: dataset/train/images/road_image_22.png \n",
+ " inflating: dataset/train/images/road_image_23.png \n",
+ " inflating: dataset/train/images/road_image_24.png \n",
+ " inflating: dataset/train/images/road_image_25.png \n",
+ " inflating: dataset/train/images/road_image_26.png \n",
+ " inflating: dataset/train/images/road_image_27.png \n",
+ " inflating: dataset/train/images/road_image_28.png \n",
+ " inflating: dataset/train/images/road_image_29.png \n",
+ " inflating: dataset/train/images/road_image_3.png \n",
+ " inflating: dataset/train/images/road_image_30.png \n",
+ " inflating: dataset/train/images/road_image_31.png \n",
+ " inflating: dataset/train/images/road_image_32.png \n",
+ " inflating: dataset/train/images/road_image_33.png \n",
+ " inflating: dataset/train/images/road_image_34.png \n",
+ " inflating: dataset/train/images/road_image_35.png \n",
+ " inflating: dataset/train/images/road_image_36.png \n",
+ " inflating: dataset/train/images/road_image_37.png \n",
+ " inflating: dataset/train/images/road_image_38.png \n",
+ " inflating: dataset/train/images/road_image_39.png \n",
+ " inflating: dataset/train/images/road_image_4.png \n",
+ " inflating: dataset/train/images/road_image_40.png \n",
+ " inflating: dataset/train/images/road_image_41.png \n",
+ " inflating: dataset/train/images/road_image_42.png \n",
+ " inflating: dataset/train/images/road_image_43.png \n",
+ " inflating: dataset/train/images/road_image_44.png \n",
+ " inflating: dataset/train/images/road_image_45.png \n",
+ " inflating: dataset/train/images/road_image_46.png \n",
+ " inflating: dataset/train/images/road_image_47.png \n",
+ " inflating: dataset/train/images/road_image_48.png \n",
+ " inflating: dataset/train/images/road_image_49.png \n",
+ " inflating: dataset/train/images/road_image_5.png \n",
+ " inflating: dataset/train/images/road_image_50.png \n",
+ " inflating: dataset/train/images/road_image_51.png \n",
+ " inflating: dataset/train/images/road_image_52.png \n",
+ " inflating: dataset/train/images/road_image_53.png \n",
+ " inflating: dataset/train/images/road_image_54.png \n",
+ " inflating: dataset/train/images/road_image_55.png \n",
+ " inflating: dataset/train/images/road_image_56.png \n",
+ " inflating: dataset/train/images/road_image_57.png \n",
+ " inflating: dataset/train/images/road_image_58.png \n",
+ " inflating: dataset/train/images/road_image_59.png \n",
+ " inflating: dataset/train/images/road_image_6.png \n",
+ " inflating: dataset/train/images/road_image_60.png \n",
+ " inflating: dataset/train/images/road_image_61.png \n",
+ " inflating: dataset/train/images/road_image_62.png \n",
+ " inflating: dataset/train/images/road_image_63.png \n",
+ " inflating: dataset/train/images/road_image_64.png \n",
+ " inflating: dataset/train/images/road_image_65.png \n",
+ " inflating: dataset/train/images/road_image_66.png \n",
+ " inflating: dataset/train/images/road_image_67.png \n",
+ " inflating: dataset/train/images/road_image_68.png \n",
+ " inflating: dataset/train/images/road_image_69.png \n",
+ " inflating: dataset/train/images/road_image_7.png \n",
+ " inflating: dataset/train/images/road_image_70.png \n",
+ " inflating: dataset/train/images/road_image_71.png \n",
+ " inflating: dataset/train/images/road_image_72.png \n",
+ " inflating: dataset/train/images/road_image_73.png \n",
+ " inflating: dataset/train/images/road_image_74.png \n",
+ " inflating: dataset/train/images/road_image_75.png \n",
+ " inflating: dataset/train/images/road_image_76.png \n",
+ " inflating: dataset/train/images/road_image_77.png \n",
+ " inflating: dataset/train/images/road_image_78.png \n",
+ " inflating: dataset/train/images/road_image_79.png \n",
+ " inflating: dataset/train/images/road_image_8.png \n",
+ " inflating: dataset/train/images/road_image_80.png \n",
+ " inflating: dataset/train/images/road_image_81.png \n",
+ " inflating: dataset/train/images/road_image_82.png \n",
+ " inflating: dataset/train/images/road_image_83.png \n",
+ " inflating: dataset/train/images/road_image_84.png \n",
+ " inflating: dataset/train/images/road_image_85.png \n",
+ " inflating: dataset/train/images/road_image_86.png \n",
+ " inflating: dataset/train/images/road_image_87.png \n",
+ " inflating: dataset/train/images/road_image_88.png \n",
+ " inflating: dataset/train/images/road_image_89.png \n",
+ " inflating: dataset/train/images/road_image_9.png \n",
+ " inflating: dataset/train/images/road_image_90.png \n",
+ " inflating: dataset/train/images/road_image_91.png \n",
+ " inflating: dataset/train/images/road_image_92.png \n",
+ " inflating: dataset/train/images/road_image_93.png \n",
+ " inflating: dataset/train/images/road_image_94.png \n",
+ " inflating: dataset/train/images/road_image_95.png \n",
+ " inflating: dataset/train/images/road_image_96.png \n",
+ " inflating: dataset/train/images/road_image_97.png \n",
+ " inflating: dataset/train/images/road_image_98.png \n",
+ " inflating: dataset/train/images/road_image_99.png \n",
+ " creating: dataset/train/lane/\n",
+ " inflating: dataset/train/lane/road_image_0.png \n",
+ " inflating: dataset/train/lane/road_image_1.png \n",
+ " inflating: dataset/train/lane/road_image_10.png \n",
+ " inflating: dataset/train/lane/road_image_100.png \n",
+ " inflating: dataset/train/lane/road_image_101.png \n",
+ " inflating: dataset/train/lane/road_image_102.png \n",
+ " inflating: dataset/train/lane/road_image_103.png \n",
+ " inflating: dataset/train/lane/road_image_104.png \n",
+ " inflating: dataset/train/lane/road_image_105.png \n",
+ " inflating: dataset/train/lane/road_image_106.png \n",
+ " inflating: dataset/train/lane/road_image_107.png \n",
+ " inflating: dataset/train/lane/road_image_108.png \n",
+ " inflating: dataset/train/lane/road_image_109.png \n",
+ " inflating: dataset/train/lane/road_image_11.png \n",
+ " inflating: dataset/train/lane/road_image_110.png \n",
+ " inflating: dataset/train/lane/road_image_111.png \n",
+ " inflating: dataset/train/lane/road_image_112.png \n",
+ " inflating: dataset/train/lane/road_image_113.png \n",
+ " inflating: dataset/train/lane/road_image_114.png \n",
+ " inflating: dataset/train/lane/road_image_115.png \n",
+ " inflating: dataset/train/lane/road_image_116.png \n",
+ " inflating: dataset/train/lane/road_image_117.png \n",
+ " inflating: dataset/train/lane/road_image_118.png \n",
+ " inflating: dataset/train/lane/road_image_119.png \n",
+ " inflating: dataset/train/lane/road_image_12.png \n",
+ " inflating: dataset/train/lane/road_image_120.png \n",
+ " inflating: dataset/train/lane/road_image_121.png \n",
+ " inflating: dataset/train/lane/road_image_122.png \n",
+ " inflating: dataset/train/lane/road_image_123.png \n",
+ " inflating: dataset/train/lane/road_image_124.png \n",
+ " inflating: dataset/train/lane/road_image_125.png \n",
+ " inflating: dataset/train/lane/road_image_126.png \n",
+ " inflating: dataset/train/lane/road_image_127.png \n",
+ " inflating: dataset/train/lane/road_image_128.png \n",
+ " inflating: dataset/train/lane/road_image_129.png \n",
+ " inflating: dataset/train/lane/road_image_13.png \n",
+ " inflating: dataset/train/lane/road_image_130.png \n",
+ " inflating: dataset/train/lane/road_image_131.png \n",
+ " inflating: dataset/train/lane/road_image_132.png \n",
+ " inflating: dataset/train/lane/road_image_133.png \n",
+ " inflating: dataset/train/lane/road_image_134.png \n",
+ " inflating: dataset/train/lane/road_image_135.png \n",
+ " inflating: dataset/train/lane/road_image_136.png \n",
+ " inflating: dataset/train/lane/road_image_137.png \n",
+ " inflating: dataset/train/lane/road_image_138.png \n",
+ " inflating: dataset/train/lane/road_image_139.png \n",
+ " inflating: dataset/train/lane/road_image_14.png \n",
+ " inflating: dataset/train/lane/road_image_140.png \n",
+ " inflating: dataset/train/lane/road_image_141.png \n",
+ " inflating: dataset/train/lane/road_image_142.png \n",
+ " inflating: dataset/train/lane/road_image_143.png \n",
+ " inflating: dataset/train/lane/road_image_144.png \n",
+ " inflating: dataset/train/lane/road_image_145.png \n",
+ " inflating: dataset/train/lane/road_image_146.png \n",
+ " inflating: dataset/train/lane/road_image_147.png \n",
+ " inflating: dataset/train/lane/road_image_148.png \n",
+ " inflating: dataset/train/lane/road_image_149.png \n",
+ " inflating: dataset/train/lane/road_image_15.png \n",
+ " inflating: dataset/train/lane/road_image_150.png \n",
+ " inflating: dataset/train/lane/road_image_151.png \n",
+ " inflating: dataset/train/lane/road_image_152.png \n",
+ " inflating: dataset/train/lane/road_image_153.png \n",
+ " inflating: dataset/train/lane/road_image_154.png \n",
+ " inflating: dataset/train/lane/road_image_155.png \n",
+ " inflating: dataset/train/lane/road_image_156.png \n",
+ " inflating: dataset/train/lane/road_image_157.png \n",
+ " inflating: dataset/train/lane/road_image_158.png \n",
+ " inflating: dataset/train/lane/road_image_159.png \n",
+ " inflating: dataset/train/lane/road_image_16.png \n",
+ " inflating: dataset/train/lane/road_image_17.png \n",
+ " inflating: dataset/train/lane/road_image_18.png \n",
+ " inflating: dataset/train/lane/road_image_19.png \n",
+ " inflating: dataset/train/lane/road_image_2.png \n",
+ " inflating: dataset/train/lane/road_image_20.png \n",
+ " inflating: dataset/train/lane/road_image_21.png \n",
+ " inflating: dataset/train/lane/road_image_22.png \n",
+ " inflating: dataset/train/lane/road_image_23.png \n",
+ " inflating: dataset/train/lane/road_image_24.png \n",
+ " inflating: dataset/train/lane/road_image_25.png \n",
+ " inflating: dataset/train/lane/road_image_26.png \n",
+ " inflating: dataset/train/lane/road_image_27.png \n",
+ " inflating: dataset/train/lane/road_image_28.png \n",
+ " inflating: dataset/train/lane/road_image_29.png \n",
+ " inflating: dataset/train/lane/road_image_3.png \n",
+ " inflating: dataset/train/lane/road_image_30.png \n",
+ " inflating: dataset/train/lane/road_image_31.png \n",
+ " inflating: dataset/train/lane/road_image_32.png \n",
+ " inflating: dataset/train/lane/road_image_33.png \n",
+ " inflating: dataset/train/lane/road_image_34.png \n",
+ " inflating: dataset/train/lane/road_image_35.png \n",
+ " inflating: dataset/train/lane/road_image_36.png \n",
+ " inflating: dataset/train/lane/road_image_37.png \n",
+ " inflating: dataset/train/lane/road_image_38.png \n",
+ " inflating: dataset/train/lane/road_image_39.png \n",
+ " inflating: dataset/train/lane/road_image_4.png \n",
+ " inflating: dataset/train/lane/road_image_40.png \n",
+ " inflating: dataset/train/lane/road_image_41.png \n",
+ " inflating: dataset/train/lane/road_image_42.png \n",
+ " inflating: dataset/train/lane/road_image_43.png \n",
+ " inflating: dataset/train/lane/road_image_44.png \n",
+ " inflating: dataset/train/lane/road_image_45.png \n",
+ " inflating: dataset/train/lane/road_image_46.png \n",
+ " inflating: dataset/train/lane/road_image_47.png \n",
+ " inflating: dataset/train/lane/road_image_48.png \n",
+ " inflating: dataset/train/lane/road_image_49.png \n",
+ " inflating: dataset/train/lane/road_image_5.png \n",
+ " inflating: dataset/train/lane/road_image_50.png \n",
+ " inflating: dataset/train/lane/road_image_51.png \n",
+ " inflating: dataset/train/lane/road_image_52.png \n",
+ " inflating: dataset/train/lane/road_image_53.png \n",
+ " inflating: dataset/train/lane/road_image_54.png \n",
+ " inflating: dataset/train/lane/road_image_55.png \n",
+ " inflating: dataset/train/lane/road_image_56.png \n",
+ " inflating: dataset/train/lane/road_image_57.png \n",
+ " inflating: dataset/train/lane/road_image_58.png \n",
+ " inflating: dataset/train/lane/road_image_59.png \n",
+ " inflating: dataset/train/lane/road_image_6.png \n",
+ " inflating: dataset/train/lane/road_image_60.png \n",
+ " inflating: dataset/train/lane/road_image_61.png \n",
+ " inflating: dataset/train/lane/road_image_62.png \n",
+ " inflating: dataset/train/lane/road_image_63.png \n",
+ " inflating: dataset/train/lane/road_image_64.png \n",
+ " inflating: dataset/train/lane/road_image_65.png \n",
+ " inflating: dataset/train/lane/road_image_66.png \n",
+ " inflating: dataset/train/lane/road_image_67.png \n",
+ " inflating: dataset/train/lane/road_image_68.png \n",
+ " inflating: dataset/train/lane/road_image_69.png \n",
+ " inflating: dataset/train/lane/road_image_7.png \n",
+ " inflating: dataset/train/lane/road_image_70.png \n",
+ " inflating: dataset/train/lane/road_image_71.png \n",
+ " inflating: dataset/train/lane/road_image_72.png \n",
+ " inflating: dataset/train/lane/road_image_73.png \n",
+ " inflating: dataset/train/lane/road_image_74.png \n",
+ " inflating: dataset/train/lane/road_image_75.png \n",
+ " inflating: dataset/train/lane/road_image_76.png \n",
+ " inflating: dataset/train/lane/road_image_77.png \n",
+ " inflating: dataset/train/lane/road_image_78.png \n",
+ " inflating: dataset/train/lane/road_image_79.png \n",
+ " inflating: dataset/train/lane/road_image_8.png \n",
+ " inflating: dataset/train/lane/road_image_80.png \n",
+ " inflating: dataset/train/lane/road_image_81.png \n",
+ " inflating: dataset/train/lane/road_image_82.png \n",
+ " inflating: dataset/train/lane/road_image_83.png \n",
+ " inflating: dataset/train/lane/road_image_84.png \n",
+ " inflating: dataset/train/lane/road_image_85.png \n",
+ " inflating: dataset/train/lane/road_image_86.png \n",
+ " inflating: dataset/train/lane/road_image_87.png \n",
+ " inflating: dataset/train/lane/road_image_88.png \n",
+ " inflating: dataset/train/lane/road_image_89.png \n",
+ " inflating: dataset/train/lane/road_image_9.png \n",
+ " inflating: dataset/train/lane/road_image_90.png \n",
+ " inflating: dataset/train/lane/road_image_91.png \n",
+ " inflating: dataset/train/lane/road_image_92.png \n",
+ " inflating: dataset/train/lane/road_image_93.png \n",
+ " inflating: dataset/train/lane/road_image_94.png \n",
+ " inflating: dataset/train/lane/road_image_95.png \n",
+ " inflating: dataset/train/lane/road_image_96.png \n",
+ " inflating: dataset/train/lane/road_image_97.png \n",
+ " inflating: dataset/train/lane/road_image_98.png \n",
+ " inflating: dataset/train/lane/road_image_99.png \n",
+ " creating: dataset/train/segments/\n",
+ " inflating: dataset/train/segments/road_image_0.png \n",
+ " inflating: dataset/train/segments/road_image_1.png \n",
+ " inflating: dataset/train/segments/road_image_10.png \n",
+ " inflating: dataset/train/segments/road_image_100.png \n",
+ " inflating: dataset/train/segments/road_image_101.png \n",
+ " inflating: dataset/train/segments/road_image_102.png \n",
+ " inflating: dataset/train/segments/road_image_103.png \n",
+ " inflating: dataset/train/segments/road_image_104.png \n",
+ " inflating: dataset/train/segments/road_image_105.png \n",
+ " inflating: dataset/train/segments/road_image_106.png \n",
+ " inflating: dataset/train/segments/road_image_107.png \n",
+ " inflating: dataset/train/segments/road_image_108.png \n",
+ " inflating: dataset/train/segments/road_image_109.png \n",
+ " inflating: dataset/train/segments/road_image_11.png \n",
+ " inflating: dataset/train/segments/road_image_110.png \n",
+ " inflating: dataset/train/segments/road_image_111.png \n",
+ " inflating: dataset/train/segments/road_image_112.png \n",
+ " inflating: dataset/train/segments/road_image_113.png \n",
+ " inflating: dataset/train/segments/road_image_114.png \n",
+ " inflating: dataset/train/segments/road_image_115.png \n",
+ " inflating: dataset/train/segments/road_image_116.png \n",
+ " inflating: dataset/train/segments/road_image_117.png \n",
+ " inflating: dataset/train/segments/road_image_118.png \n",
+ " inflating: dataset/train/segments/road_image_119.png \n",
+ " inflating: dataset/train/segments/road_image_12.png \n",
+ " inflating: dataset/train/segments/road_image_120.png \n",
+ " inflating: dataset/train/segments/road_image_121.png \n",
+ " inflating: dataset/train/segments/road_image_122.png \n",
+ " inflating: dataset/train/segments/road_image_123.png \n",
+ " inflating: dataset/train/segments/road_image_124.png \n",
+ " inflating: dataset/train/segments/road_image_125.png \n",
+ " inflating: dataset/train/segments/road_image_126.png \n",
+ " inflating: dataset/train/segments/road_image_127.png \n",
+ " inflating: dataset/train/segments/road_image_128.png \n",
+ " inflating: dataset/train/segments/road_image_129.png \n",
+ " inflating: dataset/train/segments/road_image_13.png \n",
+ " inflating: dataset/train/segments/road_image_130.png \n",
+ " inflating: dataset/train/segments/road_image_131.png \n",
+ " inflating: dataset/train/segments/road_image_132.png \n",
+ " inflating: dataset/train/segments/road_image_133.png \n",
+ " inflating: dataset/train/segments/road_image_134.png \n",
+ " inflating: dataset/train/segments/road_image_135.png \n",
+ " inflating: dataset/train/segments/road_image_136.png \n",
+ " inflating: dataset/train/segments/road_image_137.png \n",
+ " inflating: dataset/train/segments/road_image_138.png \n",
+ " inflating: dataset/train/segments/road_image_139.png \n",
+ " inflating: dataset/train/segments/road_image_14.png \n",
+ " inflating: dataset/train/segments/road_image_140.png \n",
+ " inflating: dataset/train/segments/road_image_141.png \n",
+ " inflating: dataset/train/segments/road_image_142.png \n",
+ " inflating: dataset/train/segments/road_image_143.png \n",
+ " inflating: dataset/train/segments/road_image_144.png \n",
+ " inflating: dataset/train/segments/road_image_145.png \n",
+ " inflating: dataset/train/segments/road_image_146.png \n",
+ " inflating: dataset/train/segments/road_image_147.png \n",
+ " inflating: dataset/train/segments/road_image_148.png \n",
+ " inflating: dataset/train/segments/road_image_149.png \n",
+ " inflating: dataset/train/segments/road_image_15.png \n",
+ " inflating: dataset/train/segments/road_image_150.png \n",
+ " inflating: dataset/train/segments/road_image_151.png \n",
+ " inflating: dataset/train/segments/road_image_152.png \n",
+ " inflating: dataset/train/segments/road_image_153.png \n",
+ " inflating: dataset/train/segments/road_image_154.png \n",
+ " inflating: dataset/train/segments/road_image_155.png \n",
+ " inflating: dataset/train/segments/road_image_156.png \n",
+ " inflating: dataset/train/segments/road_image_157.png \n",
+ " inflating: dataset/train/segments/road_image_158.png \n",
+ " inflating: dataset/train/segments/road_image_159.png \n",
+ " inflating: dataset/train/segments/road_image_16.png \n",
+ " inflating: dataset/train/segments/road_image_17.png \n",
+ " inflating: dataset/train/segments/road_image_18.png \n",
+ " inflating: dataset/train/segments/road_image_19.png \n",
+ " inflating: dataset/train/segments/road_image_2.png \n",
+ " inflating: dataset/train/segments/road_image_20.png \n",
+ " inflating: dataset/train/segments/road_image_21.png \n",
+ " inflating: dataset/train/segments/road_image_22.png \n",
+ " inflating: dataset/train/segments/road_image_23.png \n",
+ " inflating: dataset/train/segments/road_image_24.png \n",
+ " inflating: dataset/train/segments/road_image_25.png \n",
+ " inflating: dataset/train/segments/road_image_26.png \n",
+ " inflating: dataset/train/segments/road_image_27.png \n",
+ " inflating: dataset/train/segments/road_image_28.png \n",
+ " inflating: dataset/train/segments/road_image_29.png \n",
+ " inflating: dataset/train/segments/road_image_3.png \n",
+ " inflating: dataset/train/segments/road_image_30.png \n",
+ " inflating: dataset/train/segments/road_image_31.png \n",
+ " inflating: dataset/train/segments/road_image_32.png \n",
+ " inflating: dataset/train/segments/road_image_33.png \n",
+ " inflating: dataset/train/segments/road_image_34.png \n",
+ " inflating: dataset/train/segments/road_image_35.png \n",
+ " inflating: dataset/train/segments/road_image_36.png \n",
+ " inflating: dataset/train/segments/road_image_37.png \n",
+ " inflating: dataset/train/segments/road_image_38.png \n",
+ " inflating: dataset/train/segments/road_image_39.png \n",
+ " inflating: dataset/train/segments/road_image_4.png \n",
+ " inflating: dataset/train/segments/road_image_40.png \n",
+ " inflating: dataset/train/segments/road_image_41.png \n",
+ " inflating: dataset/train/segments/road_image_42.png \n",
+ " inflating: dataset/train/segments/road_image_43.png \n",
+ " inflating: dataset/train/segments/road_image_44.png \n",
+ " inflating: dataset/train/segments/road_image_45.png \n",
+ " inflating: dataset/train/segments/road_image_46.png \n",
+ " inflating: dataset/train/segments/road_image_47.png \n",
+ " inflating: dataset/train/segments/road_image_48.png \n",
+ " inflating: dataset/train/segments/road_image_49.png \n",
+ " inflating: dataset/train/segments/road_image_5.png \n",
+ " inflating: dataset/train/segments/road_image_50.png \n",
+ " inflating: dataset/train/segments/road_image_51.png \n",
+ " inflating: dataset/train/segments/road_image_52.png \n",
+ " inflating: dataset/train/segments/road_image_53.png \n",
+ " inflating: dataset/train/segments/road_image_54.png \n",
+ " inflating: dataset/train/segments/road_image_55.png \n",
+ " inflating: dataset/train/segments/road_image_56.png \n",
+ " inflating: dataset/train/segments/road_image_57.png \n",
+ " inflating: dataset/train/segments/road_image_58.png \n",
+ " inflating: dataset/train/segments/road_image_59.png \n",
+ " inflating: dataset/train/segments/road_image_6.png \n",
+ " inflating: dataset/train/segments/road_image_60.png \n",
+ " inflating: dataset/train/segments/road_image_61.png \n",
+ " inflating: dataset/train/segments/road_image_62.png \n",
+ " inflating: dataset/train/segments/road_image_63.png \n",
+ " inflating: dataset/train/segments/road_image_64.png \n",
+ " inflating: dataset/train/segments/road_image_65.png \n",
+ " inflating: dataset/train/segments/road_image_66.png \n",
+ " inflating: dataset/train/segments/road_image_67.png \n",
+ " inflating: dataset/train/segments/road_image_68.png \n",
+ " inflating: dataset/train/segments/road_image_69.png \n",
+ " inflating: dataset/train/segments/road_image_7.png \n",
+ " inflating: dataset/train/segments/road_image_70.png \n",
+ " inflating: dataset/train/segments/road_image_71.png \n",
+ " inflating: dataset/train/segments/road_image_72.png \n",
+ " inflating: dataset/train/segments/road_image_73.png \n",
+ " inflating: dataset/train/segments/road_image_74.png \n",
+ " inflating: dataset/train/segments/road_image_75.png \n",
+ " inflating: dataset/train/segments/road_image_76.png \n",
+ " inflating: dataset/train/segments/road_image_77.png \n",
+ " inflating: dataset/train/segments/road_image_78.png \n",
+ " inflating: dataset/train/segments/road_image_79.png \n",
+ " inflating: dataset/train/segments/road_image_8.png \n",
+ " inflating: dataset/train/segments/road_image_80.png \n",
+ " inflating: dataset/train/segments/road_image_81.png \n",
+ " inflating: dataset/train/segments/road_image_82.png \n",
+ " inflating: dataset/train/segments/road_image_83.png \n",
+ " inflating: dataset/train/segments/road_image_84.png \n",
+ " inflating: dataset/train/segments/road_image_85.png \n",
+ " inflating: dataset/train/segments/road_image_86.png \n",
+ " inflating: dataset/train/segments/road_image_87.png \n",
+ " inflating: dataset/train/segments/road_image_88.png \n",
+ " inflating: dataset/train/segments/road_image_89.png \n",
+ " inflating: dataset/train/segments/road_image_9.png \n",
+ " inflating: dataset/train/segments/road_image_90.png \n",
+ " inflating: dataset/train/segments/road_image_91.png \n",
+ " inflating: dataset/train/segments/road_image_92.png \n",
+ " inflating: dataset/train/segments/road_image_93.png \n",
+ " inflating: dataset/train/segments/road_image_94.png \n",
+ " inflating: dataset/train/segments/road_image_95.png \n",
+ " inflating: dataset/train/segments/road_image_96.png \n",
+ " inflating: dataset/train/segments/road_image_97.png \n",
+ " inflating: dataset/train/segments/road_image_98.png \n",
+ " inflating: dataset/train/segments/road_image_99.png \n",
+ " creating: dataset/validation/\n",
+ " creating: dataset/validation/images/\n",
+ " inflating: dataset/validation/images/road_image_180.png \n",
+ " inflating: dataset/validation/images/road_image_181.png \n",
+ " inflating: dataset/validation/images/road_image_182.png \n",
+ " inflating: dataset/validation/images/road_image_183.png \n",
+ " inflating: dataset/validation/images/road_image_184.png \n",
+ " inflating: dataset/validation/images/road_image_185.png \n",
+ " inflating: dataset/validation/images/road_image_186.png \n",
+ " inflating: dataset/validation/images/road_image_187.png \n",
+ " inflating: dataset/validation/images/road_image_188.png \n",
+ " inflating: dataset/validation/images/road_image_189.png \n",
+ " inflating: dataset/validation/images/road_image_190.png \n",
+ " inflating: dataset/validation/images/road_image_191.png \n",
+ " inflating: dataset/validation/images/road_image_192.png \n",
+ " inflating: dataset/validation/images/road_image_193.png \n",
+ " inflating: dataset/validation/images/road_image_194.png \n",
+ " inflating: dataset/validation/images/road_image_195.png \n",
+ " inflating: dataset/validation/images/road_image_196.png \n",
+ " inflating: dataset/validation/images/road_image_197.png \n",
+ " inflating: dataset/validation/images/road_image_198.png \n",
+ " inflating: dataset/validation/images/road_image_199.png \n",
+ " creating: dataset/validation/lane/\n",
+ " inflating: dataset/validation/lane/road_image_180.png \n",
+ " inflating: dataset/validation/lane/road_image_181.png \n",
+ " inflating: dataset/validation/lane/road_image_182.png \n",
+ " inflating: dataset/validation/lane/road_image_183.png \n",
+ " inflating: dataset/validation/lane/road_image_184.png \n",
+ " inflating: dataset/validation/lane/road_image_185.png \n",
+ " inflating: dataset/validation/lane/road_image_186.png \n",
+ " inflating: dataset/validation/lane/road_image_187.png \n",
+ " inflating: dataset/validation/lane/road_image_188.png \n",
+ " inflating: dataset/validation/lane/road_image_189.png \n",
+ " inflating: dataset/validation/lane/road_image_190.png \n",
+ " inflating: dataset/validation/lane/road_image_191.png \n",
+ " inflating: dataset/validation/lane/road_image_192.png \n",
+ " inflating: dataset/validation/lane/road_image_193.png \n",
+ " inflating: dataset/validation/lane/road_image_194.png \n",
+ " inflating: dataset/validation/lane/road_image_195.png \n",
+ " inflating: dataset/validation/lane/road_image_196.png \n",
+ " inflating: dataset/validation/lane/road_image_197.png \n",
+ " inflating: dataset/validation/lane/road_image_198.png \n",
+ " inflating: dataset/validation/lane/road_image_199.png \n",
+ " creating: dataset/validation/segments/\n",
+ " inflating: dataset/validation/segments/road_image_180.png \n",
+ " inflating: dataset/validation/segments/road_image_181.png \n",
+ " inflating: dataset/validation/segments/road_image_182.png \n",
+ " inflating: dataset/validation/segments/road_image_183.png \n",
+ " inflating: dataset/validation/segments/road_image_184.png \n",
+ " inflating: dataset/validation/segments/road_image_185.png \n",
+ " inflating: dataset/validation/segments/road_image_186.png \n",
+ " inflating: dataset/validation/segments/road_image_187.png \n",
+ " inflating: dataset/validation/segments/road_image_188.png \n",
+ " inflating: dataset/validation/segments/road_image_189.png \n",
+ " inflating: dataset/validation/segments/road_image_190.png \n",
+ " inflating: dataset/validation/segments/road_image_191.png \n",
+ " inflating: dataset/validation/segments/road_image_192.png \n",
+ " inflating: dataset/validation/segments/road_image_193.png \n",
+ " inflating: dataset/validation/segments/road_image_194.png \n",
+ " inflating: dataset/validation/segments/road_image_195.png \n",
+ " inflating: dataset/validation/segments/road_image_196.png \n",
+ " inflating: dataset/validation/segments/road_image_197.png \n",
+ " inflating: dataset/validation/segments/road_image_198.png \n",
+ " inflating: dataset/validation/segments/road_image_199.png \n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Import the all the required libraries"
+ ],
+ "metadata": {
+ "id": "bpUdANiK6K-i"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "id": "hVDJcpeP5d1J"
+ },
+ "outputs": [],
+ "source": [
+ "import torch\n",
+ "import cv2\n",
+ "import torch.utils.data\n",
+ "import torchvision.transforms as transforms\n",
+ "import numpy as np\n",
+ "import os\n",
+ "import random\n",
+ "import math\n",
+ "from matplotlib import pyplot as plt\n",
+ "import torch.nn as nn"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Image transformation functions\n",
+ "\n",
+ "- By paper author"
+ ],
+ "metadata": {
+ "id": "MXX5-aH58B4c"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "id": "ywi8_wbg5jZQ"
+ },
+ "outputs": [],
+ "source": [
+ "def augment_hsv(img, hgain=0.015, sgain=0.7, vgain=0.4):\n",
+ " \"\"\"change color hue, saturation, value\"\"\"\n",
+ " r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains\n",
+ " hue, sat, val = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV))\n",
+ " dtype = img.dtype # uint8\n",
+ "\n",
+ " x = np.arange(0, 256, dtype=np.int16)\n",
+ " lut_hue = ((x * r[0]) % 180).astype(dtype)\n",
+ " lut_sat = np.clip(x * r[1], 0, 255).astype(dtype)\n",
+ " lut_val = np.clip(x * r[2], 0, 255).astype(dtype)\n",
+ "\n",
+ " img_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))).astype(dtype)\n",
+ " cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img) # no return needed"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "id": "NMEu5Ey35mWQ"
+ },
+ "outputs": [],
+ "source": [
+ "def random_perspective(combination, degrees=10, translate=.1, scale=.1, shear=10, perspective=0.0, border=(0, 0)):\n",
+ " \"\"\"combination of img transform\"\"\"\n",
+ " # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-10, 10))\n",
+ " # targets = [cls, xyxy]\n",
+ " img, gray, line = combination\n",
+ " height = img.shape[0] + border[0] * 2 # shape(h,w,c)\n",
+ " width = img.shape[1] + border[1] * 2\n",
+ "\n",
+ " # Center\n",
+ " C = np.eye(3)\n",
+ " C[0, 2] = -img.shape[1] / 2 # x translation (pixels)\n",
+ " C[1, 2] = -img.shape[0] / 2 # y translation (pixels)\n",
+ "\n",
+ " # Perspective\n",
+ " P = np.eye(3)\n",
+ " P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y)\n",
+ " P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x)\n",
+ "\n",
+ " # Rotation and Scale\n",
+ " R = np.eye(3)\n",
+ " a = random.uniform(-degrees, degrees)\n",
+ " # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations\n",
+ " s = random.uniform(1 - scale, 1 + scale)\n",
+ " # s = 2 ** random.uniform(-scale, scale)\n",
+ " R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s)\n",
+ "\n",
+ " # Shear\n",
+ " S = np.eye(3)\n",
+ " S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg)\n",
+ " S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg)\n",
+ "\n",
+ " # Translation\n",
+ " T = np.eye(3)\n",
+ " T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width # x translation (pixels)\n",
+ " T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height # y translation (pixels)\n",
+ "\n",
+ " # Combined rotation matrix\n",
+ " M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT\n",
+ " if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed\n",
+ " if perspective:\n",
+ " img = cv2.warpPerspective(img, M, dsize=(width, height), borderValue=(114, 114, 114))\n",
+ " gray = cv2.warpPerspective(gray, M, dsize=(width, height), borderValue=0)\n",
+ " line = cv2.warpPerspective(line, M, dsize=(width, height), borderValue=0)\n",
+ " else: # affine\n",
+ " img = cv2.warpAffine(img, M[:2], dsize=(width, height), borderValue=(114, 114, 114))\n",
+ " gray = cv2.warpAffine(gray, M[:2], dsize=(width, height), borderValue=0)\n",
+ " line = cv2.warpAffine(line, M[:2], dsize=(width, height), borderValue=0)\n",
+ "\n",
+ "\n",
+ "\n",
+ " combination = (img, gray, line)\n",
+ " return combination"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Custom Dataset Class\n",
+ "\n",
+ "- This custom dataset class is based on the dataset class written by the author but with slight modifications like path. we have adjusted the path according to the google colab."
+ ],
+ "metadata": {
+ "id": "mFv9HU486TLr"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "id": "_LoqglKDR2Sw"
+ },
+ "outputs": [],
+ "source": [
+ "class MyDataset(torch.utils.data.Dataset):\n",
+ " '''\n",
+ " Class to load the dataset\n",
+ " '''\n",
+ " def __init__(self, transform=None,valid=False):\n",
+ " '''\n",
+ " :param imList: image list (Note that these lists have been processed and pickled using the loadData.py)\n",
+ " :param labelList: label list (Note that these lists have been processed and pickled using the loadData.py)\n",
+ " :param transform: Type of transformation. SEe Transforms.py for supported transformations\n",
+ " '''\n",
+ "\n",
+ " self.transform = transform\n",
+ " self.Tensor = transforms.ToTensor()\n",
+ " self.valid=valid\n",
+ " if valid:\n",
+ " self.root='dataset/validation/images'\n",
+ " self.names=os.listdir(self.root)\n",
+ " else:\n",
+ " self.root='dataset/train/images/'\n",
+ " self.names=os.listdir(self.root)\n",
+ "\n",
+ " def __len__(self):\n",
+ " return len(self.names)\n",
+ "\n",
+ " def __getitem__(self, idx):\n",
+ " '''\n",
+ "\n",
+ " :param idx: Index of the image file\n",
+ " :return: returns the image and corresponding label file.\n",
+ " '''\n",
+ " W_=640\n",
+ " H_=360\n",
+ " image_name=os.path.join(self.root,self.names[idx])\n",
+ "\n",
+ " image = cv2.imread(image_name)\n",
+ " original_image = cv2.imread(image_name)\n",
+ " label1 = cv2.imread(image_name.replace(\"images\",\"segments\").replace(\"jpg\",\"png\"), 0)\n",
+ " label2 = cv2.imread(image_name.replace(\"images\",\"lane\").replace(\"jpg\",\"png\"), 0)\n",
+ " if not self.valid:\n",
+ " if random.random()<0.5:\n",
+ " combination = (image, label1, label2)\n",
+ " (image, label1, label2)= random_perspective(\n",
+ " combination=combination,\n",
+ " degrees=10,\n",
+ " translate=0.1,\n",
+ " scale=0.25,\n",
+ " shear=0.0\n",
+ " )\n",
+ " if random.random()<0.5:\n",
+ " augment_hsv(image)\n",
+ " if random.random() < 0.5:\n",
+ " image = np.fliplr(image)\n",
+ " label1 = np.fliplr(label1)\n",
+ " label2 = np.fliplr(label2)\n",
+ "\n",
+ " label1 = cv2.resize(label1, (W_, H_))\n",
+ " label2 = cv2.resize(label2, (W_, H_))\n",
+ " image = cv2.resize(image, (W_, H_))\n",
+ "\n",
+ " _,seg_b1 = cv2.threshold(label1,1,255,cv2.THRESH_BINARY_INV)\n",
+ " _,seg_b2 = cv2.threshold(label2,1,255,cv2.THRESH_BINARY_INV)\n",
+ " _,seg1 = cv2.threshold(label1,1,255,cv2.THRESH_BINARY)\n",
+ " _,seg2 = cv2.threshold(label2,1,255,cv2.THRESH_BINARY)\n",
+ "\n",
+ " seg1 = self.Tensor(seg1)\n",
+ " seg2 = self.Tensor(seg2)\n",
+ " seg_b1 = self.Tensor(seg_b1)\n",
+ " seg_b2 = self.Tensor(seg_b2)\n",
+ " seg_da = torch.stack((seg_b1[0], seg1[0]),0)\n",
+ " seg_ll = torch.stack((seg_b2[0], seg2[0]),0)\n",
+ " image = image[:, :, ::-1].transpose(2, 0, 1)\n",
+ " image = np.ascontiguousarray(image)\n",
+ "\n",
+ " return original_image, image_name,torch.from_numpy(image),(seg_da,seg_ll)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Intialize a dataloader\n",
+ "\n",
+ "- Intialize a dataloader with batch size 8"
+ ],
+ "metadata": {
+ "id": "b6Ly9Ek16kg-"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "id": "qIK3UcD3STAG"
+ },
+ "outputs": [],
+ "source": [
+ "from torch.utils.data import DataLoader\n",
+ "\n",
+ "train_dataloader = DataLoader(MyDataset(), batch_size = 8, shuffle = True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Display images\n",
+ "\n",
+ "- Show first sample of each mini-batch with size 8"
+ ],
+ "metadata": {
+ "id": "ERrb-mex6rGx"
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "id": "DjQRwgtn5XJY",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 796
+ },
+ "outputId": "ce49ea03-7ac5-4639-f7aa-a3dc7b394ec8"
+ },
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "dataset/train/images/road_image_3.png\n",
+ "dataset/train/images/road_image_44.png\n",
+ "dataset/train/images/road_image_78.png\n",
+ "dataset/train/images/road_image_146.png\n",
+ "dataset/train/images/road_image_156.png\n",
+ "dataset/train/images/road_image_102.png\n",
+ "dataset/train/images/road_image_35.png\n",
+ "dataset/train/images/road_image_106.png\n",
+ "dataset/train/images/road_image_4.png\n",
+ "dataset/train/images/road_image_70.png\n",
+ "dataset/train/images/road_image_114.png\n",
+ "dataset/train/images/road_image_72.png\n",
+ "dataset/train/images/road_image_92.png\n",
+ "dataset/train/images/road_image_113.png\n",
+ "dataset/train/images/road_image_143.png\n",
+ "dataset/train/images/road_image_98.png\n",
+ "dataset/train/images/road_image_15.png\n",
+ "dataset/train/images/road_image_2.png\n",
+ "dataset/train/images/road_image_139.png\n",
+ "dataset/train/images/road_image_91.png\n"
+ ]
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "text/plain": [
+ "