diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..cbbd756
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,10 @@
+.git/
+.idea/
+.vscode/
+.env
+logs/
+*.log
+*.swp
+
+dkr/data
+dkr/config/caddy
diff --git a/.env.example b/.env.example
index c249864..0f304c2 100644
--- a/.env.example
+++ b/.env.example
@@ -2,10 +2,43 @@ DEBUG=false
FLASK_ENV=production
FLASK_DEBUG=0
-V4_HOST=ipv4.myip.example.com
-V6_HOST=ipv6.myip.example.com
+MAIN_HOST=myip.example.com
+EXTRA_HOSTS=myip.vc,address.computer
-IP_HEADER=X-REAL-IP
+V4_SUBDOMAIN=v4
+V6_SUBDOMAIN=v6
+
+#### V4_HOST and V6_HOST aren't normally needed due to using V4/V6_SUBDOMAIN
+# V4_HOST=ipv4.myip.example.com
+# V6_HOST=ipv6.myip.example.com
+
+#### Can be: redis, memcached, sqlite, memory
+# CACHE_ADAPTER=auto
+# CACHE_ADAPTER_INIT=true
+
+# REDIS_HOST=localhost
+# REDIS_PORT=6379
+# REDIS_DB=0
+
+# IP_HEADER=X-REAL-IP
+
+# HOST=127.0.0.1
+# HOST=::1
+# PORT=5151
+# FILTER_HOSTS=true
+
+# USE_RICH_LOGGING=true
+# RICH_TRACEBACKS=true
+# LOG_LEVEL=WARNING
+#### LOG_DIR can be relative to myip app, or absolute path e.g. /var/log/myip
+# LOG_DIR=logs
+
+#####
+# Generally for development/debugging only:
+#####
+# USE_FAKE_IPS=true
+# USE_IP_HEADER=false
+# FORCE_MAIN_HOST=false
+# FLASK_ENV=development
+# FLASK_DEBUG=1
-HOST=127.0.0.1
-PORT=5151
diff --git a/.gitignore b/.gitignore
index 1ee8ead..0be5afc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,15 @@
venv/
+env/
+.vscode/
+.idea/
.env
+
+*.log
+*.swp
+*.db
+*.sqlite3
+*.pyc
+
+__pycache__
+
+docker-compose.yml
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..e2f958d
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,39 @@
+FROM python:3.9-alpine
+
+RUN apk update && \
+ apk add libmemcached-dev postgresql-dev libpq mariadb-dev mariadb-connector-c-dev openssl-dev && \
+ apk add gcc musl-dev libffi-dev py-cryptography python3-dev
+
+RUN apk add curl wget bash zsh
+
+
+RUN pip3 install -U pip && \
+ pip3 install -U pipenv
+
+RUN curl https://sh.rustup.rs -sSf | sh -s -- --profile default --default-toolchain nightly -y
+
+ENV PATH "${HOME}/.cargo/bin:${PATH}"
+ARG PATH="${PATH}"
+
+WORKDIR /app
+
+RUN apk add sudo rsync
+RUN mkdir /app/logs
+COPY Pipfile Pipfile.lock requirements.txt update_geoip.sh /app/
+
+VOLUME /usr/local/var/GeoIP
+RUN /app/update_geoip.sh
+
+RUN sed -Ei 's/python\_version \= "3.8"/python_version = "3.9"/' Pipfile
+RUN PATH="${HOME}/.cargo/bin/:${PATH}" pipenv --python python3.9 install --ignore-pipfile
+
+COPY .env.example LICENSE.txt README.md run.sh update_geoip.sh wsgi.py /app/
+COPY myip/ /app/myip/
+COPY static/ /app/static/
+COPY dkr/init.sh /app/
+RUN chmod +x /app/init.sh /app/update_geoip.sh /app/run.sh /app/wsgi.py
+
+EXPOSE 5252
+
+ENTRYPOINT [ "/app/init.sh" ]
+
diff --git a/Pipfile b/Pipfile
index f66fc09..2920d72 100644
--- a/Pipfile
+++ b/Pipfile
@@ -18,11 +18,14 @@ gunicorn = "*"
flask-cors = ">=3.0.8"
rich = "*"
accept-types = "*"
-privex-helpers = {version = ">=3.0", extras = ["full"]}
+#privex-helpers = {version = ">=3.0", extras = ["cache"]}
+privex-helpers = ">=3.0"
+pylibmc = "*"
requests = ">=2.2"
pyyaml = "*"
markdown = ">=3.0.1"
aiohttp = ">=3.7.4"
+privex-db = ">=0.9.2"
[requires]
python_version = "3.8"
diff --git a/Pipfile.lock b/Pipfile.lock
index a63b615..34c933b 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "92275122c3e7ff48cdb2d7a231f369e7d070785e0b8ed6eba1f376f927a9d07f"
+ "sha256": "5ad555df11691e6914911fd57c0e2caf3f5480aa54232a5e921e35db91fbe750"
},
"pipfile-spec": 6,
"requires": {
@@ -67,25 +67,12 @@
"index": "pypi",
"version": "==3.7.4.post0"
},
- "aiomcache": {
- "hashes": [
- "sha256:17d82e0586c8500a7a3dac0fdef67e2d0300c82d4acb1595a9925783b7c382b5",
- "sha256:b38062efca87f2e6ac3b406bd816ca790900b03fef1c5c38fa61c18212c38825"
- ],
- "version": "==0.6.0"
- },
- "aioredis": {
- "hashes": [
- "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a",
- "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"
- ],
- "version": "==1.3.1"
- },
"aiosqlite": {
"hashes": [
"sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231",
"sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"
],
+ "markers": "python_version >= '3.6'",
"version": "==0.17.0"
},
"async-property": {
@@ -111,14 +98,6 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==21.2.0"
},
- "bleach": {
- "hashes": [
- "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125",
- "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
- "version": "==3.3.0"
- },
"certifi": {
"hashes": [
"sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee",
@@ -126,60 +105,6 @@
],
"version": "==2021.5.30"
},
- "cffi": {
- "hashes": [
- "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813",
- "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373",
- "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69",
- "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f",
- "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06",
- "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05",
- "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea",
- "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee",
- "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0",
- "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396",
- "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7",
- "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f",
- "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73",
- "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315",
- "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76",
- "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1",
- "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49",
- "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed",
- "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892",
- "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482",
- "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058",
- "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5",
- "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53",
- "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045",
- "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3",
- "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55",
- "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5",
- "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e",
- "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c",
- "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369",
- "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827",
- "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053",
- "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa",
- "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4",
- "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322",
- "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132",
- "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62",
- "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa",
- "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0",
- "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396",
- "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e",
- "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991",
- "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6",
- "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc",
- "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1",
- "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406",
- "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333",
- "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d",
- "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"
- ],
- "version": "==1.14.5"
- },
"chardet": {
"hashes": [
"sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
@@ -211,38 +136,6 @@
],
"version": "==0.9.1"
},
- "cryptography": {
- "hashes": [
- "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d",
- "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959",
- "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6",
- "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873",
- "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2",
- "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713",
- "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1",
- "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177",
- "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250",
- "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca",
- "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d",
- "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"
- ],
- "version": "==3.4.7"
- },
- "dnspython": {
- "hashes": [
- "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216",
- "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"
- ],
- "version": "==2.1.0"
- },
- "docutils": {
- "hashes": [
- "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125",
- "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
- "version": "==0.17.1"
- },
"flask": {
"hashes": [
"sha256:1c4c257b1892aec1398784c63791cbaa43062f1f7aeb555c4da961b20ee68f55",
@@ -275,52 +168,6 @@
"index": "pypi",
"version": "==20.1.0"
},
- "hiredis": {
- "hashes": [
- "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e",
- "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27",
- "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163",
- "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc",
- "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26",
- "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e",
- "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579",
- "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a",
- "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048",
- "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87",
- "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63",
- "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54",
- "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05",
- "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb",
- "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea",
- "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5",
- "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e",
- "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc",
- "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99",
- "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a",
- "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581",
- "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426",
- "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db",
- "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a",
- "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a",
- "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d",
- "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443",
- "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79",
- "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d",
- "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9",
- "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d",
- "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485",
- "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5",
- "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048",
- "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0",
- "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6",
- "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41",
- "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298",
- "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce",
- "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0",
- "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"
- ],
- "version": "==2.0.0"
- },
"idna": {
"hashes": [
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
@@ -329,14 +176,6 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.10"
},
- "importlib-metadata": {
- "hashes": [
- "sha256:4a5611fea3768d3d967c447ab4e93f567d95db92225b43b7b238dbfb855d70bb",
- "sha256:c6513572926a96458f8c8f725bf0e00108fba0c9583ade9bd15b869c9d726e33"
- ],
- "markers": "python_version >= '3.6'",
- "version": "==4.6.0"
- },
"itsdangerous": {
"hashes": [
"sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c",
@@ -353,14 +192,6 @@
"markers": "python_version >= '3.6'",
"version": "==3.0.1"
},
- "keyring": {
- "hashes": [
- "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8",
- "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48"
- ],
- "markers": "python_version >= '3.6'",
- "version": "==23.0.1"
- },
"markdown": {
"hashes": [
"sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49",
@@ -459,32 +290,15 @@
"markers": "python_version >= '3.6'",
"version": "==5.1.0"
},
- "packaging": {
- "hashes": [
- "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
- "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==20.9"
- },
- "pkginfo": {
- "hashes": [
- "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4",
- "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75"
- ],
- "version": "==1.7.0"
- },
"privex-db": {
"hashes": [
"sha256:7158831693bd426f8c380cc015cb5771e79ec70564e9063dd27592aafb88b236",
"sha256:b7cffa20d6b516c82d8d09dec33259ae198249aec3fe2559195cbfba0bf761d2"
],
+ "index": "pypi",
"version": "==0.9.2"
},
"privex-helpers": {
- "extras": [
- "full"
- ],
"hashes": [
"sha256:2f7b9adae53f74c0f6dba5b1fd20b0bbcf87dd124e2a5099454aafa77822ff1f",
"sha256:c6ebcf8f6d4a25e5da57b02716f5b43adefb27698dbffc842825d989743e50b3"
@@ -500,14 +314,6 @@
"index": "pypi",
"version": "==1.0.6"
},
- "pycparser": {
- "hashes": [
- "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
- "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.20"
- },
"pygments": {
"hashes": [
"sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f",
@@ -524,16 +330,9 @@
"sha256:c749b4251c1137837d00542b62992b96cd2aed639877407f66291120dd6de2ff",
"sha256:e6c0c452336db0868d0de521d48872c2a359b1233b974c6b32c36ce68abc4820"
],
+ "index": "pypi",
"version": "==1.6.1"
},
- "pyparsing": {
- "hashes": [
- "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
- "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
- ],
- "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.4.7"
- },
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
@@ -592,13 +391,6 @@
"index": "pypi",
"version": "==5.4.1"
},
- "readme-renderer": {
- "hashes": [
- "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c",
- "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db"
- ],
- "version": "==29.0"
- },
"redis": {
"hashes": [
"sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2",
@@ -615,20 +407,6 @@
"index": "pypi",
"version": "==2.25.1"
},
- "requests-toolbelt": {
- "hashes": [
- "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f",
- "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
- ],
- "version": "==0.9.1"
- },
- "rfc3986": {
- "hashes": [
- "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835",
- "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"
- ],
- "version": "==1.5.0"
- },
"rich": {
"hashes": [
"sha256:6e8a3e2c61e6cf6193bfcffbb89865a0973af7779d3ead913fdbbbc33f457c2c",
@@ -637,13 +415,6 @@
"index": "pypi",
"version": "==10.4.0"
},
- "semver": {
- "hashes": [
- "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4",
- "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"
- ],
- "version": "==2.13.0"
- },
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
@@ -660,21 +431,6 @@
"markers": "python_version >= '3.5'",
"version": "==1.2.0"
},
- "tqdm": {
- "hashes": [
- "sha256:24be966933e942be5f074c29755a95b315c69a91f839a29139bf26ffffe2d3fd",
- "sha256:aa0c29f03f298951ac6318f7c8ce584e48fa22ec26396e6411e43d038243bdb2"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==4.61.1"
- },
- "twine": {
- "hashes": [
- "sha256:16f706f2f1687d7ce30e7effceee40ed0a09b7c33b9abb5ef6434e5551565d83",
- "sha256:a56c985264b991dc8a8f4234eb80c5af87fa8080d0c224ad8f2cd05a2c22e83b"
- ],
- "version": "==3.4.1"
- },
"typing-extensions": {
"hashes": [
"sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497",
@@ -688,16 +444,9 @@
"sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4",
"sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"
],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4.0'",
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.26.6"
},
- "webencodings": {
- "hashes": [
- "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78",
- "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
- ],
- "version": "==0.5.1"
- },
"werkzeug": {
"hashes": [
"sha256:1de1db30d010ff1af14a009224ec49ab2329ad2cde454c8a708130642d579c42",
@@ -706,13 +455,6 @@
"markers": "python_version >= '3.6'",
"version": "==2.0.1"
},
- "wheel": {
- "hashes": [
- "sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e",
- "sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e"
- ],
- "version": "==0.36.2"
- },
"yarl": {
"hashes": [
"sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e",
@@ -755,14 +497,6 @@
],
"markers": "python_version >= '3.6'",
"version": "==1.6.3"
- },
- "zipp": {
- "hashes": [
- "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76",
- "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"
- ],
- "markers": "python_version >= '3.6'",
- "version": "==3.4.1"
}
},
"develop": {
diff --git a/README.md b/README.md
index 414878a..7eb98c8 100644
--- a/README.md
+++ b/README.md
@@ -3,8 +3,110 @@
Our **IP Information Tool** is a small Python 3 web application written using the Flask framework, which allows a user to quickly see
their IPv4 address, their IPv6 address (if they have one), GeoIP information about each (city, country, ISP name, AS number), plus browser user agent.
-You can test it out using our production My IP website: [Privex - What is my IP?](https://myip.privex.io)
-
+You can test it out using our production My IP website: [Privex - What is my IP?](https://myip.vc)
+
+We operate our own **Whats My IP** service (using this project) on 3 domains as of June 2021:
+
+- [myip.vc](https://myip.vc)
+- [address.computer](https://address.computer)
+- [myip.privex.io](https://myip.privex.io)
+
+## Table of Contents
+
+- [Privex's IP Information Tool](#privex-s-ip-information-tool)
+ * [Features](#features)
+- [License](#license)
+- [Requirements](#requirements)
+- [Docker](#docker)
+  * [Docker Compose](#docker-compose)
+  * [Solo Container](#solo-container)
+- [Normal Installation](#normal-installation)
+ * [Ubuntu/Debian Quickstart](#ubuntu-debian-quickstart)
+ * [Webserver Example Configurations](#webserver-example-configurations)
+ + [Example Caddy (v2) Caddyfile Configuration](#example-caddy--v2--caddyfile-configuration)
+ + [Example Nginx Configuration](#example-nginx-configuration)
+- [Keeping your GeoIP2 databases up-to-date](#keeping-your-geoip2-databases-up-to-date)
+ * [Register for a Maxmind account to get an API key](#register-for-a-maxmind-account-to-get-an-api-key)
+ * [Install the geoipupdate tool from Maxmind](#install-the-geoipupdate-tool-from-maxmind)
+ * [Configure the tool with your API credentials](#configure-the-tool-with-your-api-credentials)
+ * [Setup a weekly cron to update your GeoIP databases](#setup-a-weekly-cron-to-update-your-geoip-databases)
+ * [Run geoipupdate to update your DBs now](#run-geoipupdate-to-update-your-dbs-now)
+- [Contributing](#contributing)
+- [Thanks for reading!](#thanks-for-reading-)
+
+Table of contents generated with markdown-toc
+
+## Features
+
+**The index HTML page supports:**
+
+ - Shows the **IPv4 or IPv6 address that your browser directly connected from**, without
+   using Javascript, so the page remains useful even with JS disabled.
+ - Shows specifically your **IPv4 address** using a JS fetch query to the app's API via an IPv4-only subdomain
+ - Shows specifically your **IPv6 address** using a JS fetch query to the app's API via an IPv6-only subdomain
+
+**Supports outputting IP information in a variety of formats**
+
+ - **HTML** (index page)
+ - **JSON**
+ - **YAML** (yml)
+ - **Plain Text** (greppable)
+ - **Individual pieces of information** as plain text (like [icanhazip.com](https://icanhazip.com) - but more features)
+
+**API**
+
+ - `/` (`/index`) - Supports outputting in-depth IP information as HTML, JSON, YAML, and Plain Text;
+ format can be requested by either:
+ - changing the URL extension (e.g. `index.txt`)
+ - requesting via `Accept` header e.g. `Accept: application/yaml`
+ - requesting via GET or POST e.g. `?format=json` (supports both url form encoded, and JSON body)
+ - `/flat(/<info>)` - Outputs individual pieces of information about your IP address as plain text.
+ - The root of the URI, e.g. `/flat` or `/flat/` outputs just the IPv4 or IPv6 address you're connecting from
+ just like [icanhazip.com](https://icanhazip.com)
+   - `<info>` can be set to various names of information you want to retrieve about your IP address, and
+ it will output that individual piece of information in plain text, e.g. `/flat/country` may return `Sweden`,
+ `/flat/city` may return `Stockholm`, `/flat/asn` may return `210083` (Privex's ASN), and so on.
+
+ For a full listing of available `/flat` info endpoints, visit `/api/` on any of our My IP domains,
+ [or just click here to see flat endpoint info on myip.vc](https://myip.vc/api/#flat-endpoint).
+
+
+ - `/lookup(.y(a)?ml|.json|.txt)(/<address>)(/<info>)` - This endpoint is designed for looking up information
+   about an IP address other than the one you're connecting from. However, if you just query `/lookup`, or
+   one of its extension variants, without an address, it will return information about the address you're
+   connecting from.
+
+ - If you access the standard endpoint and specify an IPv4 or IPv6 address, then it will return information
+ about that address in JSON format, e.g. `/lookup/185.130.47.1`
+
+ - Just like `/index` - you can request different output formats from `/lookup`, either by changing
+ the extension `/lookup.yml/2a07:e00::333`, requesting via `Accept` header e.g. `Accept: text/plain`,
+ or requesting via GET or POST e.g. `?format=json`.
+
+ The supported formats for lookup are: JSON (default), YAML/YML, and TXT (Plain Text).
+
+   - As for `<info>` - this URL segment supports the same info names as `/flat`, and allows you to retrieve
+ just **singular pieces of information** about an arbitrary IPv4 or IPv6 address.
+
+ For example, `/lookup/185.130.47.1/country` would output `Netherlands`, and `/lookup/2a07:e00::333/location`
+ outputs `Stockholm, 173 11, Sweden`
+ - It's possible to request information about multiple IP addresses at once, which will be returned as a dictionary
+ (hash map / object) by specifying them in one of two ways:
+   - The cleanest way is to POST to `/lookup/` with a JSON body containing the list (array) `addrs`, which
+ holds one or more IPv4 and/or IPv6 addresses as strings, e.g. `{"addrs": ["185.130.47.1", "2a07:e00::333"]}`
+
+ You can test this easily using [HTTPie](https://httpie.io/docs):
+ ```sh
+ http -p hbHB POST https://myip.vc/lookup/ \
+ 'addrs:=["185.130.47.1", "2a07:e00::333", "185.130.46.92"]'
+ ```
+   - Alternatively, you can pass `addrs` as a comma-separated string of addresses, via either GET or a
+     standard form-encoded POST.
+
+ GET example: `/lookup?addrs=185.130.47.1,2a07:e00::333,185.130.46.92`
+
+
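+A few quick examples using `curl`, combining the endpoints and output formats described above (the `.json` /
+`.yml` extensions follow the same pattern as `index.txt`, and the IPs are just the sample addresses used earlier):
+
+```sh
+# Full IP information, with the format selected via URL extension,
+# the Accept header, or the 'format' parameter
+curl -s https://myip.vc/index.json
+curl -s -H 'Accept: application/yaml' https://myip.vc/
+curl -s 'https://myip.vc/?format=json'
+
+# Individual pieces of information as plain text via /flat
+curl -s https://myip.vc/flat            # just your IPv4/IPv6 address
+curl -s https://myip.vc/flat/country    # e.g. 'Sweden'
+
+# Look up a specific address, optionally as YAML, or just a single field
+curl -s https://myip.vc/lookup.yml/2a07:e00::333
+curl -s https://myip.vc/lookup/185.130.47.1/country
+```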
+
# License
This project is licensed under the **GNU AGPL v3**
@@ -47,22 +149,156 @@ Here's the important parts:
# Requirements
- - **Ubuntu Bionic Server 18.04** is recommended, however other distros may work
+ - **Ubuntu Bionic Server 18.04** or **Ubuntu Focal 20.04** is recommended, however other distros may work
- **Redis** - Used for caching GeoIP data
- A copy of **GeoIP2** City + ASN, or **GeoLite2** City + ASN (you can get GeoLite2 just by running the included `update_geoip.sh`)
- - **Python 3.7+** is strongly recommended (3.6 is the bare minimum)
+ - **Python 3.7+** is strongly recommended (3.6 is the bare minimum).
+ - Confirmed working perfectly on Python 3.8 and 3.9 too :)
- Minimal hardware requirements, will probably run on as little as 512mb RAM and 1 core
-# Installation
+# Docker
-Quickstart (Tested on Ubuntu Bionic 18.04 - may work on other Debian-based distros):
+## Docker Compose
+
+We include a `docker-compose` setup, which allows you to easily run a `myip` container alongside a `redis` container, with
+the various container/image configuration options managed from `docker-compose.yml`
+
+So that you can customise `docker-compose.yml`, along with `.env` and the other configuration files related to the
+Docker setup, these files aren't shipped under the names they're actually used as - they're excluded from Git to ensure
+updates don't overwrite your existing configuration.
+
+Thus, to generate a `docker-compose.yml` file, along with the various other configuration files used by the compose file, you need to
+either run `./prep-docker.sh` (which only generates any missing configuration files), or `./start-docker.sh`
+(which generates missing config files, and then starts docker-compose).
+
+If you're already familiar with `docker-compose`, or simply know that you need to make some changes to the config before the
+containers are started, then you'll want `prep-docker.sh`, which only generates the configs:
+
+```sh
+# Generate any user config files which don't yet exist, such as docker-compose.yml and .env
+./prep-docker.sh
+```
+
+Once you've customised your `docker-compose.yml` and `.env` to your needs (or decided that the defaults will be fine for you), run
+start-docker:
+
+```sh
+# First, generates any user config files which don't yet exist, such as docker-compose.yml and .env
+# Then, creates/starts the docker containers using `docker-compose up -d`
+./start-docker.sh
+```
+
+Once you've run `start-docker.sh` - you should be able to see the containers `myip` and `myip-redis`
+when you run `docker ps`:
+
+```
+CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
+d75146ddd7c0 privex/myip "/app/init.sh" 2 minutes ago Up 2 minutes 127.0.0.1:5252->5252/tcp myip
+2e411428f3d2 redis:latest "docker-entrypoint.s…" 2 minutes ago Up 2 minutes 127.0.0.2:6379->6379/tcp myip-redis
+```
+
+Now all you need to do is [set up a reverse proxy (webserver) such as Caddy or Nginx](#webserver-example-configurations),
+and point it at `127.0.0.1:5252` :)
+
+**NOTE:** The default docker-compose will mount your local `/usr/share/GeoIP`
+onto the container's `/usr/share/GeoIP` - so that the GeoIP databases can be updated on the host, independently of the container.
+
+This means that you need to make sure that your host's GeoIP2 databases are regularly updated.
+
+To ensure your local host GeoIP2 databases stay up to date, follow the instructions in the
+section [Keeping your GeoIP2 Databases Up-to-date](#keeping-your-geoip2-databases-up-to-date)
+
+## Solo Container
+
+You can run our `whats-my-ip` app on its own using our official Docker image from DockerHub.
+
+Due to the way Docker networking works, you'll generally need a reverse proxy in front, which forwards
+the client's IP as `X-REAL-IP` (all uppercase), or as an alternative header whose name you can set
+using the `IP_HEADER` environment variable.
+
+For example, to run a container in the background, named `myip`, which exposes its gunicorn server
+port on your host server at `127.0.0.1:5252`, and have it auto-delete itself when the container is shut down,
+you'd run the following command:
+```sh
+docker run --name myip --rm -p 127.0.0.1:5252:5252 -itd privex/myip
```
+
+To make sure it's started correctly, and to diagnose any issues if it didn't, check its logs:
+
+```sh
+docker logs -t myip
+```
+
+Ideally, you should do the following for the best experience with the docker container:
+
+- Expose the port to your localhost using `-p 127.0.0.1:5252:5252`
+
+- Make sure it has a sensible container name such as `myip` - for easily interacting with it,
+  e.g. checking its logs, restarting it, stopping it, etc.
+
+- Mount `/usr/share/GeoIP` as a volume from your local host onto the container, so that you can regularly
+  update the GeoIP databases. The ones that are included with the container are likely to be quite old,
+ as they'd only be updated whenever we update this project and release a new docker image.
+
+ To do this, add `-v "/usr/share/GeoIP:/usr/share/GeoIP"` either directly before, or after
+ the port exposure (`-p 127.0.0.1:5252:5252`) with a space between.
+
+ To ensure your local host GeoIP2 databases stay up to date, follow the instructions in
+ the section [Keeping your GeoIP2 Databases Up-to-date](#keeping-your-geoip2-databases-up-to-date)
+
+- Create a `.env` file (see the `.env.example`), but avoid adding/uncommenting `HOST` or `PORT`,
+  as they will cause problems in Docker. To use it, simply add the `--env-file` CLI argument
+ to your `docker run` command, for example: `--env-file /home/youruser/myip/.env`
+
+- Run a [webserver / reverse proxy on the host](#webserver-example-configurations) (actual bare server),
+ not in Docker.
+
+ If you run a webserver such as Caddy or Nginx within Docker, it generally won't be able to see the
+ client's IP address, at least not without some complicated docker network configuration + firewall rules.
+
+- If possible, link the `myip` container to a `redis` container, and make sure that the `REDIS_HOST`
+ environment variable contains the container name of the `redis` container (docker's link system
+ will resolve a container name such as `redis` to the Docker LAN IP of the container).
+
+ If you don't want to link a redis container, you should set the environment variable `CACHE_ADAPTER`
+ to either `memory` (stores cache in memory, cache is lost when app is restarted), or `sqlite`
+ (cache will be persisted in an sqlite3 database, but will be lost if the container is destroyed,
+  e.g. when updated to a newer image). This prevents the many warnings about being unable to connect
+  to the default Redis or Memcached server, which would otherwise be printed when you start the
+  container / make the first request.
+
+ You can permanently persist the SQLite3 databases used for caching with the `sqlite` cache adapter,
+ simply by adding a volume bind between a folder on your local host, and `/root/.privex_cache` on
+ the container, e.g. `-v "${HOME}/.privex_cache:/root/.privex_cache"`
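+
+  As a minimal sketch (the flags simply combine the options described above), a standalone run using the
+  `sqlite` cache adapter with a persistent cache volume could look like this:
+
+  ```sh
+  docker run --name myip --restart always \
+      --env 'CACHE_ADAPTER=sqlite' \
+      -v "${HOME}/.privex_cache:/root/.privex_cache" \
+      -v "/usr/share/GeoIP:/usr/share/GeoIP" \
+      -p 127.0.0.1:5252:5252 -itd privex/myip
+  ```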
+
+Example full commands, including Redis linking:
+
+```sh
+touch .env
+# Create a Redis container with persistent storage at ~/whats-my-ip/dkr/data/redis on the host
+docker run --name redis --hostname redis --restart always -p 127.0.0.1:6379:6379 \
+ -v "${HOME}/whats-my-ip/dkr/data/redis:/data" -itd redis
+
+# Create the myip container - link it to the redis container, add an ENV var pointing its Redis cache
+# to the redis container, load the .env file at ~/whats-my-ip/.env, and expose its server port at 127.0.0.1:5252
+docker run --name myip --link redis --restart always --env 'REDIS_HOST=redis' \
+ --env-file "${HOME}/whats-my-ip/.env" -p 127.0.0.1:5252:5252 -itd privex/myip
+```
+
+
+# Normal Installation
+
+## Ubuntu/Debian Quickstart
+
+Quickstart (Tested on Ubuntu Bionic 18.04 - may work on other Debian-based distros):
+
+```sh
sudo apt update -y
####
#
-# - Python 3.7 is strongly recommended, we cannot guarantee compatibility with older versions
+# - Python 3.7, 3.8, 3.9, or newer is strongly recommended, we cannot guarantee compatibility with older versions
# - Redis is used for caching GeoIP data for increased performance
#
####
@@ -149,6 +385,30 @@ sudo systemctl start myip.service
```
+## Webserver Example Configurations
+
+### Example Caddy (v2) Caddyfile Configuration
+
+```
+myip.example.com, v4.myip.example.com, v6.myip.example.com
+{
+ root * /home/myip/whats-my-ip/static
+ route /static/* {
+ uri strip_prefix /static
+ file_server
+ }
+ route /favicon.ico {
+ file_server
+ }
+ reverse_proxy 127.0.0.1:5151 {
+ header_up X-REAL-IP {remote_host}
+ header_up X-Forwarded-For {remote_host}
+ header_up X-Forwarded-Proto {http.request.scheme}
+ header_up X-Forwarded-Host {host}
+ }
+}
+```
+
### Example Nginx Configuration
```
@@ -184,6 +444,126 @@ server {
}
```
+# Keeping your GeoIP2 databases up-to-date
+
+## Register for a Maxmind account to get an API key
+
+Register for a Maxmind Account (it's FREE), so that you can get an API key to use with `geoipupdate`
+
+The API key will allow you to use their `geoipupdate` tool for Linux/UNIX, which can be used with crontab
+to automatically update your GeoIP2 databases every week, or however often you'd like.
+
+You can register here for free: [https://www.maxmind.com/en/geolite2/signup](https://www.maxmind.com/en/geolite2/signup)
+
+## Install the geoipupdate tool from Maxmind
+
+On Ubuntu (and maybe Debian / debian-based distros), you can install `geoipupdate` from Maxmind's PPA:
+
+```sh
+sudo apt update -y
+# You need software-properties-common to be able to install PPAs
+sudo apt install -y software-properties-common
+# Add/enable Maxmind's PPA repo
+sudo add-apt-repository -y ppa:maxmind/ppa
+
+# Update your repos to make sure you get the new package indexes from the PPA
+sudo apt update -y
+# Install the geoipupdate tool
+sudo apt install -y geoipupdate
+```
+
+## Configure the tool with your API credentials
+
+Open the file `/etc/GeoIP.conf` in whatever editor you prefer - `nano` is common and one of the easiest for users who aren't
+the most familiar with using the CLI:
+
+```sh
+sudo nano /etc/GeoIP.conf
+```
+
+In your `/etc/GeoIP.conf`, add your AccountID and LicenseKey from Maxmind, and make sure `EditionIDs` contains
+all 3 databases, as shown below:
+
+```sh
+# The account ID and license key you got after registering with Maxmind for free.
+AccountID 12345
+LicenseKey aBCdeF12345
+
+EditionIDs GeoLite2-ASN GeoLite2-City GeoLite2-Country
+
+## The directory to store the database files. Defaults to /usr/share/GeoIP
+DatabaseDirectory /usr/share/GeoIP
+```
+
+## Setup a weekly cron to update your GeoIP databases
+
+Create a small script in your `/etc/cron.weekly` folder, to make sure that `geoipupdate` is automatically run at least once per
+week (Maxmind releases GeoLite updates every 1-2 weeks).
+
+Simply paste the below commands into your terminal - they will create the cron script for you,
+and mark it as executable.
+
+```sh
+sudo tee /etc/cron.weekly/geoip <<"EOF"
+#!/usr/bin/env bash
+/usr/bin/geoipupdate
+
+EOF
+
+sudo chmod +x /etc/cron.weekly/geoip
+```
+
+## Run geoipupdate to update your DBs now
+
+First, check if `/usr/share/GeoIP` exists:
+
+```sh
+sudo ls -lah /usr/share/GeoIP
+```
+
+If you see an error such as `No such file or directory` - then the folder doesn't exist yet,
+and you should create it, to avoid issues:
+
+```sh
+sudo mkdir -pv /usr/share/GeoIP
+```
+
+Now you can run `geoipupdate` to update your databases (`-v` means verbose, so it prints detailed logs):
+
+```sh
+sudo geoipupdate -v
+```
+
+If there were no obvious errors, then your databases should be updated :)
+
+To confirm they're updated, you can simply use `ls` to check the dates on the files in the folder:
+
+```sh
+ls -lha /usr/share/GeoIP
+```
+
+You should see the database files starting with `GeoLite2-` - each with a date sometime within
+the past 2 weeks (14 days):
+
+```sh
+total 80M
+drwxr-xr-x 2 root root 4.0K Jun 29 18:03 .
+drwxr-xr-x 108 root root 4.0K Jun 27 20:16 ..
+-rw-r--r-- 1 root root 1.4M Mar 15 2018 GeoIP.dat
+-rw------- 1 root root 0 May 3 2020 .geoipupdate.lock
+-rw-r--r-- 1 root root 5.3M Mar 15 2018 GeoIPv6.dat
+-rw-r--r-- 1 root root 7.1M Jun 27 06:34 GeoLite2-ASN.mmdb
+-rw-r--r-- 1 root root 62M Jun 27 06:34 GeoLite2-City.mmdb
+-rw-r--r-- 1 root root 4.1M Jun 27 06:34 GeoLite2-Country.mmdb
+```
+
+You can ignore `GeoIP.dat` and `GeoIPv6.dat` if they're present - those are an older format of Maxmind's GeoIP
+databases, which are sometimes installed by certain legacy Linux packages. `geoipupdate` doesn't affect those,
+and Privex's `whats-my-ip` application does not use those.
+
+If you've got this far without any issues - then congratulations! You've now got up-to-date GeoIP2 database files,
+and the cron will ensure they stay up-to-date :)
+
# Contributing
We're very happy to accept pull requests, and work on any issues reported to us.
@@ -236,4 +616,6 @@ If you'd rather read the whole legal text, it should be included as `privex_cont
# Thanks for reading!
-**If this project has helped you, consider [grabbing a VPS or Dedicated Server from Privex](https://www.privex.io) - prices start at as little as US$8/mo (we take cryptocurrency!)**
\ No newline at end of file
+**If this project has helped you,**
+**consider [grabbing a VPS or Dedicated Server from Privex](https://www.privex.io) - prices**
+**start at as little as US$8/mo (we take cryptocurrency!)**
diff --git a/dkr/config/caddy/.gitignore b/dkr/config/caddy/.gitignore
new file mode 100644
index 0000000..497148d
--- /dev/null
+++ b/dkr/config/caddy/.gitignore
@@ -0,0 +1,3 @@
+*
+!*.example
+!.gitignore
diff --git a/dkr/config/caddy/Caddyfile.example b/dkr/config/caddy/Caddyfile.example
new file mode 100644
index 0000000..c2db8d5
--- /dev/null
+++ b/dkr/config/caddy/Caddyfile.example
@@ -0,0 +1,31 @@
+################################################################################
+# Example Caddyfile for Privex/whats-my-ip Caddy docker container.
+#
+# If this file is 'Caddyfile.example' then you need to copy this file
+# to 'Caddyfile' for it to actually work.
+#
+# For production, simply point your domain at your server's IPv4 and/or IPv6
+# address using an A and/or AAAA record, and replace ':80' in this Caddyfile with
+# your domain, e.g. myip.example.com
+# Caddy will then automatically get an SSL/TLS certificate + use HTTPS
+# for your domain, with no effort needed from you.
+#
+################################################################################
+
+:80
+{
+ root * /app/static
+ route /static/* {
+ uri strip_prefix /static
+ file_server
+ }
+ route /favicon.ico {
+ file_server
+ }
+ reverse_proxy myip:5252 {
+ header_up X-REAL-IP {remote_host}
+ header_up X-Forwarded-For {remote_host}
+ header_up X-Forwarded-Proto {http.request.scheme}
+ header_up X-Forwarded-Host {host}
+ }
+}
diff --git a/dkr/config/caddy/caddy.env.example b/dkr/config/caddy/caddy.env.example
new file mode 100644
index 0000000..b2533ff
--- /dev/null
+++ b/dkr/config/caddy/caddy.env.example
@@ -0,0 +1,2 @@
+CADDY_SSL_PORT="[::]:443:443"
+CADDY_HTTP_PORT="[::]:80:80"
diff --git a/dkr/config/redis/.gitignore b/dkr/config/redis/.gitignore
new file mode 100644
index 0000000..497148d
--- /dev/null
+++ b/dkr/config/redis/.gitignore
@@ -0,0 +1,3 @@
+*
+!*.example
+!.gitignore
diff --git a/dkr/data/caddy/.gitignore b/dkr/data/caddy/.gitignore
new file mode 100644
index 0000000..40263ad
--- /dev/null
+++ b/dkr/data/caddy/.gitignore
@@ -0,0 +1,4 @@
+*
+!.gitignore
+!data/
+!config/
diff --git a/dkr/data/caddy/config/.gitignore b/dkr/data/caddy/config/.gitignore
new file mode 100644
index 0000000..d6b7ef3
--- /dev/null
+++ b/dkr/data/caddy/config/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/dkr/data/caddy/data/.gitignore b/dkr/data/caddy/data/.gitignore
new file mode 100644
index 0000000..d6b7ef3
--- /dev/null
+++ b/dkr/data/caddy/data/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/dkr/data/redis/.gitignore b/dkr/data/redis/.gitignore
new file mode 100644
index 0000000..d6b7ef3
--- /dev/null
+++ b/dkr/data/redis/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/dkr/init.sh b/dkr/init.sh
new file mode 100755
index 0000000..c965080
--- /dev/null
+++ b/dkr/init.sh
@@ -0,0 +1,70 @@
+#!/usr/bin/env bash
+################################################################
+# #
+# Docker runner script for: #
+# #
+# Privex IP Information Tool #
+# (C) 2021 Privex Inc. GNU AGPL v3 #
+# #
+# Privex Site: https://www.privex.io/ #
+# #
+# Github Repo: https://github.com/Privex/whats-my-ip #
+# #
+################################################################
+
+export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:${PATH}"
+export PATH="${HOME}/.local/bin:/snap/bin:${PATH}"
+
+######
+# Directory where the script is located, so we can source files regardless of where PWD is
+######
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+cd "$DIR"
+
+[[ -f .env ]] && source .env || true
+
+# Override these defaults inside of `.env`
+: ${HOST='0.0.0.0'}
+: ${PORT='5252'}
+: ${GU_WORKERS='4'} # Number of Gunicorn worker processes
+#EXTRA_ARGS=()
+
+if (( $# > 0 )); then
+ if [[ "$1" == "-h" || "$1" == "--help" ]]; then
+ echo "
+Usage: docker run --rm -it privex/myip (host=${HOST}) (port=${PORT}) (workers=${GU_WORKERS}) [extra_args_for_gunicorn]
+
+We recommend using Docker's environment flags for the run command, e.g. '--env-file ${HOME}/myip.env'
+or '--env \"HOST=::\" --env GU_WORKERS=10'
+
+"
+        exit 0
+ fi
+ i=0
+
+ while (( $# > 0 )); do
+ if (( i < 3 )) && [[ -n "$1" ]]; then
+ (( i == 0 )) && HOST="$1" && echo " > Set HOST from CLI argument: $HOST"
+ (( i == 1 )) && PORT="$1" && echo " > Set PORT from CLI argument: $PORT"
+ (( i == 2 )) && GU_WORKERS="$1" && echo " > Set GU_WORKERS from CLI argument: $GU_WORKERS"
+ fi
+# if (( i > 2 )); then
+# EXTRA_ARGS+=("$1")
+# fi
+ i=$(( i + 1 ))
+ shift
+ done
+fi
+
+echo " > HOST: ${HOST}"
+echo " > PORT: ${PORT}"
+echo " > GU_WORKERS: ${GU_WORKERS}"
+#echo " > EXTRA_ARGS:" "$(printf '%q ' "${EXTRA_ARGS[@]}")"
+
+#if (( ${#EXTRA_ARGS[@]} > 0 )); then
+# pipenv run gunicorn -b "${HOST}:${PORT}" -w "$GU_WORKERS" "${EXTRA_ARGS[@]}" wsgi
+#else
+pipenv run gunicorn -b "${HOST}:${PORT}" -w "$GU_WORKERS" wsgi
+#fi
diff --git a/example-docker-compose.yml b/example-docker-compose.yml
new file mode 100644
index 0000000..51feade
--- /dev/null
+++ b/example-docker-compose.yml
@@ -0,0 +1,57 @@
+version: '2'
+
+services:
+ myip:
+ container_name: myip
+ restart: always
+# build: .
+ image: privex/myip
+ ports:
+ - "127.0.0.1:5252:5252"
+ volumes:
+ - "/usr/share/GeoIP:/usr/share/GeoIP"
+ depends_on:
+ - redis
+ environment:
+ REDIS_HOST: redis
+ CACHE_ADAPTER: redis
+ env_file: ./.env
+
+ redis:
+ restart: always
+ image: redis:latest
+ hostname: redis
+ container_name: myip-redis
+ env_file: ./dkr/config/redis/redis.env
+ volumes:
+ - "./dkr/data/redis:/data"
+ ports:
+ # To avoid a potential conflict with a host-installed Redis,
+ # we expose the port onto the alternative loopback IP 127.0.0.2
+ - "127.0.0.2:6379:6379"
+
+############################################
+# Due to Docker networking generally using some form of NAT between clients and Docker containers,
+# it's somewhat pointless to run a webserver for this app in Docker, since the webserver will be unable to
+# see the real client IP, resulting in the app simply receiving the Docker network gateway IP instead of
+# the actual client's IP.
+#
+# Unless your server is behind an external reverse proxy such as Cloudflare, which forwards the client's IP
+# in a header which you're able to process / pass to the application, we strongly recommend that you run
+# a webserver/reverse proxy on the host (actual bare server), not in docker.
+#####
+#
+# caddy:
+# restart: always
+# image: caddy:latest
+# hostname: caddy
+# container_name: myip-caddy
+# volumes:
+# - "./dkr/config/caddy/Caddyfile:/etc/caddy/Caddyfile"
+# - "./dkr/data/caddy/data:/data"
+# - "./dkr/data/caddy/config:/config"
+# env_file: ./dkr/config/caddy/caddy.env
+# ports:
+# - "443:443"
+# - "80:80"
+
diff --git a/myip/app.py b/myip/app.py
index 1b6e925..f4d3098 100755
--- a/myip/app.py
+++ b/myip/app.py
@@ -28,7 +28,7 @@
from markdown.extensions.toc import TocExtension
from markdown.extensions.fenced_code import FencedCodeExtension
from myip import settings
-from myip.core import GeoType, app, cf, dump_yaml, get_redis, get_ip, get_rdns, merge_frm, wants_type
+from myip.core import GeoType, app, cf, dump_yaml, get_cache, get_ip, get_rdns, merge_frm, wants_type
from flask import Response, request, jsonify, render_template, render_template_string
from privex.helpers import DictDataClass, DictObject, K, STRBYTES, T, V, empty, empty_if, ip_is_v4, stringify
from privex.helpers.geoip import GeoIPResult, geolocate_ips
@@ -267,7 +267,7 @@ def get_geodata(ip, fail=False) -> Optional[GeoIPResult]:
"""
ip = str(ip)
- r, rkey = get_redis(), f'geoip:{ip}'
+ r, rkey = get_cache(), f'geoip:{ip}'
cgdata: STRBYTES = r.get(rkey)
if not empty(cgdata):
diff --git a/myip/core.py b/myip/core.py
index e5378d7..ecc0073 100644
--- a/myip/core.py
+++ b/myip/core.py
@@ -22,14 +22,16 @@
import sys
import warnings
# from pathlib import Path
+from ipaddress import IPv4Address, IPv6Address
from typing import Any, ContextManager, Iterable, List, Mapping, Type, Union
import geoip2.database
import yaml
from flask import Flask, Request, request
from privex.loghelper import LogHelper
-from privex.helpers import DictDataClass, DictObject, Dictable, K, T, ip_is_v6, ip_is_v4, empty, empty_if
+from privex.helpers import CacheAdapter, DictDataClass, DictObject, Dictable, K, T, ip_is_v6, ip_is_v4, empty, empty_if, r_cache, stringify
from privex.helpers.geoip import geoip_manager
+from privex.helpers.cache import adapter_get, adapter_set, MemoryCache
import accept_types
from myip import settings
@@ -55,7 +57,7 @@
console_std = console_err = Mocker.make_mock_class('Console')
err_print = printerr = print_err = lambda *args, file=sys.stderr, **kwargs: print(*args, file=file, **kwargs)
std_print = printstd = print_std = print
-import redis
+
import logging
@@ -96,11 +98,80 @@
"""This dictionary stores instances of various connection classes, such as Redis and GeoIP2"""
-def get_redis() -> redis.Redis:
- """Initialise or obtain a Redis instance from _STORE"""
- if 'redis' not in _STORE:
- _STORE['redis'] = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT)
- return _STORE['redis']
+def set_cache_adapter(adapter: Union[str, CacheAdapter] = None, reset=False) -> CacheAdapter:
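+    """
+    Configure the global cache adapter used by the app (via privex-helpers' ``adapter_set``).
+
+    If ``adapter`` is given - or ``settings.CACHE_ADAPTER`` is set - that adapter is passed straight to
+    ``adapter_set``. Otherwise Redis is tried first, then Memcached, then the in-memory cache, and the first
+    adapter which passes a small set/get test is used. Once configured, the same adapter is returned on
+    subsequent calls unless ``reset=True``.
+    """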
+ if not reset and settings.CACHE_ADAPTER_SET:
+ log.debug(" [core.set_cache_adapter] CACHE_ADAPTER_SET is True + reset is False. Returning "
+ "pre-configured adapter via adapter_get...")
+ adp = adapter_get()
+ log.debug(" [core.set_cache_adapter] Adapter is: %s", repr(adp))
+ return adp
+
+ adapter = empty_if(adapter, settings.CACHE_ADAPTER, zero=True)
+ if empty(adapter, zero=True):
+ try:
+ # import redis
+ log.debug(" [core.set_cache_adapter] Attempting to import RedisCache")
+ from privex.helpers.cache.RedisCache import RedisCache
+ log.debug(" [core.set_cache_adapter] Successfully imported RedisCache - calling adapter_set(RedisCache())")
+ res = adapter_set(RedisCache(use_pickle=True))
+ res.set("myip:testing_cache", "test123", 120)
+ crz = res.get("myip:testing_cache", fail=True)
+ assert stringify(crz) == "test123"
+ log.info(" [core.set_cache_adapter] REDIS WORKS :) - Successfully tested Redis by setting + getting a key, "
+ "and validating the result. Will use Redis for caching!")
+ except Exception as sce:
+ log.warning(f"Failed to import 'privex.helpers.cache.RedisCache' for cache adapter. Reason: {type(sce)} - {sce!s}")
+ log.warning("Please make sure the package 'redis' is installed to use the Redis adapter, and that "
+ "the Redis server is actually running. Attempting to fallback to Memcached")
+ try:
+ log.debug(" [core.set_cache_adapter] Attempting to import MemcachedCache")
+ from privex.helpers.cache.MemcachedCache import MemcachedCache
+ log.debug(" [core.set_cache_adapter] Successfully imported MemcachedCache - calling adapter_set(MemcachedCache())")
+
+ res = adapter_set(MemcachedCache(use_pickle=True))
+ res.set("myip:testing_cache", "test123", 120)
+ crz = res.get("myip:testing_cache", fail=True)
+ assert stringify(crz) == "test123"
+ log.info(" [core.set_cache_adapter] MEMCACHED WORKS :) - Successfully tested Memcached by setting + getting a key, "
+ "and validating the result. Will use Memcached for caching!")
+ except Exception as scx:
+ log.warning(f"Failed to import 'privex.helpers.cache.MemcachedCache' for cache adapter. Reason: {type(scx)} - {scx!s}")
+ log.warning("Please make sure the package 'pylibmc' is installed to use the Memcached adapter, and that "
+ "the Memcached server is actually running. Attempting to fallback to Memory Cache")
+ log.debug(" [core.set_cache_adapter] Failed to set both redis + memcached. Falling back to "
+ "MemoryCache - adapter_set(MemoryCache())")
+
+ res = adapter_set(MemoryCache())
+ log.info(" [core.set_cache_adapter] Going to use Memory Cache for caching.")
+ else:
+ log.debug(" [core.set_cache_adapter] Setting Cache Adapter using user specified string: %s", adapter)
+
+ res = adapter_set(adapter)
+ log.debug(" [core.set_cache_adapter] Got cache adapter from adapter_set(%s): %s", repr(adapter), repr(res))
+
+ settings.CACHE_ADAPTER_SET = True
+ return res
+
+
+if settings.CACHE_ADAPTER_INIT:
+ set_cache_adapter()
+
+
+def get_cache(default: Union[str, CacheAdapter] = None, reset=False) -> CacheAdapter:
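+    """
+    Return the currently configured cache adapter, initialising it via :func:`.set_cache_adapter` (optionally
+    with the ``default`` adapter) if one hasn't been configured yet, or if ``reset=True``.
+    """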
+ if not reset and settings.CACHE_ADAPTER_SET:
+ log.debug(" [core.get_cache] CACHE_ADAPTER_SET is True + reset is False. Returning "
+ "pre-configured adapter via adapter_get...")
+ adp = adapter_get()
+ log.debug(" [core.get_cache] Adapter is: %s", repr(adp))
+ return adp
+ return set_cache_adapter(default, reset=reset)
+
+
+# def get_redis() -> redis.Redis:
+# """Initialise or obtain a Redis instance from _STORE"""
+# if 'redis' not in _STORE:
+# _STORE['redis'] = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT)
+# return _STORE['redis']
class GeoType(Enum):
@@ -150,12 +221,19 @@ def get_ip() -> str:
return request.remote_addr or 'Unknown IP...'
-def get_rdns(ip):
+def get_rdns_base(ip: Union[str, IPv4Address, IPv6Address, Any], fallback: T = None, fail=False) -> Union[str, T]:
+ ip = str(stringify(ip))
try:
return str(socket.gethostbyaddr(ip)[0])
except Exception as e:
log.info('Could not resolve IP %s due to exception %s %s', ip, type(e), str(e))
- return ""
+ if fail: raise e
+ return fallback
+
+
+@r_cache(lambda ip, fallback="", fail=False: f"myip:rdns:{ip!s}:{fail!r}", cache_time=settings.RDNS_CACHE_SEC)
+def get_rdns(ip: Union[str, IPv4Address, IPv6Address, Any], fallback: T = "", fail=False) -> Union[str, T]:
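+    """Cached wrapper around :func:`.get_rdns_base` - results are cached for ``settings.RDNS_CACHE_SEC`` seconds."""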
+ return get_rdns_base(ip, fallback, fail=fail)
CONTENT_TYPES = dict(
diff --git a/myip/settings.py b/myip/settings.py
index a8c7e75..42c93f4 100644
--- a/myip/settings.py
+++ b/myip/settings.py
@@ -35,6 +35,13 @@
dotenv.load_dotenv()
+MINUTE = 60
+HOUR = MINUTE * 60
+DAY = HOUR * 24
+WEEK = DAY * 7
+MONTH = DAY * 30
+YEAR = DAY * 365
+
cf = DictObject()
APP_DIR = Path(__file__).parent.expanduser().resolve()
TEMPLATES_DIR = APP_DIR / 'templates'
@@ -95,6 +102,48 @@
instead of reading the host from the headers.
"""
+CACHE_ADAPTER = env('CACHE_ADAPTER', None)
+"""
+This environment var controls which caching adapter is used to store any cached data from the app.
+
+By default, it's set to ``None`` (empty) which results in the ``auto`` behaviour - trying redis, memcached, and then memory cache,
+until one works.
+
+Choices:
+
+ * blank ``''`` / auto / automatic = All three of these options result in the variable being set to ``None``, which enables
+   the default cache adapter selection behaviour - the following 3 cache adapters are tried in order, until one works::
+
+ * ``RedisCache`` - requires the ``redis`` PyPi package to be used
+ * ``MemcachedCache`` - requires the ``pylibmc`` PyPi package to be used
+ * ``MemoryCache`` - dependency free (apart from the package containing this adapter itself - ``privex-helpers``)
+
+ * ram / mem / memory / MemoryCache = In-Memory cache - stores cached data in application RAM, which is lost when app is restarted.
+
+ * mcache / memcache / memcached / MemcachedCache = Memcached cache - stores cached data in ``memcached`` server,
+ requires ``memcached`` service to be installed and running on the server
+
+ * redis / RedisCache / RedisAdapter = Redis cache - stores cached data in ``redis`` server, requires ``redis`` service
+ to be installed and running either on this server, or a remote one specified using ``REDIS_HOST`` / ``REDIS_PORT`` env vars.
+
+ * sqlite / sqlite3 / sqlitedb = SQLite3 DB Cache - This is the only persistent cache which doesn't require a separate
+      server daemon to be running alongside the app. However, it can have performance issues, as its file-based design means
+      only one thing (thread/app) can write to it at a time, while both reading and writing likely involve Python's GIL,
+ preventing asynchronous / threaded code from properly running in parallel.
+
+"""
+
+if empty(CACHE_ADAPTER, zero=True) or CACHE_ADAPTER.lower() in ['auto', 'automatic']:
+ CACHE_ADAPTER = None
+
+CACHE_ADAPTER_SET = False
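+"""
+Internal flag - set to ``True`` by :func:`myip.core.set_cache_adapter` once a cache adapter has been configured,
+allowing repeat calls to simply return the already-configured adapter.
+"""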
+
+CACHE_ADAPTER_INIT = env_bool('CACHE_ADAPTER_INIT', True)
+"""
+When True, automatically sets and initialises the cache adapter in core.py during app init. When False,
+the cache adapter will be lazily set/initialised, i.e. only set up once something calls :func:`myip.core.get_cache`
+"""
+
def _gen_hosts(*domains) -> list:
domlist = []
@@ -132,11 +181,15 @@ def _gen_hosts(*domains) -> list:
pvx_settings.GEOCITY = GEOIP_PATH / GEOCITY_NAME
pvx_settings.GEOCOUNTRY = GEOIP_PATH / GEOCOUNTRY_NAME
-cf['GEOIP_CACHE_SEC'] = GEOIP_CACHE_SEC = int(env('GEOIP_CACHE_SEC', 600))
+cf['GEOIP_CACHE_SEC'] = GEOIP_CACHE_SEC = int(env('GEOIP_CACHE_SEC', 10 * MINUTE))
"""Amount of seconds to cache GeoIP data in Redis for. Default is 600 seconds (10 minutes)"""
-REDIS_HOST = env('REDIS_HOST', 'localhost')
-REDIS_PORT = int(env('REDIS_PORT', 6379))
+cf['RDNS_CACHE_SEC'] = RDNS_CACHE_SEC = int(env('RDNS_CACHE_SEC', 1 * HOUR))
+"""Amount of seconds to cache Reverse DNS (rDNS) lookup results. Default is 1 hour (3600 seconds)"""
+
+pvx_settings.REDIS_HOST = REDIS_HOST = env('REDIS_HOST', 'localhost')
+pvx_settings.REDIS_PORT = REDIS_PORT = int(env('REDIS_PORT', 6379))
+pvx_settings.REDIS_DB = REDIS_DB = int(env('REDIS_DB', 0))
#######################################
#
diff --git a/prep-docker.sh b/prep-docker.sh
new file mode 100755
index 0000000..b2fb694
--- /dev/null
+++ b/prep-docker.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+################################################################
+# #
+# Docker configuration prep script for: #
+# #
+# Privex IP Information Tool #
+# (C) 2021 Privex Inc. GNU AGPL v3 #
+# #
+# Privex Site: https://www.privex.io/ #
+# #
+# Github Repo: https://github.com/Privex/whats-my-ip #
+# #
+################################################################
+
+export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:${PATH}"
+export PATH="${HOME}/.local/bin:/snap/bin:${PATH}"
+
+######
+# Directory where the script is located, so we can source files regardless of where PWD is
+######
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+cd "$DIR"
+
+: ${DK_CONF="${DIR}/dkr/config"}
+: ${DK_DATA="${DIR}/dkr/data"}
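+
+# Both paths above can be overridden from the environment before running this script, e.g.
+# (illustrative path): DK_CONF="/opt/myip/config" ./prep-docker.sh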
+
+echo " [...] Auto-generating any missing ENV files + configs needed for docker-compose ..."
+
+[[ -f ./docker-compose.yml ]] || cp -v example-docker-compose.yml docker-compose.yml
+[[ -f .env ]] || cp -v .env.example .env
+
+[[ -f "${DK_CONF}/caddy/Caddyfile" ]] || cp -v "${DK_CONF}/caddy/Caddyfile.example" "${DK_CONF}/caddy/Caddyfile"
+[[ -f "${DK_CONF}/caddy/caddy.env" ]] || cp -v "${DK_CONF}/caddy/caddy.env.example" "${DK_CONF}/caddy/caddy.env"
+[[ -f "${DK_CONF}/redis/redis.env" ]] || { echo "Touching file: ${DK_CONF}/redis/redis.env"; touch "${DK_CONF}/redis/redis.env"; };
+
+echo " [+++] Finished generating configs."
diff --git a/requirements.txt b/requirements.txt
index df2d05b..d9f1b59 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,11 +5,13 @@ gunicorn>=19.9.0
Jinja2
MarkupSafe
maxminddb>=1.4.1
-privex-helpers[full]>=3.0
+privex-helpers>=3.0
privex-loghelper>=1.0.5
+privex-db>=0.9.2
markdown>=3.0.1
python-dotenv
redis>=3.3
+pylibmc
requests>=2.2
pyyaml
rich
diff --git a/run.sh b/run.sh
index 9dc070c..779f36e 100755
--- a/run.sh
+++ b/run.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
################################################################
# #
# Production runner script for: #
diff --git a/start-docker.sh b/start-docker.sh
new file mode 100755
index 0000000..b7a1379
--- /dev/null
+++ b/start-docker.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+################################################################
+# #
+# Docker config + start script for: #
+# #
+# Privex IP Information Tool #
+# (C) 2021 Privex Inc. GNU AGPL v3 #
+# #
+# Privex Site: https://www.privex.io/ #
+# #
+# Github Repo: https://github.com/Privex/whats-my-ip #
+# #
+################################################################
+
+export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:${PATH}"
+export PATH="${HOME}/.local/bin:/snap/bin:${PATH}"
+
+######
+# Directory where the script is located, so we can source files regardless of where PWD is
+######
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+cd "$DIR"
+
+#: ${DK_CONF="${DIR}/dkr/config"}
+#: ${DK_DATA="${DIR}/dkr/data"}
+bash "${DIR}/prep-docker.sh"
+: ${DK_COMPOSE_BIN="docker-compose"}
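+# DK_COMPOSE_BIN can be overridden if docker-compose isn't on your PATH, e.g. (illustrative path):
+#   DK_COMPOSE_BIN="/usr/local/bin/docker-compose" ./start-docker.sh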
+
+#echo " [...] Auto-generating any missing ENV files + configs needed for docker-compose ..."
+#
+#[[ -f ./docker-compose.yml ]] || cp -v example-docker-compose.yml docker-compose.yml
+#[[ -f .env ]] || cp -v .env.example .env
+#
+#[[ -f "${DK_CONF}/caddy/Caddyfile" ]] || cp -v "${DK_CONF}/caddy/Caddyfile.example" "${DK_CONF}/caddy/Caddyfile"
+#[[ -f "${DK_CONF}/caddy/caddy.env" ]] || cp -v "${DK_CONF}/caddy/caddy.env.example" "${DK_CONF}/caddy/caddy.env"
+#[[ -f "${DK_CONF}/redis/redis.env" ]] || { echo "Touching file: ${DK_CONF}/redis/redis.env"; touch "${DK_CONF}/redis/redis.env"; };
+
+#echo " [+++] Finished generating configs."
+echo " [...] Starting My IP application in Docker using 'docker-compose up -d'"
+
+"${DK_COMPOSE_BIN}" up -d
+_ret=$?
+
+if (( _ret )); then
+ echo -e "\n [!!!] ERROR: Non-zero return code from docker-compose! Exit code: ${_ret}\n"
+else
+ echo -e "\n [+++] docker-compose exited with return code 0 (success) - no error detected :)\n"
+fi
+
+exit $_ret
+
diff --git a/update_geoip.sh b/update_geoip.sh
index c3c5a71..49a8865 100755
--- a/update_geoip.sh
+++ b/update_geoip.sh
@@ -1,10 +1,10 @@
-#!/bin/bash
+#!/usr/bin/env bash
################################################################
# #
# Geolite2 database update script for: #
# #
# Privex IP Information Tool #
-# (C) 2019 Privex Inc. GNU AGPL v3 #
+# (C) 2021 Privex Inc. GNU AGPL v3 #
# #
# Privex Site: https://www.privex.io/ #
# #
@@ -12,9 +12,13 @@
# #
################################################################
+export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:${PATH}"
+export PATH="${HOME}/.local/bin:/snap/bin:${PATH}"
+
# Types of GeoLite2 databases to download
k=(ASN Country City)
+
################
# Override the installation directory by specifying on the CLI.
# Make sure the current user actually has permission to place files in that folder.
@@ -23,19 +27,59 @@ k=(ASN Country City)
#
# user@host ~ $ GEOLITE_DIR='/usr/share/GeoIP' ./update_geoip.sh
#
-: ${GEOLITE_DIR='/usr/local/var/GeoIP'}
+: ${GEOLITE_DIR='/usr/share/GeoIP'}
+: ${REMOTE_SRV='files.privex.io'}
+: ${REMOTE_DIR='/cdn/extras/GeoIP'}
+
+find-cmd() {
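+    # Returns success (exit code 0) if "$1" exists as a file in one of the common binary directories.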
+    [[ -f "/usr/bin/$1" || -f "/bin/$1" || -f "/usr/sbin/$1" || -f "/sbin/$1" || -f "/usr/local/bin/$1" ]]
+}
+
+xsudo() {
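+    # Runs the given command as root: when already root, any leading sudo-style option flags are stripped
+    # and the command is executed directly via 'env' (nesting sudo is refused); otherwise it falls back to
+    # 'sudo' or 'su', whichever is available. Usage (illustrative): xsudo mkdir -p /some/dir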
+    if (( EUID == 0 )); then
+        while (( $# > 0 )); do
+            if [[ "$1" == "sudo" || "$1" == "sg-sudo" ]]; then
+                >&2 echo -e "Attempted to run sudo with sudo!!!!"
+                return 2
+            fi
+            [[ "$1" == "--" ]] && break
+            if grep -Eq '^\-' <<< "$1"; then
+                shift
+            else
+                break
+            fi
+        done
+        env -- "$@"
+        return $?
+    else
+        if find-cmd sudo; then
+            sudo "$@"
+            return $?
+        elif find-cmd su; then
+            su -c "$(printf '%q ' "$@")"
+            return $?
+        fi
+    fi
+}
-cd /tmp
-mkdir -p "$GEOLITE_DIR"
+#cd /tmp
+echo -e "\n >>> Creating GeoIP folder if it doesn't already exist: $GEOLITE_DIR \n\n"
-echo "Removing any old Geolites from temp folder ..."
-rm -rv GeoLite2-*
-echo "Downloading new Geolite databases into /tmp/"
+[[ -d "$GEOLITE_DIR" ]] || sudo mkdir -vp "$GEOLITE_DIR"
+
+#echo "Removing any old Geolites from temp folder ..."
+#rm -rv GeoLite2-*
+echo -e "\n >>>> Downloading new Geolite databases into ${GEOLITE_DIR} \n"
for i in ${k[@]}; do
- echo "Downloading Geolite $i ..."
- wget -q http://geolite.maxmind.com/download/geoip/database/GeoLite2-${i}.tar.gz
- echo "Extracting Geolite $i ..."
- tar xf GeoLite2-${i}.tar.gz
- echo "Installing Geolite $i into ${GEOLITE_DIR}/GeoLite2-${i}.mmdb ..."
- cp -v GeoLite2-${i}_*/GeoLite2-${i}.mmdb "${GEOLITE_DIR}/"
+ echo -e "\n\n > Downloading Geolite $i into ${GEOLITE_DIR} ... \n"
+ #wget -q http://geolite.maxmind.com/download/geoip/database/GeoLite2-${i}.tar.gz
+ sudo rsync -avch --progress "rsync://${REMOTE_SRV}${REMOTE_DIR}/GeoLite2-${i}.mmdb" "${GEOLITE_DIR}/"
+ #echo "Extracting Geolite $i ..."
+ #tar xf GeoLite2-${i}.tar.gz
+ #echo "Installing Geolite $i into ${GEOLITE_DIR}/GeoLite2-${i}.mmdb ..."
+ #cp -v GeoLite2-${i}_*/GeoLite2-${i}.mmdb "${GEOLITE_DIR}/"
+ #cp -v GeoLite2-${i}.mmdb "${GEOLITE_DIR}/"
done
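+# At this point, ${GEOLITE_DIR} should contain GeoLite2-ASN.mmdb, GeoLite2-Country.mmdb and GeoLite2-City.mmdb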
+
+echo -e "\n\n +++++++++ FINISHED +++++++++ \n\n"
+