diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9342347bdf..5ef0c6bcc0 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,4 +1,6 @@
-version: 2
+version: 2.1
+orbs:
+ node: circleci/node@5.2.0
jobs:
build-macos:
resource_class: macos.m1.medium.gen1
@@ -95,6 +97,44 @@ jobs:
# Remove the temporary certificate file
rm -f certificate.p12
+ build-web:
+ working_directory: ~/tidepool-org/chrome-uploader
+ parallelism: 1
+ # CircleCI 2.0 does not support environment variables that refer to each other the same way as 1.0 did.
+ # If any of these refer to each other, rewrite them so that they don't or see https://circleci.com/docs/2.0/env-vars/#interpolating-environment-variables-to-set-other-environment-variables .
+ environment:
+ HOMEBREW_NO_AUTO_UPDATE: 1
+ BASH_ENV: ".circleci/bash_env.sh"
+ DISPLAY: ":99"
+ docker:
+ - image: cimg/node:18.17.1-browsers
+ steps:
+ - setup_remote_docker:
+ version: docker23
+ - run: sudo apt-get update && sudo apt-get install -y build-essential git curl libusb-1.0 libavutil-dev libxss1 libsecret-1-dev libudev-dev libgtk-3-0 libcanberra-gtk3-module packagekit-gtk3-module chromium-browser fonts-liberation libappindicator3-1 libasound2 libatk-bridge2.0-0 libatspi2.0-0 libcairo2 libcups2 libgbm1 libgdk-pixbuf2.0-0 libgtk-3-0 libpango-1.0-0 libpangocairo-1.0-0 libxcursor1 libxss1 xdg-utils xvfb libdbus-glib-1-2 libgtk-3-dev libxt6
+ - checkout
+ # - run: mv .nvmrc .nvmrc.tmp
+ # - node/install:
+ # install-yarn: true
+ # node-version: '18.17.1'
+ # - run: mv .nvmrc.tmp .nvmrc
+ - run: git submodule sync
+ - run: git submodule update --init
+ - run: echo 'export PATH=${PATH}:${HOME}/${CIRCLE_PROJECT_REPONAME}/node_modules/.bin' >> $BASH_ENV
+ - restore_cache:
+ key: dependency-cache-web-{{ checksum "package.json" }}
+ - run: yarn config set cache-folder ~/.cache/yarn
+ - run: yarn --frozen-lockfile
+ - save_cache:
+ key: dependency-cache-web-{{ checksum "package.json" }}
+ paths:
+ - ~/.cache/yarn
+ - ./node_modules
+ # Test
+ - run: yarn lint
+ - run: Xvfb :99 -screen 0 1280x1024x24 & > /dev/null && yarn test
+ # Build docker image
+ - run: if [ -z "$CIRCLE_PR_NUMBER" ]; then ./artifact.sh; else echo "Forked repo; no docker image built."; fi
build-windows:
machine:
image: windows-server-2022-gui:current
@@ -138,6 +178,10 @@ workflows:
filters:
tags:
only: /^v.*/
+ - build-web:
+ filters:
+ tags:
+ only: /^v.*/
- build-windows:
filters:
tags:
diff --git a/.config.js b/.config.js
index e8d34164da..6c051b079c 100644
--- a/.config.js
+++ b/.config.js
@@ -15,6 +15,85 @@
* == BSD2 LICENSE ==
*/
+import env from "./app/utils/env";
+
+const serverEnvironments = {
+ local: {
+ hosts: ['localhost:3001'],
+ API_URL: 'http://localhost:8009',
+ UPLOAD_URL: 'http://localhost:8009',
+ DATA_URL: 'http://localhost:9220',
+ BLIP_URL: 'http://localhost:3000'
+ },
+ development: {
+ hosts: ['localhost:31500'],
+ API_URL: 'http://localhost:31500',
+ UPLOAD_URL: 'http://localhost:31500',
+ DATA_URL: 'http://localhost:31500/dataservices',
+ BLIP_URL: 'http://localhost:31500'
+ },
+ dev1: {
+ hosts: ['dev1.dev.tidepool.org'],
+ API_URL: 'https://dev1.dev.tidepool.org',
+ UPLOAD_URL: 'https://dev1.dev.tidepool.org',
+ DATA_URL: 'https://dev1.dev.tidepool.org/dataservices',
+ BLIP_URL: 'https://dev1.dev.tidepool.org'
+ },
+ qa1: {
+ hosts: ['qa1.development.tidepool.org', 'dev-app.tidepool.org', 'dev-api.tidepool.org'],
+ API_URL: 'https://qa1.development.tidepool.org',
+ UPLOAD_URL: 'https://qa1.development.tidepool.org',
+ DATA_URL: 'https://qa1.development.tidepool.org/dataservices',
+ BLIP_URL: 'https://qa1.development.tidepool.org'
+ },
+ qa2: {
+ hosts: ['qa2.development.tidepool.org', 'stg-app.tidepool.org', 'stg-api.tidepool.org'],
+ API_URL: 'https://qa2.development.tidepool.org',
+ UPLOAD_URL: 'https://qa2.development.tidepool.org',
+ DATA_URL: 'https://qa2.development.tidepool.org/dataservices',
+ BLIP_URL: 'https://qa2.development.tidepool.org'
+ },
+ qa3: {
+ hosts: ['qa3.development.tidepool.org'],
+ API_URL: 'https://qa3.development.tidepool.org',
+ UPLOAD_URL: 'https://qa3.development.tidepool.org',
+ DATA_URL: 'https://qa3.development.tidepool.org/dataservices',
+ BLIP_URL: 'https://qa3.development.tidepool.org'
+ },
+ int: {
+ hosts: ['external.integration.tidepool.org', 'int-app.tidepool.org', 'int-api.tidepool.org'],
+ API_URL: 'https://external.integration.tidepool.org',
+ UPLOAD_URL: 'https://external.integration.tidepool.org',
+ DATA_URL: 'https://external.integration.tidepool.org/dataservices',
+ BLIP_URL: 'https://external.integration.tidepool.org'
+ },
+ prd: {
+ hosts: ['app.tidepool.org', 'api.tidepool.org', 'prd-app.tidepool.org', 'prd-api.tidepool.org'],
+ API_URL: 'https://api.tidepool.org',
+ UPLOAD_URL: 'https://api.tidepool.org',
+ DATA_URL: 'https://api.tidepool.org/dataservices',
+ BLIP_URL: 'https://app.tidepool.org'
+ },
+};
+
+function serverEnvFromLocation() {
+ const url = new URL(window.location.href);
+ let host = url.hostname;
+ if (host === 'localhost') {
+ host += ':' + url.port;
+ }
+ return serverEnvFromHost(host)
+}
+
+function serverEnvFromHost(host) {
+ for (const [server, environment] of Object.entries(serverEnvironments)) {
+ if (_.includes(environment.hosts, host)) {
+ return server
+ }
+ }
+ return 'prd';
+}
+
function stringToBoolean(str, defaultValue) {
if (str === 'true') {
return true;
@@ -32,14 +111,17 @@ function stringToArray(str, defaultValue) {
return str.split(',');
}
+const selectedServerEnv = env.browser ? serverEnvFromLocation() : 'prd';
+
module.exports = {
// this is to always have the Bows logger turned on!
// NB: it is distinct from our own "debug mode"
DEBUG: stringToBoolean(process.env.DEBUG, true),
// the defaults for these need to be pointing to prod
- API_URL: process.env.API_URL || 'https://api.tidepool.org',
- UPLOAD_URL: process.env.UPLOAD_URL || 'https://uploads.tidepool.org',
- DATA_URL: process.env.DATA_URL || 'https://api.tidepool.org/dataservices',
- BLIP_URL: process.env.BLIP_URL || 'https://app.tidepool.org',
+ API_URL: process.env.API_URL || serverEnvironments[selectedServerEnv].API_URL,
+ UPLOAD_URL: process.env.UPLOAD_URL || serverEnvironments[selectedServerEnv].UPLOAD_URL,
+ DATA_URL: process.env.DATA_URL || serverEnvironments[selectedServerEnv].DATA_URL,
+ BLIP_URL: process.env.BLIP_URL || serverEnvironments[selectedServerEnv].BLIP_URL,
DEFAULT_TIMEZONE: process.env.DEFAULT_TIMEZONE || 'America/Los_Angeles',
+ I18N_ENABLED: stringToBoolean(process.env.I18N_ENABLED, false),
};
diff --git a/.dockerignore b/.dockerignore
index af27d5c322..88e1c42704 100755
--- a/.dockerignore
+++ b/.dockerignore
@@ -2,6 +2,8 @@
Dockerfile
+Dockerfile.dev
+
docker-compose.yaml
-.env
\ No newline at end of file
+.env
diff --git a/.gitignore b/.gitignore
index 4df06bcd8a..65285681a9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,7 +10,9 @@ lib-cov
pids
logs
results
-config/
+config/*
+!config/local.sh
+!config/local.example.js
build
build/Release
.eslintcache
diff --git a/Dockerfile b/Dockerfile
old mode 100755
new mode 100644
index ae60bc71ef..7f5742955f
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,41 +1,91 @@
-FROM ubuntu:18.04
+FROM node:18.17.1-alpine as base
+WORKDIR /app
+RUN mkdir -p dist node_modules .yarn-cache && chown -R node:node .
-ENV DEBIAN_FRONTEND noninteractive
-ENV NODE_VERSION "v16.14.2"
-
-# Lots of packages. Some dependencies and stuff for GUI.
-RUN apt-get -qq -y update && \
- apt-get -qq -y install build-essential git curl libusb-1.0 libavutil-dev libxss1 \
- libsecret-1-dev libudev-dev libgtk-3-0 libcanberra-gtk3-module packagekit-gtk3-module \
- chromium-browser
-
-RUN useradd -s /bin/bash node && mkdir -p /home/node/.config \
- && chown -R node:node /home/node
-
-# Yarn
-RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
-
-RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
-RUN apt-get -qq -y update && apt-get -qq -y install yarn
-
-# Node
-RUN curl -O https://nodejs.org/download/release/$NODE_VERSION/node-$NODE_VERSION-linux-x64.tar.gz \
- && tar -xzf node-$NODE_VERSION-linux-x64.tar.gz -C /usr/local/bin
-
-ENV PATH=/usr/local/bin/node-$NODE_VERSION-linux-x64/bin:${PATH}
-
-RUN chown -R node:$(id -gn node) /home/node/.config
-
-WORKDIR /home/node
-
-RUN mkdir uploader
-
-ENV NODE_ENV "development"
-
-WORKDIR /home/node/uploader/
-
-COPY entrypoint.sh entrypoint.sh
+FROM base as build
+ARG API_URL
+ARG UPLOAD_URL
+ARG DATA_URL
+ARG BLIP_URL
+ARG REALM_HOST
+ARG PORT=3001
+ARG SERVICE_NAME=uploader
+ARG ROLLBAR_POST_SERVER_TOKEN
+ARG I18N_ENABLED=false
+ARG RX_ENABLED=false
+ARG PENDO_ENABLED=true
+ARG TRAVIS_COMMIT
+# Set ENV from ARGs
+ENV \
+ API_URL=$API_URL \
+ UPLOAD_URL=$UPLOAD_URL \
+ DATA_URL=$DATA_URL \
+ BLIP_URL=$BLIP_URL \
+ REALM_HOST=$REALM_HOST \
+ PORT=$PORT \
+ SERVICE_NAME=$SERVICE_NAME \
+ ROLLBAR_POST_TOKEN=$ROLLBAR_POST_SERVER_TOKEN \
+ I18N_ENABLED=$I18N_ENABLED \
+ RX_ENABLED=$RX_ENABLED \
+ PENDO_ENABLED=$PENDO_ENABLED \
+ TRAVIS_COMMIT=$TRAVIS_COMMIT \
+ NODE_ENV=development
+# Install dependencies
+RUN \
+ echo "http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories \
+ && echo "http://dl-cdn.alpinelinux.org/alpine/edge/main" >> /etc/apk/repositories \
+ && echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing" >> /etc/apk/repositories \
+ && apk --no-cache update \
+ && apk --no-cache upgrade \
+ && apk add --no-cache --virtual .build-deps alpine-sdk python3 linux-headers eudev-dev ffmpeg-dev \
+ && rm -rf /var/cache/apk/* /tmp/*
USER node
+RUN mkdir -p /home/node/.yarn-cache /home/node/.cache/yarn
+COPY --chown=node:node package.json yarn.lock ./
+RUN --mount=type=cache,target=/home/node/.yarn-cache,id=yarn,uid=1000,gid=1000 yarn install --ignore-scripts --cache-folder /home/node/.yarn-cache
+# Copy source files, and possibly invalidate so we have to rebuild
+COPY --chown=node:node . .
+RUN npm run build-web
+USER root
+RUN apk del .build-deps
-ENTRYPOINT ["/bin/bash", "entrypoint.sh"]
+FROM base as production
+ARG API_URL
+ARG UPLOAD_URL
+ARG DATA_URL
+ARG BLIP_URL
+ARG REALM_HOST
+ARG PORT=3001
+ARG SERVICE_NAME=uploader
+ARG ROLLBAR_POST_SERVER_TOKEN
+ARG I18N_ENABLED=false
+ARG RX_ENABLED=false
+ARG PENDO_ENABLED=true
+ARG TRAVIS_COMMIT
+# Set ENV from ARGs
+ENV \
+ API_URL=$API_URL \
+ UPLOAD_URL=$UPLOAD_URL \
+ DATA_URL=$DATA_URL \
+ BLIP_URL=$BLIP_URL \
+ REALM_HOST=$REALM_HOST \
+ PORT=$PORT \
+ SERVICE_NAME=$SERVICE_NAME \
+ ROLLBAR_POST_TOKEN=$ROLLBAR_POST_SERVER_TOKEN \
+ I18N_ENABLED=$I18N_ENABLED \
+ RX_ENABLED=$RX_ENABLED \
+ PENDO_ENABLED=$PENDO_ENABLED \
+ TRAVIS_COMMIT=$TRAVIS_COMMIT \
+ NODE_ENV=production
+# Only install dependencies needed for the production server
+USER node
+RUN yarn add express@4.16.3 helmet@7.0.0 body-parser@1.18.3
+# Copy only files needed to run the server
+COPY --from=build /app/dist dist
+COPY --from=build \
+ /app/config.server.js \
+ /app/package.json \
+ /app/server.js \
+ ./
+CMD ["node", "server.js"]
diff --git a/Dockerfile.dev b/Dockerfile.dev
new file mode 100755
index 0000000000..283690b1a6
--- /dev/null
+++ b/Dockerfile.dev
@@ -0,0 +1,41 @@
+FROM ubuntu:18.04
+
+ENV DEBIAN_FRONTEND noninteractive
+ENV NODE_VERSION "v12.13.0"
+
+# Lots of packages. Some dependencies and stuff for GUI.
+RUN apt-get -qq -y update && \
+ apt-get -qq -y install build-essential git curl libusb-1.0 libavutil-dev libxss1 \
+ libsecret-1-dev libudev-dev libgtk-3-0 libcanberra-gtk3-module packagekit-gtk3-module \
+ chromium-browser
+
+RUN useradd -s /bin/bash node && mkdir -p /home/node/.config \
+ && chown -R node:node /home/node
+
+# Yarn
+RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
+
+RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
+RUN apt-get -qq -y update && apt-get -qq -y install yarn
+
+# Node
+RUN curl -O https://nodejs.org/download/release/$NODE_VERSION/node-$NODE_VERSION-linux-x64.tar.gz \
+ && tar -xzf node-$NODE_VERSION-linux-x64.tar.gz -C /usr/local/bin
+
+ENV PATH=/usr/local/bin/node-$NODE_VERSION-linux-x64/bin:${PATH}
+
+RUN chown -R node:$(id -gn node) /home/node/.config
+
+WORKDIR /home/node
+
+RUN mkdir uploader
+
+ENV NODE_ENV "development"
+
+WORKDIR /home/node/uploader/
+
+COPY entrypoint.sh entrypoint.sh
+
+USER node
+
+ENTRYPOINT ["/bin/bash", "entrypoint.sh"]
diff --git a/app/actions/async.js b/app/actions/async.js
index 9eb7e60c45..7ec2dba91e 100644
--- a/app/actions/async.js
+++ b/app/actions/async.js
@@ -17,11 +17,12 @@
import async from 'async';
import { push } from 'connected-react-router';
-import { ipcRenderer } from 'electron';
import _ from 'lodash';
-import os from 'os';
-import { checkCacheValid } from 'redux-cache';
import semver from 'semver';
+import { get, set, del } from 'idb-keyval';
+
+import { checkCacheValid } from 'redux-cache';
+import { ipcRenderer } from '../utils/ipc';
import * as actionSources from '../constants/actionSources';
import * as actionTypes from '../constants/actionTypes';
@@ -36,13 +37,14 @@ import personUtils from '../../lib/core/personUtils';
import { clinicUIDetails } from '../../lib/core/clinicUtils';
import * as sync from './sync';
import * as actionUtils from './utils';
+import env from '../utils/env';
let services = { api };
let versionInfo = {};
let hostMap = {
- 'darwin': 'mac',
- 'win32' : 'win',
- 'linux': 'linux',
+ 'macOS': 'mac',
+ 'Windows' : 'win',
+ 'Linux': 'linux',
};
const isBrowser = typeof window !== 'undefined';
@@ -95,13 +97,16 @@ export function doAppInit(opts, servicesToInit) {
const { api, device, log } = services;
dispatch(sync.initializeAppRequest());
- dispatch(sync.hideUnavailableDevices(opts.os || hostMap[os.platform()]));
+ log('Platform detected:', navigator.userAgentData.platform);
+ dispatch(sync.hideUnavailableDevices(opts.os || hostMap[navigator.userAgentData.platform]));
log('Getting OS details.');
await actionUtils.initOSDetails();
ipcRenderer.on('bluetooth-pairing-request', async (event, details) => {
- const displayBluetoothModal = actionUtils.makeDisplayBluetoothModal(dispatch);
+ const displayBluetoothModal = actionUtils.makeDisplayBluetoothModal(
+ dispatch
+ );
displayBluetoothModal((response) => {
ipcRenderer.send('bluetooth-pairing-response', response);
}, details);
@@ -440,6 +445,7 @@ export function doUpload(deviceKey, opts, utc) {
return async (dispatch, getState) => {
const { devices, uploadTargetUser, working } = getState();
+ const { log } = services;
const targetDevice = _.get(devices, deviceKey);
const driverId = _.get(targetDevice, 'source.driverId');
@@ -454,11 +460,27 @@ export function doUpload(deviceKey, opts, utc) {
}));
try {
- ipcRenderer.send('setSerialPortFilter', filters);
- opts.port = await navigator.serial.requestPort({ filters: filters });
+ const existingPermissions = await navigator.serial.getPorts();
+
+ for (let i = 0; i < existingPermissions.length; i++) {
+ const { usbProductId, usbVendorId } = existingPermissions[i].getInfo();
+
+ for (let j = 0; j < driverManifest.usb.length; j++) {
+ if (driverManifest.usb[j].vendorId === usbVendorId
+ && driverManifest.usb[j].productId === usbProductId) {
+ log('Device has already been granted permission');
+ opts.port = existingPermissions[i];
+ }
+ }
+ }
+
+ if (opts.port == null) {
+ ipcRenderer.send('setSerialPortFilter', filters);
+ opts.port = await navigator.serial.requestPort({ filters: filters });
+ }
} catch (err) {
// not returning error, as we'll attempt user-space driver instead
- console.log('Error:', err);
+ log('Error:', err);
}
}
@@ -477,7 +499,7 @@ export function doUpload(deviceKey, opts, utc) {
for (let j = 0; j < driverManifest.usb.length; j++) {
if (driverManifest.usb[j].vendorId === existingPermissions[i].vendorId
&& driverManifest.usb[j].productId === existingPermissions[i].productId) {
- console.log('Device has already been granted permission');
+ log('Device has already been granted permission');
opts.hidDevice = existingPermissions[i];
}
}
@@ -498,7 +520,7 @@ export function doUpload(deviceKey, opts, utc) {
const os = actionUtils.getOSDetails();
const version = versionInfo.semver;
- console.log('Error:', err);
+ log('Error:', err);
let hidErr = new Error(ErrorMessages.E_HID_CONNECTION);
@@ -535,7 +557,7 @@ export function doUpload(deviceKey, opts, utc) {
// we need to to scan for Bluetooth devices before the version check,
// otherwise it doesn't count as a response to a user request anymore
dispatch(sync.uploadRequest(uploadTargetUser, devices[deviceKey], utc));
- console.log('Scanning..');
+ log('Scanning..');
try {
await opts.ble.scan();
} catch (err) {
@@ -545,7 +567,7 @@ export function doUpload(deviceKey, opts, utc) {
const clinic = _.get(clinics, selectedClinicId, {});
const os = actionUtils.getOSDetails();
const version = versionInfo.semver;
- console.log('Error:', err);
+ log('Error:', err);
let btErr = new Error(ErrorMessages.E_BLUETOOTH_OFF);
let errProps = {
@@ -570,7 +592,7 @@ export function doUpload(deviceKey, opts, utc) {
}
return dispatch(sync.uploadFailure(btErr, errProps, devices[deviceKey]));
}
- console.log('Done.');
+ log('Done.');
}
dispatch(sync.versionCheckRequest());
@@ -615,19 +637,101 @@ export function doUpload(deviceKey, opts, utc) {
}
export function readFile(userId, deviceKey, file, extension) {
- return (dispatch, getState) => {
+ const { log } = services;
+
+ return async (dispatch, getState) => {
if (!file) {
- return;
+ const getFile = async () => {
+ dispatch(sync.choosingFile(userId, deviceKey));
+ const regex = new RegExp('.+\.ibf', 'g');
+
+ for await (const entry of dirHandle.values()) {
+ log(entry);
+ // On Eros PDM there should only be one .ibf file
+ if (regex.test(entry.name)) {
+ file = {
+ handle: await entry.getFile(),
+ name: entry.name,
+ };
+ }
+ }
+ };
+
+ let dirHandle = await get('directory');
+ const version = versionInfo.semver;
+
+ if (dirHandle) {
+ log(`Retrieved directory handle "${dirHandle.name}" from indexedDB.`);
+ if ((await dirHandle.queryPermission()) === 'granted') {
+ log('Permission already granted.');
+ try {
+ await getFile();
+ } catch (error) {
+ log('Device not ready yet or not plugged in.', error);
+ let err = new Error(ErrorMessages.E_NOT_YET_READY);
+ let errProps = {
+ code: 'E_NOT_YET_READY',
+ version: version,
+ };
+ return dispatch(sync.readFileAborted(err, errProps));
+ }
+ } else {
+ log('Requesting permission..');
+ if ((await dirHandle.requestPermission()) === 'granted') {
+ try {
+ await getFile();
+ } catch (err) {
+ // device mounted on a different drive number/letter, so we'll have to
+ // show directory picker again
+ log(err.name, err.message);
+ try {
+ dirHandle = await window.showDirectoryPicker();
+ await set('directory', dirHandle);
+ await getFile();
+ } catch (error) {
+ let err = new Error(`${ErrorMessages.E_READ_FILE}: ${error.message}`);
+ let errProps = {
+ code: 'E_READ_FILE',
+ version: version
+ };
+ return dispatch(sync.readFileAborted(err, errProps));
+ }
+ }
+ } else {
+ let err = new Error(ErrorMessages.E_READ_FILE);
+ let errProps = {
+ code: 'E_READ_FILE',
+ version: version
+ };
+ return dispatch(sync.readFileAborted(err, errProps));
+ }
+ }
+ } else {
+ try {
+ dirHandle = await window.showDirectoryPicker();
+ await set('directory', dirHandle);
+ await getFile();
+ } catch (error) {
+ let err = new Error(`${ErrorMessages.E_READ_FILE}: ${error.message}`);
+ let errProps = {
+ code: 'E_READ_FILE',
+ version: version
+ };
+ return dispatch(sync.readFileAborted(err, errProps));
+ }
+ }
}
- dispatch(sync.choosingFile(userId, deviceKey));
+
const version = versionInfo.semver;
- if (file.name.slice(-extension.length) !== extension) {
+ if (!file || file.name.slice(-extension.length) !== extension) {
let err = new Error(ErrorMessages.E_FILE_EXT + extension);
let errProps = {
code: 'E_FILE_EXT',
version: version
};
+ log('Wrong directory selected');
+ del('directory');
return dispatch(sync.readFileAborted(err, errProps));
}
else {
@@ -636,7 +740,7 @@ export function readFile(userId, deviceKey, file, extension) {
dispatch(sync.readFileRequest(userId, deviceKey, file.name));
};
- reader.onerror = () => {
+ const onError = () => {
let err = new Error(ErrorMessages.E_READ_FILE + file.name);
let errProps = {
code: 'E_READ_FILE',
@@ -645,14 +749,39 @@ export function readFile(userId, deviceKey, file, extension) {
return dispatch(sync.readFileFailure(err, errProps));
};
- reader.onloadend = ((theFile) => {
- return (e) => {
- dispatch(sync.readFileSuccess(userId, deviceKey, e.srcElement.result));
- dispatch(doUpload(deviceKey));
+ if (file.handle) {
+ // we're using File System Access API
+ dispatch(sync.readFileRequest(userId, deviceKey, file.name));
+ try {
+ const filedata = await file.handle.arrayBuffer();
+
+ dispatch(sync.readFileSuccess(userId, deviceKey, filedata));
+ const opts = {
+ filename : file.name,
+ filedata : filedata,
+ };
+ return dispatch(doUpload(deviceKey, opts));
+ } catch (err) {
+ log('Error', err);
+ return onError();
+ }
+ } else {
+ let reader = new FileReader();
+ reader.onloadstart = () => {
+ dispatch(sync.readFileRequest(userId, deviceKey, file.name));
};
- })(file);
- reader.readAsArrayBuffer(file);
+ reader.onerror = onError;
+
+ reader.onloadend = ((theFile) => {
+ return (e) => {
+ dispatch(sync.readFileSuccess(userId, deviceKey, e.srcElement.result));
+ dispatch(doUpload(deviceKey));
+ };
+ })(file);
+
+ reader.readAsArrayBuffer(file);
+ }
}
};
}
@@ -662,6 +791,9 @@ export function doVersionCheck() {
dispatch(sync.versionCheckRequest());
const { api } = services;
const version = versionInfo.semver;
+ if(env.browser){
+ return dispatch(sync.versionCheckSuccess());
+ }
api.upload.getVersions((err, versions) => {
if (err) {
return dispatch(sync.versionCheckFailure(err));
@@ -1128,10 +1260,18 @@ export function clickAddNewUser(){
export function setPage(page, actionSource = actionSources[actionTypes.SET_PAGE], metric) {
return (dispatch, getState) => {
- if(pagesMap[page]){
+ if (pagesMap[page]) {
+ const pageProps = { pathname: pagesMap[page] };
+
const meta = { source: actionSource };
_.assign(meta, metric);
- dispatch(push({pathname: pagesMap[page], state: { meta }}));
+ pageProps.state = { meta };
+
+ const { hash } = window.location;
+ if (hash) {
+ pageProps.hash = hash;
+ }
+ dispatch(push(pageProps));
}
};
}
diff --git a/app/actions/sync.js b/app/actions/sync.js
index ed3d7a67aa..e3865e96d0 100644
--- a/app/actions/sync.js
+++ b/app/actions/sync.js
@@ -759,56 +759,6 @@ export function quitAndInstall() {
};
}
-/*
- * relating to driver updates
- */
-
-export function checkingForDriverUpdate() {
- return {
- type: ActionTypes.CHECKING_FOR_DRIVER_UPDATE,
- meta: { source: actionSources[ActionTypes.CHECKING_FOR_DRIVER_UPDATE] }
- };
-}
-
-export function driverUpdateAvailable(current, available) {
- return {
- type: ActionTypes.DRIVER_UPDATE_AVAILABLE,
- payload: { current, available },
- meta: { source: actionSources[ActionTypes.DRIVER_UPDATE_AVAILABLE] }
- };
-}
-
-export function driverUpdateNotAvailable() {
- return {
- type: ActionTypes.DRIVER_UPDATE_NOT_AVAILABLE,
- meta: { source: actionSources[ActionTypes.DRIVER_UPDATE_NOT_AVAILABLE] }
- };
-}
-
-export function dismissDriverUpdateAvailable() {
- return {
- type: ActionTypes.DISMISS_DRIVER_UPDATE_AVAILABLE,
- meta: { source: actionSources[ActionTypes.DISMISS_DRIVER_UPDATE_AVAILABLE] }
- };
-}
-
-export function driverInstall() {
- return {
- type: ActionTypes.DRIVER_INSTALL,
- meta: {
- source: actionSources[ActionTypes.DRIVER_INSTALL]
- }
- };
-}
-
-export function driverUpdateShellOpts(opts) {
- return {
- type: ActionTypes.DRIVER_INSTALL_SHELL_OPTS,
- payload: { opts },
- meta: {source: actionSources[ActionTypes.DRIVER_INSTALL_SHELL_OPTS] }
- };
-}
-
export function deviceTimeIncorrect(callback, cfg, times) {
return {
type: ActionTypes.DEVICE_TIME_INCORRECT,
diff --git a/app/auth.js b/app/auth.js
new file mode 100644
index 0000000000..0eaf5b25f2
--- /dev/null
+++ b/app/auth.js
@@ -0,0 +1,217 @@
+import { UserManager } from 'oidc-client-ts';
+import Keycloak from 'keycloak-js/dist/keycloak.mjs';
+import React, { useState, useMemo, useEffect, useCallback } from 'react';
+import { AuthProvider } from 'react-oidc-context';
+import { useSelector, useStore } from 'react-redux';
+import _ from 'lodash';
+import * as ActionTypes from './constants/actionTypes';
+import { sync, async } from './actions';
+import api from '../lib/core/api';
+import env from './utils/env';
+import { ipcRenderer } from './utils/ipc';
+
+/**
+ * @type {Keycloak}
+ */
+export let keycloak = null;
+
+/**
+ * @type {UserManager}
+ */
+let userManager;
+
+export const oidcMiddleware = api => storeAPI => next => action => {
+ switch (action.type) {
+ case ActionTypes.KEYCLOAK_READY: {
+ const blipUrl = storeAPI.getState()?.blipUrls?.blipUrl;
+ if (blipUrl) {
+ const blipHref = new URL(blipUrl).href;
+ const registrationUrl = keycloak.createRegisterUrl({
+ redirectUri: blipHref,
+ });
+ ipcRenderer.send('keycloakRegistrationUrl', registrationUrl);
+ storeAPI.dispatch(sync.setKeycloakRegistrationUrl(registrationUrl));
+ }
+ break;
+ }
+ case ActionTypes.SET_BLIP_URL: {
+ const blipUrl = action?.payload?.url;
+ const initialized = storeAPI.getState()?.keycloakConfig?.initialized;
+ if (blipUrl && initialized && keycloak) {
+ const blipHref = new URL(blipUrl).href;
+ const registrationUrl = keycloak.createRegisterUrl({
+ redirectUri: blipHref,
+ });
+ ipcRenderer.send('keycloakRegistrationUrl', registrationUrl);
+ storeAPI.dispatch(sync.setKeycloakRegistrationUrl(registrationUrl));
+ }
+ break;
+ }
+ case ActionTypes.LOGOUT_REQUEST: {
+ userManager?.removeUser();
+ break;
+ }
+ case ActionTypes.LOGOUT_SUCCESS:
+ case ActionTypes.LOGOUT_FAILURE: {
+ if (!env.electron) {
+ userManager?.signoutSilent();
+ }
+ break;
+ }
+ default: {
+ if (
+ action?.error?.status === 401 ||
+ action?.error?.originalError?.status === 401 ||
+ action?.error?.status === 403 ||
+ action?.error?.originalError?.status === 403 ||
+ action?.payload?.status === 401 ||
+ action?.payload?.originalError?.status === 401 ||
+ action?.payload?.status === 403 ||
+ action?.payload?.originalError?.status === 403
+ ) {
+ // on any action with a 401 or 403, we try to refresh the oidc token to verify
+ // if the user is still logged in
+
+ userManager.signinSilent().then(user => {
+ if (!user) {
+ storeAPI.dispatch(sync.keycloakAuthRefreshError('onAuthRefreshError', null));
+ storeAPI.dispatch(async.doLoggedOut());
+ }
+ }).catch(err => {
+ // if the silent signin errors, we consider the user logged out
+ storeAPI.dispatch(sync.keycloakAuthRefreshError('onAuthRefreshError', err));
+ storeAPI.dispatch(async.doLoggedOut());
+ });
+ }
+ break;
+ }
+ }
+ return next(action);
+};
+
+let refreshCount = 0;
+
+export const OidcWrapper = props => {
+ const [wrapperUserManager, setUserManager] = useState(null);
+ const blipUrl = useSelector(state => state.blipUrls.blipUrl);
+ const blipRedirect = useMemo(() => {
+ if (!blipUrl) return null;
+ const url = new URL(`${blipUrl}upload-redirect`);
+ return url.href;
+ }, [blipUrl]);
+ const keycloakConfig = useSelector(state => state.keycloakConfig);
+ const { url, realm } = keycloakConfig;
+ const authority = useMemo(
+ () =>
+ keycloakConfig?.url && keycloakConfig?.realm ? `${keycloakConfig?.url}/realms/${keycloakConfig?.realm}` : null,
+ [keycloakConfig?.url, keycloakConfig?.realm],
+ );
+ const [, updateState] = useState();
+ const forceUpdate = useCallback(() => updateState({}), []);
+ const store = useStore();
+ const isOauthRedirectRoute = /^(\/upload-redirect)/.test(window?.location?.pathname);
+
+ useEffect(() => {
+ if (!authority || !blipRedirect) return;
+
+ userManager = new UserManager({
+ authority: authority,
+ client_id: 'tidepool-uploader-sso',
+ redirect_uri: blipRedirect,
+ response_mode: 'fragment',
+ monitorSession: !env.electron,
+ });
+
+ const loggedOut = () => {
+ store.dispatch(async.doLoggedOut());
+ };
+
+ const loggedIn = (user) => {
+ store.dispatch(sync.keycloakAuthSuccess('onAuthSuccess', null));
+ api.user.saveSession(user.profile.sub, user.access_token, {
+ noRefresh: true,
+ });
+ if (!store.getState().loggedInUser) {
+ store.dispatch(async.doLogin());
+ }
+ };
+
+ userManager.events.addUserSignedIn(() => {
+ userManager.getUser().then(loggedIn);
+ });
+
+ userManager.events.addUserLoaded(loggedIn);
+
+ userManager.events.addAccessTokenExpired(() => {
+ store.dispatch(sync.keycloakTokenExpired('onTokenExpired', null));
+ loggedOut();
+ });
+
+ userManager.events.addSilentRenewError(() => {
+ store.dispatch(sync.keycloakAuthRefreshError('onAuthRefreshError', null));
+ loggedOut();
+ });
+
+ userManager.events.addUserUnloaded(() => {
+ store.dispatch(sync.keycloakAuthLogout('onAuthLogout', null));
+ });
+
+ const keycloakInitOptions = {
+ checkLoginIframe: false,
+ enableLogging: process.env.NODE_ENV === 'development',
+ redirectUri: blipRedirect,
+ };
+
+ keycloak = new Keycloak({
+ url: url,
+ realm: realm,
+ clientId: 'tidepool-uploader-sso',
+ });
+
+ keycloak.init(keycloakInitOptions).then(() => {
+ const logoutUrl = keycloak.createLogoutUrl({
+ redirectUri: 'tidepooluploader://localhost/keycloak-redirect',
+ });
+ store.dispatch(sync.keycloakReady('onReady', null, logoutUrl));
+ });
+
+ setUserManager(userManager);
+ refreshCount++;
+
+ }, [authority, blipRedirect, store, url, realm]);
+
+ // watch for hash changes and re-instantiate the authClient and force a re-render of the provider
+ // incrementing externally defined `key` forces unmount/remount as provider doesn't expect to
+ // have the authClient refreshed and only sets up refresh timeout on mount
+ if(env.electron){
+ const onHashChange = useCallback(async () => {
+ if(!await userManager.getUser()){
+ refreshCount++;
+ forceUpdate();
+ }
+ }, [forceUpdate]);
+
+ useEffect(() => {
+ window.addEventListener('hashchange', onHashChange, false);
+ return () => {
+ window.removeEventListener('hashchange', onHashChange, false);
+ };
+ }, [onHashChange]);
+ }
+
+ if (authority && blipRedirect && wrapperUserManager && !isOauthRedirectRoute) {
+ return (
+
{updateText.NEEDS_UPDATED}
{updateText.IMPROVEMENTS}
- {i18n.t('Follow')} {i18n.t('these instructions')} {i18n.t('to do so.')}} + {i18n.t('Follow')} {i18n.t('these instructions')} {i18n.t('to do so.')}