
Merge pull request #116 from hifis-net/115-update_v1.18.0
Update v1.18.0
cmeessen authored Mar 6, 2023
2 parents 7a43cb6 + d940c10 commit 062c147
Showing 78 changed files with 1,932 additions and 723 deletions.
91 changes: 91 additions & 0 deletions .github/workflows/e2e_tests_chrome.yml
@@ -0,0 +1,91 @@
# SPDX-FileCopyrightText: 2022 - 2023 Dusan Mijatovic (dv4all)
# SPDX-FileCopyrightText: 2022 - 2023 dv4all
# SPDX-FileCopyrightText: 2022 Dusan Mijatovic (dv4all) (dv4all)
# SPDX-FileCopyrightText: 2022 Ewan Cahen (Netherlands eScience Center) <e.cahen@esciencecenter.nl>
# SPDX-FileCopyrightText: 2022 Netherlands eScience Center
# SPDX-FileCopyrightText: 2023 Christian Meeßen (GFZ) <christian.meessen@gfz-potsdam.de>
# SPDX-FileCopyrightText: 2023 Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences
#
# SPDX-License-Identifier: Apache-2.0
# SPDX-License-Identifier: EUPL-1.2

name: e2e tests chrome
on:
  workflow_dispatch:
  # Disabled automatic tests for HIFIS fork, because the e2e tests sometimes fail randomly
  # push:
  #   branches:
  #     - main
  #   paths:
  #     - "authentication/**"
  #     - "backend-postgrest/**"
  #     - "database/**"
  #     - "e2e/**"
  #     - "frontend/**"
  #     - "nginx/**"
  # pull_request:
  #   paths:
  #     - "authentication/**"
  #     - "backend-postgrest/**"
  #     - "database/**"
  #     - "e2e/**"
  #     - "frontend/**"
  #     - "nginx/**"

jobs:
  ubuntu-v22:
    timeout-minutes: 30
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18.5
          cache: 'npm'
          cache-dependency-path: e2e/package-lock.json
      - name: get playwright version
        id: playwright-version
        working-directory: e2e
        run: echo "PLAYWRIGHT_VERSION=$(node -e "console.log(require('./package-lock.json').dependencies['@playwright/test'].version)")" >> $GITHUB_ENV
      - name: cache playwright binaries
        uses: actions/cache@v3
        id: playwright-cache
        with:
          path: |
            ~/.cache/ms-playwright
          key: ${{ runner.os }}-playwright-${{ env.PLAYWRIGHT_VERSION }}
      - name: install dependencies
        working-directory: e2e
        run: npm ci
      - name: install browsers
        working-directory: e2e
        run: npx playwright install chromium chrome firefox --with-deps
        if: steps.playwright-cache.outputs.cache-hit != 'true'
      - name: build rsd
        working-directory: .
        run: |
          cp e2e/.env.e2e .env
          docker-compose build --parallel database backend auth frontend nginx
      - name: start rsd
        working-directory: .
        run: |
          docker-compose up --detach --scale scrapers=0
          sleep 5
      - name: run e2e tests in chrome
        working-directory: e2e
        run: npm run e2e:chrome:action
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: playwright-report
          path: e2e/playwright-report/
          retention-days: 30
      - uses: actions/upload-artifact@v3
        if: failure()
        with:
          name: browser state and .env file
          path: |
            e2e/state/
            .env
          retention-days: 30

69 changes: 69 additions & 0 deletions .github/workflows/e2e_tests_firefox.yml
@@ -0,0 +1,69 @@
# SPDX-FileCopyrightText: 2022 - 2023 Dusan Mijatovic (dv4all)
# SPDX-FileCopyrightText: 2022 - 2023 dv4all
# SPDX-FileCopyrightText: 2022 Dusan Mijatovic (dv4all) (dv4all)
# SPDX-FileCopyrightText: 2022 Ewan Cahen (Netherlands eScience Center) <e.cahen@esciencecenter.nl>
# SPDX-FileCopyrightText: 2022 Netherlands eScience Center
#
# SPDX-License-Identifier: Apache-2.0

name: e2e tests firefox
on:
  workflow_dispatch:

jobs:
  ubuntu-v22:
    timeout-minutes: 30
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18.5
          cache: 'npm'
          cache-dependency-path: e2e/package-lock.json
      - name: get playwright version
        id: playwright-version
        working-directory: e2e
        run: echo "PLAYWRIGHT_VERSION=$(node -e "console.log(require('./package-lock.json').dependencies['@playwright/test'].version)")" >> $GITHUB_ENV
      - name: cache playwright binaries
        uses: actions/cache@v3
        id: playwright-cache
        with:
          path: |
            ~/.cache/ms-playwright
          key: ${{ runner.os }}-playwright-${{ env.PLAYWRIGHT_VERSION }}
      - name: install dependencies
        working-directory: e2e
        run: npm ci
      - name: install browsers
        working-directory: e2e
        run: npx playwright install chromium chrome firefox --with-deps
        if: steps.playwright-cache.outputs.cache-hit != 'true'
      - name: build rsd
        working-directory: .
        run: |
          cp e2e/.env.e2e .env
          docker-compose build --parallel database backend auth frontend nginx
      - name: start rsd
        working-directory: .
        run: |
          docker-compose up --detach --scale scrapers=0
          sleep 5
      - name: run e2e tests in firefox
        working-directory: e2e
        run: npm run e2e:ff:action
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: playwright-report
          path: e2e/playwright-report/
          retention-days: 30
      - uses: actions/upload-artifact@v3
        if: failure()
        with:
          name: browser state and .env file
          path: |
            e2e/state/
            .env
          retention-days: 30

5 changes: 3 additions & 2 deletions .github/workflows/e2e_tests_ubuntu.yml
@@ -10,8 +10,9 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-License-Identifier: EUPL-1.2

name: e2e tests ubuntu
name: e2e tests firefox
on:
# only manual run
workflow_dispatch:

jobs:
@@ -51,7 +52,7 @@ jobs:
- name: start rsd
working-directory: .
run: |
docker-compose up --detach database backend auth frontend nginx
docker-compose up --detach --scale scrapers=0
sleep 5
- name: run e2e tests
working-directory: e2e
6 changes: 3 additions & 3 deletions CITATION.cff
@@ -51,7 +51,7 @@ references:
- name: "Netherlands eScience Center"
title: "Research Software Directory (as a service)"
doi: 10.5281/zenodo.6379973
version: v1.17.1
version: v1.18.0
repository-code: https://github.com/research-software-directory/RSD-as-a-service
keywords:
- Research Software
@@ -62,5 +62,5 @@ keywords:
license:
- EUPL-1.2
- Apache-2.0
version: hifis-1.6.0
date-released: '2023-02-27'
version: hifis-1.7.0
date-released: '2023-03-06'
14 changes: 7 additions & 7 deletions Makefile
@@ -1,8 +1,8 @@
# SPDX-FileCopyrightText: 2022 - 2023 Christian Meeßen (GFZ) <christian.meessen@gfz-potsdam.de>
# SPDX-FileCopyrightText: 2022 - 2023 Dusan Mijatovic (dv4all)
# SPDX-FileCopyrightText: 2022 - 2023 Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences
# SPDX-FileCopyrightText: 2022 - 2023 dv4all
# SPDX-FileCopyrightText: 2022 Christian Meeßen (GFZ) <christian.meessen@gfz-potsdam.de>
# SPDX-FileCopyrightText: 2022 Ewan Cahen (Netherlands eScience Center) <e.cahen@esciencecenter.nl>
# SPDX-FileCopyrightText: 2022 Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences
# SPDX-FileCopyrightText: 2022 Jesús García Gonzalez (Netherlands eScience Center) <j.g.gonzalez@esciencecenter.nl>
# SPDX-FileCopyrightText: 2022 Netherlands eScience Center
#
@@ -28,14 +28,12 @@ export DGID

# Main commands
# ----------------------------------------------------------------
start:
docker-compose down --volumes #cleanup phase
start: clean
docker-compose build # build all services
docker-compose up --scale data-generation=1 --scale scrapers=0 -d
# open http://localhost to see the application running

install:
docker-compose down --volumes #cleanup phase
install: clean
docker-compose build database backend auth scrapers nginx # exclude frontend and wait for the build to finish
docker-compose up --scale scrapers=0 -d
cd frontend && yarn install -d
@@ -45,6 +43,8 @@ install:
docker-compose up --scale data-generation=1 -d
# All dependencies are installed. The data migration is running in the background. You can now run `make dev` to start the application

clean:
docker-compose down --volumes


dev:
@@ -84,7 +84,7 @@ dev-frontend: frontend/.env.local
e2e-tests:
docker-compose down --volumes
docker-compose build --parallel database backend auth frontend nginx
docker-compose up --detach database backend auth frontend nginx
docker-compose up --detach --scale scrapers=0
sleep 10
docker-compose --file e2e/docker-compose.yml build
docker-compose --file e2e/docker-compose.yml up
@@ -1,7 +1,8 @@
// SPDX-FileCopyrightText: 2022 - 2023 Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences
// SPDX-FileCopyrightText: 2022 Ewan Cahen (Netherlands eScience Center) <e.cahen@esciencecenter.nl>
// SPDX-FileCopyrightText: 2022 Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences
// SPDX-FileCopyrightText: 2022 Matthias Rüster (GFZ) <matthias.ruester@gfz-potsdam.de>
// SPDX-FileCopyrightText: 2022 Netherlands eScience Center
// SPDX-FileCopyrightText: 2023 Christian Meeßen (GFZ) <christian.meessen@gfz-potsdam.de>
//
// SPDX-License-Identifier: Apache-2.0

@@ -34,31 +35,42 @@
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.net.http.HttpResponse.BodyHandlers;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Objects;
import java.util.Set;

public class HelmholtzAaiLogin implements Login {

private final String code;
private final String redirectUrl;
static final String DEFAULT_ORGANISATION = "Helmholtz";

// See https://hifis.net/doc/helmholtz-aai/list-of-vos/#vos-representing-helmholtz-centres
static private final Collection<String> knownHgfOrganisations = Set.<String>of(
"AWI", "CISPA", "DESY", "DKFZ", "DLR", "DZNE", "FZJ", "GEOMAR", "GFZ", "GSI", "hereon", "HMGU", "HZB", "KIT", "MDC", "UFZ"
);

public HelmholtzAaiLogin(String code, String redirectUrl) {
this.code = Objects.requireNonNull(code);
this.redirectUrl = Objects.requireNonNull(redirectUrl);
}

static String getOrganisationFromEntitlements(
JSONArray entitlements,
boolean allowExternal
) {
static String getOrganisationFromEntitlements(JSONArray entitlements) {
if (entitlements == null || entitlements.isEmpty()) {
return allowExternal ? DEFAULT_ORGANISATION : null;
return null;
}

String organisation = DEFAULT_ORGANISATION;
String returnOrganisation;
ArrayList<String> organisationsDelivered = new ArrayList<String>();
boolean helmholtzmemberFound = false;

// Collect all organisations delivered, because the home organisation
// need not be the first one in the list. This assumes that a person
// is only a member of one organisation
String organisation;
for (Object element : entitlements.toArray()) {
organisation = null;
String ent = element.toString();

// we expect this for logins from Helmholtz centres
@@ -72,6 +84,7 @@ static String getOrganisationFromEntitlements(
if (ent.matches("urn:geant:helmholtz\\.de:group:.*")) {
String withoutHash = ent;

// remove everything after the hash
if (ent.contains("#")) {
String[] splitHash = ent.split("#");

@@ -93,17 +106,27 @@

// get organisation from last element
organisation = splitGroup[splitGroup.length - 1];
organisationsDelivered.add(organisation);
}
}

if (!helmholtzmemberFound && !allowExternal) {
// deny login
if (!helmholtzmemberFound) {
return null;
}

// Detect whether one of the delivered organisations is in the list of known HGF centres
organisationsDelivered.retainAll(knownHgfOrganisations);
if (organisationsDelivered.size() == 0) {
// No known HGF organisation could be found
returnOrganisation = DEFAULT_ORGANISATION;
} else {
// Always return the first element in the list, even if there were multiple centres found
returnOrganisation = organisationsDelivered.get(0);
};

// else: we either return the found Helmholtz centre name
// or the default organisation
return organisation;
return returnOrganisation;
}

@Override
@@ -173,15 +196,12 @@ public OpenIdInfo openidInfo() {
}

JSONArray entitlements = (JSONArray) userInfo.getClaim("eduperson_entitlement");
String organisation = getOrganisationFromEntitlements(
entitlements,
Config.helmholtzAaiAllowExternalUsers()
);
String organisation = getOrganisationFromEntitlements(entitlements);

if (organisation == null) {
// login denied by missing entitlements
// or external providers are not allowed
throw new RsdAuthenticationException("You are not allowed to login");
if (organisation == null && !Config.helmholtzAaiAllowExternalUsers()) {
// Login denied because no HGF organisation could be found in eduperson_entitlements
// and because social IdPs are not allowed
throw new RsdAuthenticationException("You are not allowed to login.");
}

return new OpenIdInfo(
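To make the behavioural change in this file easier to follow: a Helmholtz AAI login now always tries to resolve a concrete Helmholtz centre from the eduperson_entitlement claim, falls back to the generic "Helmholtz" organisation when the user is a Helmholtz member of no known centre, and yields null (which rejects the login unless external users are allowed by configuration) when the Helmholtz-member group is missing. The following standalone Java sketch re-expresses that mapping on plain strings for illustration only; the class name, the startsWith check for the Helmholtz-member group, and the main method are assumptions of this sketch, while the real implementation in HelmholtzAaiLogin operates on the JSONArray taken from the claim.

// Illustrative sketch only, not part of the commit: a simplified re-expression
// of the organisation mapping introduced in HelmholtzAaiLogin.
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

public class EntitlementMappingSketch {

	static final String DEFAULT_ORGANISATION = "Helmholtz";
	static final Set<String> KNOWN_HGF_ORGANISATIONS = Set.of(
		"AWI", "CISPA", "DESY", "DKFZ", "DLR", "DZNE", "FZJ", "GEOMAR",
		"GFZ", "GSI", "hereon", "HMGU", "HZB", "KIT", "MDC", "UFZ"
	);

	// Returns the centre name, DEFAULT_ORGANISATION if the user is a Helmholtz
	// member without a known centre VO, or null if the Helmholtz-member group
	// is missing (the login is then rejected unless external users are allowed).
	static String organisationFromEntitlements(List<String> entitlements) {
		if (entitlements == null || entitlements.isEmpty()) {
			return null;
		}
		List<String> delivered = new ArrayList<>();
		boolean helmholtzMemberFound = false;
		for (String ent : entitlements) {
			// marker VO that every Helmholtz member carries (assumed check)
			if (ent.startsWith("urn:geant:helmholtz.de:group:Helmholtz-member")) {
				helmholtzMemberFound = true;
			}
			if (ent.matches("urn:geant:helmholtz\\.de:group:.*")) {
				// drop everything after the hash, then take the last group element, e.g. "GFZ"
				String withoutHash = ent.contains("#") ? ent.split("#")[0] : ent;
				String[] groupParts = withoutHash.split(":");
				delivered.add(groupParts[groupParts.length - 1]);
			}
		}
		if (!helmholtzMemberFound) {
			return null;
		}
		// keep only organisations that are known HGF centres
		delivered.retainAll(KNOWN_HGF_ORGANISATIONS);
		return delivered.isEmpty() ? DEFAULT_ORGANISATION : delivered.get(0);
	}

	public static void main(String[] args) {
		// GFZ member: the centre is resolved even though it is not the first entry
		System.out.println(organisationFromEntitlements(List.of(
			"urn:geant:helmholtz.de:group:Helmholtz-member#login.helmholtz.de",
			"urn:geant:helmholtz.de:group:GFZ#login.helmholtz.de"
		))); // -> GFZ

		// Helmholtz member without a known centre VO: falls back to "Helmholtz"
		System.out.println(organisationFromEntitlements(List.of(
			"urn:geant:helmholtz.de:group:Helmholtz-member#login.helmholtz.de"
		))); // -> Helmholtz

		// no Helmholtz-member group at all: null, i.e. login is rejected
		// unless external users are explicitly allowed
		System.out.println(organisationFromEntitlements(List.of(
			"urn:mace:dir:entitlement:common-lib-terms"
		))); // -> null
	}
}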
(Diffs for the remaining changed files are not shown.)
