Merge branch 'develop' into feature/modeling/inline-ai-feedback
LeonWehrhahn authored Nov 29, 2024
2 parents 2fb61bd + b365494 commit 78e1a4a
Showing 178 changed files with 5,487 additions and 2,557 deletions.
8 changes: 4 additions & 4 deletions .github/PULL_REQUEST_TEMPLATE.md
@@ -111,10 +111,10 @@ Prerequisites:
- [ ] Test 2

### Test Coverage
<!-- Please add the test coverages for all changed files here. You can see this when executing the tests locally (see build.gradle and package.json) or when looking into the corresponding Bamboo build plan. -->
<!-- The line coverage must be above 90% for changes files and you must use extensive and useful assertions for server tests and expect statements for client tests. -->
<!-- Note: Use the table below and confirm in the last column that you have implemented extensive assertions for server tests and expect statements for client tests. -->
<!-- You can use `supporting_script/generate_code_cov_table/generate_code_cov_table.py` to automatically generate one from the corresponding Bamboo build plan artefacts. -->
<!-- Please add the test coverages for all changed files modified in this PR here. You can use `supporting_script/generate_code_cov_table/generate_code_cov_table.py` to automatically generate the coverage table from the corresponding artefacts of your branch (follow the ReadMe for setup details). -->
<!-- Alternatively you can execute the tests locally (see build.gradle and package.json) or look into the corresponding artefacts. -->
<!-- The line coverage must be above 90% for changed files, and you must use extensive and useful assertions for server tests and expect statements for client tests. -->
<!-- Note: Confirm in the last column that you have implemented extensive assertions for server tests and expect statements for client tests. -->
<!-- Remove rows with only trivial changes from the table. -->
<!--
| Class/File | Line Coverage | Confirmation (assert/expect) |
2 changes: 1 addition & 1 deletion README.md
@@ -193,7 +193,7 @@ Refer to [Using JHipster in production](http://www.jhipster.tech/production) for
The following command can automate the deployment to a server. The example shows the deployment to the main Artemis test server (which runs a virtual machine):

```shell
./artemis-server-cli deploy username@artemistest.ase.in.tum.de -w build/libs/Artemis-7.7.3.war
./artemis-server-cli deploy username@artemistest.ase.in.tum.de -w build/libs/Artemis-7.7.4.war
```

## Architecture
2 changes: 1 addition & 1 deletion build.gradle
@@ -25,7 +25,7 @@ plugins {
}

group = "de.tum.cit.aet.artemis"
version = "7.7.3"
version = "7.7.4"
description = "Interactive Learning with Individual Feedback"

java {
39 changes: 2 additions & 37 deletions package-lock.json

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "artemis",
"version": "7.7.3",
"version": "7.7.4",
"description": "Interactive Learning with Individual Feedback",
"private": true,
"license": "MIT",
@@ -69,7 +69,6 @@
"papaparse": "5.4.1",
"pdf-lib": "1.17.1",
"pdfjs-dist": "4.8.69",
"posthog-js": "1.187.2",
"rxjs": "7.8.1",
"simple-statistics": "7.8.7",
"smoothscroll-polyfill": "0.4.4",
(next changed file; filename not rendered in this view)
@@ -3,6 +3,13 @@
import java.io.Serial;
import java.io.Serializable;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;

// NOTE: this data structure is used in shared code between core and build agent nodes. Changing it requires that the shared data structures in Hazelcast (or potentially Redis in
// the future) are migrated or cleared. Changes should be communicated in release notes as potentially breaking changes.
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
public record BuildAgentDTO(String name, String memberAddress, String displayName) implements Serializable {

@Serial
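The NOTE in the hunk above explains that BuildAgentDTO is shared between core and build agent nodes, which may not always run exactly the same version. A minimal sketch of how the two added Jackson annotations keep mixed-version (de)serialization tolerant (illustration only, not part of this commit; assumes Jackson 2.12+ with built-in record support):

```java
import com.fasterxml.jackson.databind.ObjectMapper;

public class BuildAgentDtoCompatibilityExample {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // Payload as a newer node might write it, including a field this version does not know.
        String json = """
                {"name":"agent-1","memberAddress":"10.0.0.5:5701","displayName":"Agent 1","newerField":"x"}
                """;

        // @JsonIgnoreProperties(ignoreUnknown = true): the unknown "newerField" is skipped
        // instead of failing deserialization on an older node.
        BuildAgentDTO agent = mapper.readValue(json, BuildAgentDTO.class);

        // @JsonInclude(NON_EMPTY): null or empty components are omitted on serialization,
        // so readers never see explicit nulls for fields they may not expect.
        String out = mapper.writeValueAsString(new BuildAgentDTO(agent.name(), agent.memberAddress(), null));
        System.out.println(out); // {"name":"agent-1","memberAddress":"10.0.0.5:5701"}
    }
}
```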
(next changed file; filename not rendered in this view)
@@ -48,4 +48,9 @@ public BuildJobQueueItem(BuildJobQueueItem queueItem, ResultDTO submissionResult
this(queueItem.id(), queueItem.name(), queueItem.buildAgent(), queueItem.participationId(), queueItem.courseId(), queueItem.exerciseId(), queueItem.retryCount(),
queueItem.priority(), queueItem.status(), queueItem.repositoryInfo(), queueItem.jobTimingInfo(), queueItem.buildConfig(), submissionResult);
}

public BuildJobQueueItem(BuildJobQueueItem queueItem, BuildAgentDTO buildAgent, int newRetryCount) {
this(queueItem.id(), queueItem.name(), buildAgent, queueItem.participationId(), queueItem.courseId(), queueItem.exerciseId(), newRetryCount, queueItem.priority(), null,
queueItem.repositoryInfo(), new JobTimingInfo(queueItem.jobTimingInfo.submissionDate(), ZonedDateTime.now(), null), queueItem.buildConfig(), null);
}
}
(next changed file; filename not rendered in this view)
@@ -221,10 +221,10 @@ public void stopUnresponsiveContainer(String containerId) {
future.get(10, TimeUnit.SECONDS); // Wait for the stop command to complete with a timeout
}
catch (NotFoundException | NotModifiedException e) {
log.debug("Container with id {} is already stopped: {}", containerId, e.getMessage());
log.warn("Container with id {} is already stopped.", containerId, e);
}
catch (Exception e) {
log.warn("Failed to stop container with id {}. Attempting to kill container: {}", containerId, e.getMessage());
log.error("Failed to stop container with id {}. Attempting to kill container.", containerId, e);

// Attempt to kill the container if stop fails
try {
@@ -236,7 +236,7 @@
killFuture.get(5, TimeUnit.SECONDS); // Wait for the kill command to complete with a timeout
}
catch (Exception killException) {
log.warn("Failed to kill container with id {}: {}", containerId, killException.getMessage());
log.error("Failed to kill container with id {}.", containerId, killException);
}
}
finally {
(next changed file; filename not rendered in this view)
@@ -378,18 +378,18 @@ private BuildResult parseTestResults(TarArchiveInputStream testResultsTarInputSt
}

// Read the contents of the tar entry as a string.
String xmlString = readTarEntryContent(testResultsTarInputStream);
String fileString = readTarEntryContent(testResultsTarInputStream);
// Get the file name of the tar entry.
String fileName = getFileName(tarEntry);

try {
// Check if the file is a static code analysis report file
if (StaticCodeAnalysisTool.getToolByFilePattern(fileName).isPresent()) {
processStaticCodeAnalysisReportFile(fileName, xmlString, staticCodeAnalysisReports, buildJobId);
processStaticCodeAnalysisReportFile(fileName, fileString, staticCodeAnalysisReports, buildJobId);
}
else {
// ugly workaround because in swift result files \n\t breaks the parsing
var testResultFileString = xmlString.replace("\n\t", "");
var testResultFileString = fileString.replace("\n\t", "");
if (!testResultFileString.isBlank()) {
processTestResultFile(testResultFileString, failedTests, successfulTests);
}
@@ -418,7 +418,7 @@ private boolean isValidTestResultFile(TarArchiveEntry tarArchiveEntry) {
String result = (lastIndexOfSlash != -1 && lastIndexOfSlash + 1 < name.length()) ? name.substring(lastIndexOfSlash + 1) : name;

// Java test result files are named "TEST-*.xml", Python test result files are named "*results.xml", and SARIF static code analysis reports end with ".sarif".
return !tarArchiveEntry.isDirectory() && result.endsWith(".xml") && !result.equals("pom.xml");
return !tarArchiveEntry.isDirectory() && (result.endsWith(".xml") && !result.equals("pom.xml") || result.endsWith(".sarif"));
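For readability: in the updated return statement above, && binds tighter than ||, so the filter accepts any *.xml except pom.xml and, newly, *.sarif static code analysis reports. A standalone restatement for illustration only (not code from this commit):

```java
public class ResultFileNameFilterExample {

    // Same accept/reject condition as in isValidTestResultFile, restated over the bare file name.
    static boolean isAcceptedResultFileName(String name) {
        return (name.endsWith(".xml") && !name.equals("pom.xml")) || name.endsWith(".sarif");
    }

    public static void main(String[] args) {
        System.out.println(isAcceptedResultFileName("TEST-MyTests.xml")); // true  (JUnit test results)
        System.out.println(isAcceptedResultFileName("results.xml"));      // true  (Python test results)
        System.out.println(isAcceptedResultFileName("pom.xml"));          // false (build file, not a result)
        System.out.println(isAcceptedResultFileName("report.sarif"));     // true  (newly accepted SARIF report)
    }
}
```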
}

/**
Expand All @@ -444,12 +444,12 @@ private String getFileName(TarArchiveEntry tarEntry) {
* Processes a static code analysis report file and adds the report to the corresponding list.
*
* @param fileName the file name of the static code analysis report file
* @param xmlString the content of the static code analysis report file
* @param reportContent the content of the static code analysis report file
* @param staticCodeAnalysisReports the list of static code analysis reports
*/
private void processStaticCodeAnalysisReportFile(String fileName, String xmlString, List<StaticCodeAnalysisReportDTO> staticCodeAnalysisReports, String buildJobId) {
private void processStaticCodeAnalysisReportFile(String fileName, String reportContent, List<StaticCodeAnalysisReportDTO> staticCodeAnalysisReports, String buildJobId) {
try {
staticCodeAnalysisReports.add(ReportParser.getReport(xmlString, fileName));
staticCodeAnalysisReports.add(ReportParser.getReport(reportContent, fileName));
}
catch (UnsupportedToolException e) {
String msg = "Failed to parse static code analysis report for " + fileName;
(next changed file; filename not rendered in this view)
@@ -266,16 +266,26 @@ private void checkAvailabilityAndProcessNextBuild() {
processBuild(buildJob);
}
catch (RejectedExecutionException e) {
log.error("Couldn't add build job to threadpool: {}\n Concurrent Build Jobs Count: {} Active tasks in pool: {}, Concurrent Build Jobs Size: {}", buildJob,
// TODO: we should log this centrally and not on the local node
log.error("Couldn't add build job to thread pool: {}\n Concurrent Build Jobs Count: {} Active tasks in pool: {}, Concurrent Build Jobs Size: {}", buildJob,
localProcessingJobs.get(), localCIBuildExecutorService.getActiveCount(), localCIBuildExecutorService.getMaximumPoolSize(), e);

// Add the build job back to the queue
if (buildJob != null) {
processingJobs.remove(buildJob.id());

buildJob = new BuildJobQueueItem(buildJob, new BuildAgentDTO("", "", ""));
log.info("Adding build job back to the queue: {}", buildJob);
queue.add(buildJob);
// Retry the build job at most 5 times when it gets rejected
if (buildJob.retryCount() >= 5) {
// TODO: we should log this centrally and not on the local node
log.error("Build job was rejected 5 times. Not adding build job back to the queue: {}", buildJob);
}
else {
// NOTE: we increase the retry count here, because the build job was not processed successfully
// TODO: we should try to run this job on a different build agent to avoid getting the same error again
buildJob = new BuildJobQueueItem(buildJob, new BuildAgentDTO("", "", ""), buildJob.retryCount() + 1);
log.info("Adding build job {} back to the queue with retry count {}", buildJob, buildJob.retryCount());
queue.add(buildJob);
}
localProcessingJobs.decrementAndGet();
}

@@ -551,7 +561,8 @@ private void resumeBuildAgent() {
private boolean nodeIsAvailable() {
log.debug("Currently processing jobs on this node: {}, active threads in Pool: {}, maximum pool size of thread executor : {}", localProcessingJobs.get(),
localCIBuildExecutorService.getActiveCount(), localCIBuildExecutorService.getMaximumPoolSize());
return localProcessingJobs.get() < localCIBuildExecutorService.getMaximumPoolSize();
return localProcessingJobs.get() < localCIBuildExecutorService.getMaximumPoolSize()
&& localCIBuildExecutorService.getActiveCount() < localCIBuildExecutorService.getMaximumPoolSize() && localCIBuildExecutorService.getQueue().isEmpty();
}

public class QueuedBuildJobItemListener implements ItemListener<BuildJobQueueItem> {
(next changed file; filename not rendered in this view)
@@ -290,21 +290,21 @@ private void registerLocalCIMetrics() {
}

private static int extractRunningBuilds(Optional<SharedQueueManagementService> sharedQueueManagementService) {
return sharedQueueManagementService.map(queueManagementService -> queueManagementService.getBuildAgentInformation().stream()
.map(buildAgentInformation -> buildAgentInformation.runningBuildJobs().size()).reduce(0, Integer::sum)).orElse(0);
return sharedQueueManagementService.map(SharedQueueManagementService::getProcessingJobsSize).orElse(0);
}

private static int extractQueuedBuilds(Optional<SharedQueueManagementService> sharedQueueManagementService) {
return sharedQueueManagementService.map(queueManagementService -> queueManagementService.getQueuedJobs().size()).orElse(0);
return sharedQueueManagementService.map(SharedQueueManagementService::getQueuedJobsSize).orElse(0);
}

private static int extractBuildAgents(Optional<SharedQueueManagementService> sharedQueueManagementService) {
return sharedQueueManagementService.map(queueManagementService -> queueManagementService.getBuildAgentInformation().size()).orElse(0);
return sharedQueueManagementService.map(SharedQueueManagementService::getBuildAgentInformationSize).orElse(0);
}

private static int extractMaxConcurrentBuilds(Optional<SharedQueueManagementService> sharedQueueManagementService) {
return sharedQueueManagementService.map(queueManagementService -> queueManagementService.getBuildAgentInformation().stream()
.map(BuildAgentInformation::maxNumberOfConcurrentBuildJobs).reduce(0, Integer::sum)).orElse(0);
.filter(agent -> agent.status() != BuildAgentInformation.BuildAgentStatus.PAUSED).map(BuildAgentInformation::maxNumberOfConcurrentBuildJobs)
.reduce(0, Integer::sum)).orElse(0);
}

private void registerWebsocketMetrics() {
27 changes: 0 additions & 27 deletions src/main/java/de/tum/cit/aet/artemis/core/domain/User.java
@@ -156,23 +156,6 @@ public class User extends AbstractAuditingEntity implements Participant {
@Column(name = "vcs_access_token_expiry_date")
private ZonedDateTime vcsAccessTokenExpiryDate = null;

/**
* The actual full public ssh key of a user used to authenticate git clone and git push operations if available
*/
@Nullable
@JsonIgnore
@Column(name = "ssh_public_key")
private final String sshPublicKey = null;

/**
* A hash of the public ssh key for fast comparison in the database (with an index)
*/
@Nullable
@Size(max = 100)
@JsonIgnore
@Column(name = "ssh_public_key_hash")
private final String sshPublicKeyHash = null;

@ElementCollection(fetch = FetchType.LAZY)
@CollectionTable(name = "user_groups", joinColumns = @JoinColumn(name = "user_id"))
@Column(name = "user_groups")
@@ -560,14 +543,4 @@ public void hasAcceptedIrisElseThrow() {
throw new AccessForbiddenException("The user has not accepted the Iris privacy policy yet.");
}
}

@Nullable
public String getSshPublicKey() {
return sshPublicKey;
}

@Nullable
public @Size(max = 100) String getSshPublicKeyHash() {
return sshPublicKeyHash;
}
}
20 changes: 0 additions & 20 deletions src/main/java/de/tum/cit/aet/artemis/core/dto/UserDTO.java
@@ -76,10 +76,6 @@ public class UserDTO extends AuditingEntityDTO {

private ZonedDateTime vcsAccessTokenExpiryDate;

private String sshPublicKey;

private String sshKeyHash;

private ZonedDateTime irisAccepted;

public UserDTO() {
@@ -262,14 +258,6 @@ public ZonedDateTime getVcsAccessTokenExpiryDate() {
return vcsAccessTokenExpiryDate;
}

public String getSshPublicKey() {
return sshPublicKey;
}

public void setSshPublicKey(String sshPublicKey) {
this.sshPublicKey = sshPublicKey;
}

@Override
public String toString() {
return "UserDTO{" + "login='" + login + '\'' + ", firstName='" + firstName + '\'' + ", lastName='" + lastName + '\'' + ", email='" + email + '\'' + ", imageUrl='"
@@ -293,12 +281,4 @@ public ZonedDateTime getIrisAccepted() {
public void setIrisAccepted(ZonedDateTime irisAccepted) {
this.irisAccepted = irisAccepted;
}

public String getSshKeyHash() {
return sshKeyHash;
}

public void setSshKeyHash(String sshKeyHash) {
this.sshKeyHash = sshKeyHash;
}
}
(Diffs for the remaining changed files are not rendered in this view.)