Add code formatter and linter #140

Merged on Jun 14, 2024 (4 commits)
6 changes: 6 additions & 0 deletions .github/workflows/cd.yml
@@ -8,7 +8,12 @@ on:
workflow_dispatch:

jobs:
run_spotless_check:
name: Check code is formatted with Spotless
uses: ./.github/workflows/spotless.yml

run_java_tests:
needs: run_spotless_check
name: Run Java tests
uses: ./.github/workflows/java-tests.yml

@@ -64,6 +69,7 @@

run_e2e_tests:
needs:
- run_spotless_check
- run_java_tests
- push_to_registry
name: Run E2E tests
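Note on the pipeline wiring above: the new run_spotless_check job runs first and gates the rest of the workflow. run_java_tests now needs it, and run_e2e_tests waits for the formatting check in addition to the Java tests and the registry push.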
2 changes: 1 addition & 1 deletion .github/workflows/generate-jooq.yml
@@ -28,7 +28,7 @@ jobs:
distribution: temurin

- name: Cache Maven packages
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
2 changes: 1 addition & 1 deletion .github/workflows/java-tests.yml
@@ -27,7 +27,7 @@ jobs:
distribution: temurin

- name: Cache Maven packages
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
31 changes: 31 additions & 0 deletions .github/workflows/spotless.yml
@@ -0,0 +1,31 @@
name: 'Spotless'
on:
# this workflow is only called by other workflows; it won't run on its own
workflow_call:

jobs:
spotless:
name: Check code is formatted with Spotless
runs-on: ubuntu-22.04

steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Set up JDK 17
uses: actions/setup-java@v4
with:
java-version: "17"
java-package: jdk
architecture: x64
distribution: temurin

- name: Cache Maven packages
uses: actions/cache@v4
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-m2

- name: Run Spotless Check
run: mvn spotless:check
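
Usage note: the check-only goal above fails the workflow whenever a file deviates from the configured format. Assuming the Spotless Maven plugin's standard goals, the same result can be reproduced locally with mvn spotless:check, and mvn spotless:apply rewrites the offending files in place before committing.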
54 changes: 53 additions & 1 deletion pom.xml
@@ -43,6 +43,7 @@
<maven-properties.plugin.version>1.2.1</maven-properties.plugin.version>
<maven-enforcer.plugin.version>3.5.0</maven-enforcer.plugin.version>
<maven-build-helper.plugin.version>3.6.0</maven-build-helper.plugin.version>
<maven-spotless.plugin.version>2.43.0</maven-spotless.plugin.version>

<!-- Packaging -->
<build.profile.id>dev</build.profile.id>
@@ -666,7 +667,58 @@
<skipTests>${skip.integration.tests}</skipTests>
</configuration>
</plugin>

<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<version>${maven-spotless.plugin.version}</version>
<configuration>
<java>
<!-- No need to specify files, inferred automatically -->

<!-- Cleanthat enforces certain kinds of coding style. -->
<!-- The codebase mostly adheres to the listed ruleset. -->
<!-- But a couple of offending mutators have been disabled. -->
<cleanthat>
<sourceJdk>${java.version}</sourceJdk>
<mutators>
<!-- See the commit message for full list of mutators enabled by these presets. -->
<mutator>SafeAndConsensual</mutator>
<mutator>SafeButNotConsensual</mutator>
<mutator>SafeButControversial</mutator>
<mutator>Guava</mutator>
<mutator>SpotBugs</mutator>

<!-- <mutator>Stream</mutator> -->
<!-- The Stream preset is broken: it has a copy-paste bug and is equal to the Guava rule set. -->
<!-- The rules it should include are therefore listed individually: -->
<mutator>ForEachIfBreakToStreamFindFirst</mutator>
<mutator>ForEachIfBreakElseToStreamTakeWhile</mutator>
<mutator>StreamFlatMapStreamToFlatMap</mutator>
<mutator>StreamForEachNestingForLoopToFlatMap</mutator>
<mutator>StreamMapIdentity</mutator>
<mutator>StreamWrappedMethodRefToMap</mutator>
<mutator>StreamWrappedVariableToMap</mutator>
</mutators>
<excludedMutators>
<!-- Allow ternary clauses: return condition ? valA : valB -->
<excludedMutator>AvoidInlineConditionals</excludedMutator>

<!-- Allow writing 123456789 instead of requiring 123_456_789 -->
<excludedMutator>UseUnderscoresInNumericLiterals</excludedMutator>
</excludedMutators>
</cleanthat>

<!-- Format the code according to Palantir code style. -->
<!-- Needs to be applied after <cleanthat>, as it can generate alternative code. -->
<palantirJavaFormat>
<version>2.47.0</version>
<style>PALANTIR</style>
<formatJavadoc>true</formatJavadoc>
</palantirJavaFormat>
</java>
</configuration>
</plugin>
</plugins>
</build>

</project>
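
For context on the two exclusions above, here is a minimal hypothetical sketch (the class, field, and method names are invented for illustration) showing the kind of Java code the excluded mutators are meant to allow:

    // Hypothetical illustration; names and values are invented.
    public final class FormattingExamples {

        // Kept as-is: UseUnderscoresInNumericLiterals is excluded, so 123456789
        // does not have to be rewritten as 123_456_789.
        static final int LARGE_VALUE = 123456789;

        // Kept as-is: AvoidInlineConditionals is excluded, so ternary returns of
        // the form condition ? valA : valB are allowed.
        static int pick(final boolean condition, final int valA, final int valB) {
            return condition ? valA : valB;
        }
    }

The cleanthat mutators rewrite code toward the listed rule sets first, and palantirJavaFormat then reflows the result, which is why the configuration notes it must be applied after cleanthat.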
3 changes: 1 addition & 2 deletions src/main/java/fi/hsl/jore/importer/ImporterApplication.java
@@ -6,11 +6,10 @@
import org.springframework.boot.autoconfigure.r2dbc.R2dbcAutoConfiguration;

// Disable JDBC datasource autoconfiguration temporarily
@SpringBootApplication(exclude = { DataSourceAutoConfiguration.class, R2dbcAutoConfiguration.class })
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class, R2dbcAutoConfiguration.class})
public class ImporterApplication {

public static void main(final String[] args) {
SpringApplication.run(ImporterApplication.class, args);
}

}
39 changes: 19 additions & 20 deletions src/main/java/fi/hsl/jore/importer/config/DatasourceConfig.java
@@ -10,6 +10,8 @@
import fi.hsl.jore.importer.config.properties.TestImporterDataSourceProperties;
import fi.hsl.jore.importer.config.properties.TestJore4DataSourceProperties;
import fi.hsl.jore.importer.config.properties.TestSourceDataSourceProperties;
import jakarta.annotation.Resource;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -18,19 +20,11 @@
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;

import javax.sql.DataSource;
import jakarta.annotation.Resource;


@Configuration
public class DatasourceConfig {
@Configuration
@StandardDatabase
@Import({
SourceDataSourceProperties.class,
ImporterDataSourceProperties.class,
Jore4DataSourceProperties.class
})
@Import({SourceDataSourceProperties.class, ImporterDataSourceProperties.class, Jore4DataSourceProperties.class})
public static class StandardDatabaseConfiguration {
@Resource
private SourceDataSourceProperties sourceDataSourceProperties;
@@ -63,10 +57,10 @@ public DataSourceConfigDto jore4DataSourceConfig() {
@Configuration
@TestDatabase
@Import({
TestSourceDataSourceProperties.class,
TestImporterDataSourceProperties.class,
TestJore4DataSourceProperties.class
})
TestSourceDataSourceProperties.class,
TestImporterDataSourceProperties.class,
TestJore4DataSourceProperties.class
})
public static class TestDatabaseConfiguration {
@Resource
private TestSourceDataSourceProperties testSourceDataSourceProperties;
@@ -99,40 +93,45 @@ public DataSourceConfigDto jore4DataSourceConfig() {
// The sourceDataSource is a plain Hikari connection pool
@Bean(destroyMethod = "close")
@Qualifier("sourceDataSource")
public HikariDataSource sourceDataSource(@Qualifier("sourceDataSourceConfig") final DataSourceConfigDto dataSourceConfigDto) {
public HikariDataSource sourceDataSource(
@Qualifier("sourceDataSourceConfig") final DataSourceConfigDto dataSourceConfigDto) {
return new HikariDataSource(dataSourceConfigDto.buildHikariConfig());
}

@Bean(destroyMethod = "close")
@Primary
@Qualifier("importerDataSource")
public HikariDataSource importerDataSource(@Qualifier("importerDataSourceConfig") final DataSourceConfigDto dataSourceConfigDto) {
public HikariDataSource importerDataSource(
@Qualifier("importerDataSourceConfig") final DataSourceConfigDto dataSourceConfigDto) {
return new HikariDataSource(dataSourceConfigDto.buildHikariConfig());
}

@Bean(destroyMethod = "close")
@Qualifier("jore4DataSource")
public HikariDataSource jore4DataSource(@Qualifier("jore4DataSourceConfig") final DataSourceConfigDto dataSourceConfigDto) {
public HikariDataSource jore4DataSource(
@Qualifier("jore4DataSourceConfig") final DataSourceConfigDto dataSourceConfigDto) {
return new HikariDataSource(dataSourceConfigDto.buildHikariConfig());
}


@Bean
@Qualifier("sourceTransactionManager")
public PlatformTransactionManager sourceTransactionManager(@Qualifier("sourceDataSource") DataSource sourceDataSource) {
public PlatformTransactionManager sourceTransactionManager(
@Qualifier("sourceDataSource") DataSource sourceDataSource) {
return new DataSourceTransactionManager(sourceDataSource);
}

@Bean
@Primary
@Qualifier("importerTransactionManager")
public PlatformTransactionManager importerTransactionManager(@Qualifier("importerDataSource") DataSource importerDataSource) {
public PlatformTransactionManager importerTransactionManager(
@Qualifier("importerDataSource") DataSource importerDataSource) {
return new DataSourceTransactionManager(importerDataSource);
}

@Bean
@Qualifier("jore4TransactionManager")
public PlatformTransactionManager jore4TransactionManager(@Qualifier("jore4DataSource") DataSource jore4DataSource) {
public PlatformTransactionManager jore4TransactionManager(
@Qualifier("jore4DataSource") DataSource jore4DataSource) {
return new DataSourceTransactionManager(jore4DataSource);
}
}
@@ -4,17 +4,16 @@
import fi.hsl.jore.importer.config.profile.TestDatabase;
import fi.hsl.jore.importer.feature.digiroad.service.CsvDigiroadStopService;
import fi.hsl.jore.importer.feature.digiroad.service.DigiroadStopService;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;

import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;

@Configuration
public class DigiroadServiceConfig {

@@ -25,8 +24,10 @@ public class DigiroadServiceConfig {
public static class StandardDatabaseConfiguration {

@Bean
public DigiroadStopService digiroadStopService(final Environment environment) throws URISyntaxException, MalformedURLException {
final Resource csvResource = new UrlResource(new URI(environment.getRequiredProperty("digiroad.stop.csv.file.url")));
public DigiroadStopService digiroadStopService(final Environment environment)
throws URISyntaxException, MalformedURLException {
final Resource csvResource =
new UrlResource(new URI(environment.getRequiredProperty("digiroad.stop.csv.file.url")));
return new CsvDigiroadStopService(csvResource);
}
}
@@ -37,7 +38,8 @@ public static class TestDatabaseConfiguration {

@Bean
public DigiroadStopService digiroadStopService() throws Exception {
final CsvDigiroadStopService service = new CsvDigiroadStopService(DigiroadServiceConfig.CSV_STOP_CLASSPATH_RESOURCE);
final CsvDigiroadStopService service =
new CsvDigiroadStopService(DigiroadServiceConfig.CSV_STOP_CLASSPATH_RESOURCE);
service.readStopsFromCsvFile();
return service;
}
@@ -24,9 +24,10 @@ public RestTemplate restTemplate() {
}

@Bean
public IMapMatchingService mapMatchingService(@Value("#{environment['map.matching.api.baseUrl']}") final String mapMatchingApiUrl,
final ObjectMapper objectMapper,
final RestTemplate restTemplate) {
public IMapMatchingService mapMatchingService(
@Value("#{environment['map.matching.api.baseUrl']}") final String mapMatchingApiUrl,
final ObjectMapper objectMapper,
final RestTemplate restTemplate) {
return new MapMatchingService(mapMatchingApiUrl, objectMapper, restTemplate);
}
}
@@ -41,4 +42,3 @@ public IMapMatchingService mapMatchingService() {
}
}
}

12 changes: 6 additions & 6 deletions src/main/java/fi/hsl/jore/importer/config/jobs/BatchConfig.java
@@ -1,5 +1,6 @@
package fi.hsl.jore.importer.config.jobs;

import javax.sql.DataSource;
import org.springframework.batch.core.configuration.support.DefaultBatchConfiguration;
import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
import org.springframework.boot.autoconfigure.batch.BatchTransactionManager;
@@ -9,23 +10,22 @@
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import javax.sql.DataSource;
import org.springframework.transaction.PlatformTransactionManager;

@Configuration
@ComponentScan(basePackages = "fi.hsl.jore.importer.feature")
public class BatchConfig extends DefaultBatchConfiguration {
public class BatchConfig extends DefaultBatchConfiguration {
private DataSource batchDataSource;
private DataSourceTransactionManager batchTransactionManager;

@Override
protected DataSource getDataSource() {
if (batchDataSource == null) {
batchDataSource = new EmbeddedDatabaseBuilder()
.setType(EmbeddedDatabaseType.HSQL)
.addScript("/org/springframework/batch/core/schema-hsqldb.sql")
.generateUniqueName(true)
.build();
.setType(EmbeddedDatabaseType.HSQL)
.addScript("/org/springframework/batch/core/schema-hsqldb.sql")
.generateUniqueName(true)
.build();
}

return this.batchDataSource;