diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 06596c51..10f0a1bd 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -24,14 +24,31 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: scalafmt test
-        run: ./sbt scalafmtCheckAll
+        run: ./sbt "+ scalafmtCheckAll"
   test_sbt_plugin:
     name: plugin test
-    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - os: ubuntu-latest
+            distribution: zulu
+            java: 8
+          - os: ubuntu-latest
+            distribution: temurin
+            java: 11
+          - os: ubuntu-latest
+            distribution: temurin
+            java: 17
+          - os: ubuntu-latest
+            distribution: temurin
+            java: 21
+    runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
-      - uses: olafurpg/setup-scala@v14
+      - uses: actions/setup-java@v4
         with:
-          java-version: adopt@1.11
+          distribution: "${{ matrix.distribution }}"
+          java-version: "${{ matrix.java }}"
       - name: scripted tests
-        run: ./sbt scripted
\ No newline at end of file
+        run: ./sbt "+ test" "+ scripted"
diff --git a/build.sbt b/build.sbt
index 9d2690e1..54fce8f7 100644
--- a/build.sbt
+++ b/build.sbt
@@ -3,6 +3,21 @@ Global / onChangedBuildSource := ReloadOnSourceChanges
 enablePlugins(SbtTwirl)
 enablePlugins(SbtPlugin)
 
+val SCALA_3 = "3.3.4"
+val SCALA_2 = "2.12.20"
+crossScalaVersions := List(SCALA_3, SCALA_2)
+
+ThisBuild / scalaVersion := SCALA_3
+
+pluginCrossBuild / sbtVersion := {
+  scalaBinaryVersion.value match {
+    case "2.12" =>
+      (pluginCrossBuild / sbtVersion).value
+    case _ =>
+      "2.0.0-M2"
+  }
+}
+
 organization := "org.xerial.sbt"
 organizationName := "Xerial project"
 name := "sbt-pack"
@@ -16,10 +31,8 @@ pomIncludeRepository := { _ => false }
 
-ThisBuild / scalaVersion := "2.12.20"
-
 parallelExecution := true
 
-crossPaths := false
+crossPaths := true
 
 scalacOptions ++= Seq("-encoding", "UTF-8", "-deprecation", "-unchecked")
 
 scriptedBufferLog := false
diff --git a/sbt b/sbt
index 8e8d34b1..7554340a 100755
--- a/sbt
+++ b/sbt
@@ -34,11 +34,11 @@
 set -o pipefail
 
-declare -r sbt_release_version="1.7.3"
-declare -r sbt_unreleased_version="1.7.3"
+declare -r sbt_release_version="1.10.2"
+declare -r sbt_unreleased_version="1.10.2"
 
-declare -r latest_213="2.13.10"
-declare -r latest_212="2.12.17"
+declare -r latest_213="2.13.15"
+declare -r latest_212="2.12.20"
 declare -r latest_211="2.11.12"
 declare -r latest_210="2.10.7"
 declare -r latest_29="2.9.3"
diff --git a/src/main/scala-2/xerial/sbt/pack/PluginCompat.scala b/src/main/scala-2/xerial/sbt/pack/PluginCompat.scala
new file mode 100644
index 00000000..2f541585
--- /dev/null
+++ b/src/main/scala-2/xerial/sbt/pack/PluginCompat.scala
@@ -0,0 +1,21 @@
+package xerial.sbt.pack
+
+import java.nio.file.{Path => NioPath}
+import sbt.*
+import xsbti.FileConverter
+
+private[pack] object PluginCompat {
+  type FileRef = java.io.File
+  type Out = java.io.File
+
+  implicit def toFile(a: FileRef): Out = a
+
+  def toNioPath(a: Attributed[File])(implicit conv: FileConverter): NioPath =
+    a.data.toPath()
+  def toFile(a: Attributed[File])(implicit conv: FileConverter): File =
+    a.data
+  def toNioPaths(cp: Seq[Attributed[File]])(implicit conv: FileConverter): Vector[NioPath] =
+    cp.map(_.data.toPath()).toVector
+  def toFiles(cp: Seq[Attributed[File]])(implicit conv: FileConverter): Vector[File] =
+    cp.map(_.data).toVector
+}
diff --git a/src/main/scala-3/xerial/sbt/pack/PluginCompat.scala b/src/main/scala-3/xerial/sbt/pack/PluginCompat.scala
new file mode 100644
index 00000000..d2235f9c
--- /dev/null
+++ b/src/main/scala-3/xerial/sbt/pack/PluginCompat.scala
@@ -0,0 +1,25 @@
+package xerial.sbt.pack
+
+import java.nio.file.Path as NioPath
+import sbt.*
+import sbt.internal.inc.PlainVirtualFileConverter
+import xsbti.{FileConverter, HashedVirtualFileRef, VirtualFile}
+
+// See https://www.eed3si9n.com/sbt-assembly-2.3.0
+private[pack] object PluginCompat:
+  type FileRef = HashedVirtualFileRef
+  type Out = VirtualFile
+
+  given conv: FileConverter = PlainVirtualFileConverter.converter
+
+  implicit def toFile(a: HashedVirtualFileRef): File = conv.toPath(a).toFile
+  implicit def toFileRef(a: File): FileRef = conv.toVirtualFile(a.toPath)
+
+  def toNioPath(a: Attributed[HashedVirtualFileRef])(using conv: FileConverter): NioPath =
+    conv.toPath(a.data)
+  inline def toFile(a: Attributed[HashedVirtualFileRef])(using conv: FileConverter): File =
+    toNioPath(a).toFile()
+  def toNioPaths(cp: Seq[Attributed[HashedVirtualFileRef]])(using conv: FileConverter): Vector[NioPath] =
+    cp.map(toNioPath).toVector
+  inline def toFiles(cp: Seq[Attributed[HashedVirtualFileRef]])(using conv: FileConverter): Vector[File] =
+    toNioPaths(cp).map(_.toFile())
diff --git a/src/main/scala/xerial/sbt/pack/PackArchive.scala b/src/main/scala/xerial/sbt/pack/PackArchive.scala
index 943d0ca4..d23a06c2 100644
--- a/src/main/scala/xerial/sbt/pack/PackArchive.scala
+++ b/src/main/scala/xerial/sbt/pack/PackArchive.scala
@@ -10,6 +10,8 @@ import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream
 import org.apache.commons.io.IOUtils
 import sbt.Keys.*
 import sbt.*
+import PluginCompat.*
+import PluginCompat.toFile
 
 trait PackArchive {
 
@@ -32,7 +34,7 @@ trait PackArchive {
     val excludeFiles = packArchiveExcludes.value.toSet
     def addFilesToArchive(dir: File): Unit =
       Option(dir.listFiles)
-        .getOrElse(Array.empty)
+        .getOrElse(Array.empty[File])
         .filterNot(f => excludeFiles.contains(rpath(distDir, f)))
         .foreach { file =>
           aos.putArchiveEntry(createEntry(file, archiveBaseDir ++ rpath(distDir, file, "/"), binDir))
@@ -115,16 +117,16 @@
   )
 
   def publishPackArchiveTgz: SettingsDefinition =
-    addArtifact(Def.setting(packArchiveTgzArtifact.value), Runtime / packArchiveTgz)
+    addArtifact(Def.setting(packArchiveTgzArtifact.value), packArchiveTgz)
 
   def publishPackArchiveTbz: SettingsDefinition =
-    addArtifact(Def.setting(packArchiveTbzArtifact.value), Runtime / packArchiveTbz)
+    addArtifact(Def.setting(packArchiveTbzArtifact.value), packArchiveTbz)
 
   def publishPackArchiveTxz: SettingsDefinition =
-    addArtifact(Def.setting(packArchiveTxzArtifact.value), Runtime / packArchiveTxz)
+    addArtifact(Def.setting(packArchiveTxzArtifact.value), packArchiveTxz)
 
   def publishPackArchiveZip: SettingsDefinition =
-    addArtifact(Def.setting(packArchiveZipArtifact.value), Runtime / packArchiveZip)
+    addArtifact(Def.setting(packArchiveZipArtifact.value), packArchiveZip)
 
   def publishPackArchives: SettingsDefinition =
     publishPackArchiveTgz ++ publishPackArchiveZip
diff --git a/src/main/scala/xerial/sbt/pack/PackPlugin.scala b/src/main/scala/xerial/sbt/pack/PackPlugin.scala
index 629cb29c..fdc19bbd 100755
--- a/src/main/scala/xerial/sbt/pack/PackPlugin.scala
+++ b/src/main/scala/xerial/sbt/pack/PackPlugin.scala
@@ -10,15 +10,16 @@ package xerial.sbt.pack
 import java.io.{BufferedWriter, FileWriter}
 import java.nio.file.Files
 import java.time.format.{DateTimeFormatterBuilder, SignStyle}
-import java.time.temporal.ChronoField._
+import java.time.temporal.ChronoField.*
 import java.time.{Instant, ZoneId, ZonedDateTime}
 import java.util.{Date, Locale}
-
-import sbt.Keys._
-import sbt._
+import sbt.Keys.*
+import sbt.{io, *}
 
 import scala.util.Try
 import scala.util.matching.Regex
+import PluginCompat.*
+import PluginCompat.toFile
 
 /** Plugin for packaging projects
   *
@@ -26,7 +27,6 @@
   *   Taro L. Saito
   */
 object PackPlugin extends AutoPlugin with PackArchive {
-
   override def trigger = noTrigger
 
   case class ModuleEntry(
@@ -66,7 +66,7 @@
     val packExcludeJars = settingKey[Seq[String]]("specify jar file name patterns to exclude when packaging")
     val packExcludeArtifactTypes = settingKey[Seq[String]]("specify artifact types (e.g. javadoc) to exclude when packaging")
-    val packLibJars = taskKey[Seq[(File, ProjectRef)]]("pack-lib-jars")
+    val packLibJars = taskKey[Seq[(FileRef, ProjectRef)]]("pack-lib-jars")
     val packGenerateWindowsBatFile = settingKey[Boolean]("Generate BAT file launch scripts for Windows")
     val packGenerateMakefile = settingKey[Boolean]("Generate Makefile")
@@ -104,10 +104,10 @@
     val packArchiveTbzArtifact = settingKey[Artifact]("tar.bz2 archive artifact")
     val packArchiveTxzArtifact = settingKey[Artifact]("tar.xz archive artifact")
     val packArchiveZipArtifact = settingKey[Artifact]("zip archive artifact")
-    val packArchiveTgz = taskKey[File]("create a tar.gz archive of the distributable package")
-    val packArchiveTbz = taskKey[File]("create a tar.bz2 archive of the distributable package")
-    val packArchiveTxz = taskKey[File]("create a tar.xz archive of the distributable package")
-    val packArchiveZip = taskKey[File]("create a zip archive of the distributable package")
+    val packArchiveTgz = taskKey[FileRef]("create a tar.gz archive of the distributable package")
+    val packArchiveTbz = taskKey[FileRef]("create a tar.bz2 archive of the distributable package")
+    val packArchiveTxz = taskKey[FileRef]("create a tar.xz archive of the distributable package")
+    val packArchiveZip = taskKey[FileRef]("create a zip archive of the distributable package")
     val packArchive = taskKey[Seq[File]]("create a tar.gz and a zip archive of the distributable package")
     val packEnvVars = taskKey[Map[String, Map[String, String]]]("environment variables")
   }
@@ -144,23 +144,31 @@
     packGenerateMakefile := true,
     packMainDiscovered := Def.taskDyn {
      val mainClasses =
-        getFromSelectedProjects(thisProjectRef.value, Compile / discoveredMainClasses, state.value, packExclude.value)
+        getFromSelectedProjects(
+          thisProjectRef.value,
+          Runtime,
+          discoveredMainClasses,
+          state.value,
+          packExclude.value
+        )
      Def.task {
-        mainClasses.value.flatMap(_._1.map(mainClass => hyphenize(mainClass.split('.').last) -> mainClass).toMap).toMap
+        mainClasses.value
+          .flatMap(_._1.map(mainClass => hyphenize(mainClass.split('.').last) -> mainClass).toMap).toMap
      }
    }.value,
    packAllUnmanagedJars := Def.taskDyn {
      val allUnmanagedJars =
-        getFromSelectedProjects(thisProjectRef.value, Runtime / unmanagedJars, state.value, packExclude.value)
+        getFromSelectedProjects(thisProjectRef.value, Runtime, unmanagedJars, state.value, packExclude.value)
      Def.task { allUnmanagedJars.value }
    }.value,
    Def.derive(
      packLibJars := Def.taskDyn {
-        def libJarsFromConfiguration(c: Configuration): Seq[Task[Seq[(File, ProjectRef)]]] =
+        def libJarsFromConfiguration(c: Configuration): Seq[Task[Seq[(FileRef, ProjectRef)]]] =
          Seq(
-            getFromSelectedProjects(
+            getFromSelectedProjects[FileRef](
              thisProjectRef.value,
-              c / packageBin,
+              c,
+              packageBin,
              state.value,
              packExcludeLibJars.value
            )
@@ -192,7 +200,7 @@
            c <- update.value.filter(df).configurations
            m <- c.modules if !m.evicted
            (artifact, file) <- m.artifacts
-            if !packExcludeArtifactTypes.value.contains(artifact.`type`) && !isExcludeJar(file.name)
+            if !packExcludeArtifactTypes.value.contains(artifact.`type`) && !isExcludeJar(file.getName())
          } yield {
            val mid = m.module
            ModuleEntry(
@@ -234,23 +242,23 @@
      val log = streams.value.log
 
      val distinctDpJars = packModuleEntries.value.map(_.file)
-      val unmanaged = packAllUnmanagedJars.value.flatMap { _._1 }.map { _.data }
+      val unmanaged = packAllUnmanagedJars.value.flatMap(_._1).map(x => toFile(x.data))
      val copyDepTargetDir = packCopyDependenciesTarget.value
      val useSymlink = packCopyDependenciesUseSymbolicLinks.value
      copyDepTargetDir.mkdirs()
-      IO.delete((copyDepTargetDir * "*.jar").get)
-      (distinctDpJars ++ unmanaged) foreach { d ⇒
-        log debug s"Copying ${d.getName}"
-        val dest = copyDepTargetDir / d.getName
+      IO.delete((copyDepTargetDir * "*.jar").get())
+      (distinctDpJars ++ unmanaged).foreach { d =>
+        log debug s"Copying ${d.getName()}"
+        val dest = copyDepTargetDir / d.getName()
        if (useSymlink) {
-          Files.createSymbolicLink(dest.toPath, d.toPath)
+          Files.createSymbolicLink(dest.toPath, d.toPath())
        } else {
          IO.copyFile(d, dest)
        }
      }
      val libs = packLibJars.value.map(_._1)
-      libs.foreach(l ⇒ IO.copyFile(l, copyDepTargetDir / l.getName))
+      libs.foreach(l => IO.copyFile(l, copyDepTargetDir / l.getName()))
      log info s"Copied ${distinctDpJars.size + libs.size} jars to ${copyDepTargetDir}"
    }
@@ -272,10 +280,10 @@
 
      // Copy project jars
      out.log.info(logPrefix + "Copying libraries to " + rpath(base, libDir))
-      val libs: Seq[File] = packLibJars.value.map(_._1)
-      out.log.info(logPrefix + "project jars:\n" + libs.map(path => rpath(base, path)).mkString("\n"))
+      val libs: Seq[FileRef] = packLibJars.value.map(_._1)
+      out.log.info(logPrefix + "project jars:\n" + libs.map(path => rpath(base, new io.RichFile(path))).mkString("\n"))
      val projectJars = libs.map(l => {
-        val dest = libDir / l.getName
+        val dest = libDir / l.getName()
        IO.copyFile(l, dest)
        dest
      })
@@ -297,13 +305,13 @@
      out.log.info(logPrefix + "Copying unmanaged dependencies:")
      val unmanagedDepsJars = for ((m, projectRef) <- packAllUnmanagedJars.value; um <- m; f = um.data) yield {
        out.log.info(f.getPath)
-        val dest = libDir / f.getName
-        IO.copyFile(f, dest, true)
+        val dest = libDir / f.getName()
+        sbt.IO.copyFile(f, dest, true)
        dest
      }
 
      // Copy explicitly added dependencies
-      val mapped: Seq[(File, String)] = mappings.value
+      val mapped: Seq[(FileRef, String)] = mappings.value
      out.log.info(logPrefix + "Copying explicit dependencies:")
      val explicitDepsJars = for ((file, path) <- mapped) yield {
        out.log.info(file.getPath)
@@ -331,7 +339,7 @@
      out.log.info(logPrefix + "Create a bin folder: " + rpath(base, binDir))
      binDir.mkdirs()
-      def write(path: String, content: String) {
+      def write(path: String, content: String): Unit = {
        val p = distDir / path
        out.log.info(logPrefix + "Generating %s".format(rpath(base, p)))
        IO.write(p, content)
      }
@@ -368,10 +376,10 @@
        def extraClasspath(sep: String): String =
          packExtraClasspath.value.get(name).map(_.mkString("", sep, sep)).getOrElse("")
        def expandedClasspath(sep: String): String = {
-          val projJars = libs.map(l => "${PROG_HOME}/lib/" + l.getName)
+          val projJars = libs.map(l => "${PROG_HOME}/lib/" + l.getName())
          val depJars = distinctDpJars.map(m => "${PROG_HOME}/lib/" + resolveJarName(m, jarNameConvention))
          val unmanagedJars = for ((m, projectRef) <- packAllUnmanagedJars.value; um <- m; f = um.data) yield {
-            "${PROG_HOME}/lib/" + f.getName
+            "${PROG_HOME}/lib/" + f.getName()
          }
          (projJars ++ depJars ++ unmanagedJars).mkString("", sep, sep)
        }
@@ -485,13 +493,15 @@
 
  private def getFromAllProjects[T](
      contextProject: ProjectRef,
+      config: Configuration,
      targetTask: TaskKey[T],
      state: State
  ): Task[Seq[(T, ProjectRef)]] =
-    getFromSelectedProjects(contextProject, targetTask, state, Seq.empty)
+    getFromSelectedProjects(contextProject, config, targetTask, state, Seq.empty)
 
  private def getFromSelectedProjects[T](
      contextProject: ProjectRef,
+      config: Configuration,
      targetTask: TaskKey[T],
      state: State,
      exclude: Seq[String]
@@ -513,7 +523,7 @@
      (currentProject +: (children flatMap transitiveDependencies)) filterNot (isExcluded)
    }
    val projects: Seq[ProjectRef] = transitiveDependencies(contextProject).distinct
-    projects.map(p => (Def.task { ((p / targetTask).value, p) }) evaluate structure.data).join
+    projects.map(p => (Def.task { ((p / config / targetTask).value, p) }) evaluate structure.data).join
  }
 
  private val humanReadableTimestampFormatter = new DateTimeFormatterBuilder()
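
Note (not part of the patch above): a minimal sketch of how task code can stay source-compatible with both sbt 1.x (Scala 2.12, where classpath entries are Attributed[java.io.File]) and sbt 2.x (Scala 3, where they are Attributed[HashedVirtualFileRef]) by touching classpath entries only through the PluginCompat shim introduced in this diff. The task key printLibJars is hypothetical and used purely for illustration, and fileConverter is assumed to be available (sbt 1.4 or newer).

import sbt.*
import sbt.Keys.*
import xerial.sbt.pack.PluginCompat

object CrossCompatExample {
  // Hypothetical key, only to demonstrate the compat layer.
  val printLibJars = taskKey[Unit]("print runtime classpath entries as plain files")

  val settings: Seq[Setting[_]] = Seq(
    printLibJars := {
      val log = streams.value.log
      // Supplies the FileConverter that PluginCompat's helpers expect on both sbt versions.
      implicit val conv: xsbti.FileConverter = fileConverter.value
      // toFiles hides whether the entries are java.io.File (sbt 1.x) or HashedVirtualFileRef (sbt 2.x).
      val files = PluginCompat.toFiles((Runtime / fullClasspath).value)
      files.foreach(f => log.info(f.getAbsolutePath))
    }
  )
}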