diff --git a/build.sbt b/build.sbt
index 75ed3f7bc8..f2a86ca307 100755
--- a/build.sbt
+++ b/build.sbt
@@ -76,7 +76,6 @@ lazy val akkaTestKit = "com.typesafe.akka" %% "akka
 lazy val akkaTestKitTyped = "com.typesafe.akka" %% "akka-actor-testkit-typed" % akkaVersion
 lazy val alpakkaFile = "com.lightbend.akka" %% "akka-stream-alpakka-file" % alpakkaVersion
 lazy val alpakkaSse = "com.lightbend.akka" %% "akka-stream-alpakka-sse" % alpakkaVersion
-lazy val alpakkaS3 = "com.lightbend.akka" %% "akka-stream-alpakka-s3" % alpakkaVersion
 lazy val apacheCompress = "org.apache.commons" % "commons-compress" % apacheCompressVersion
 lazy val apacheIO = "commons-io" % "commons-io" % apacheIOVersion
 lazy val awsSdk = "software.amazon.awssdk" % "s3" % awsSdkVersion
@@ -576,12 +575,6 @@ lazy val storagePlugin = project
     name := "delta-storage-plugin",
     moduleName := "delta-storage-plugin",
     libraryDependencies ++= Seq(
-      alpakkaS3 excludeAll (
-        ExclusionRule(organization = "com.typesafe.akka", name = "akka-stream_2.13"),
-        ExclusionRule(organization = "com.typesafe.akka", name = "akka-http_2.13"),
-        ExclusionRule(organization = "com.typesafe.akka", name = "akka-http-xml_2.13"),
-        ExclusionRule(organization = "org.slf4j", name = "slf4j-api")
-      ),
       kamonAkkaHttp % Provided,
       akkaSlf4j % Test,
       akkaTestKitTyped % Test,
diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageValue.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageValue.scala
index e33007da4e..71b7b98dcd 100644
--- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageValue.scala
+++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageValue.scala
@@ -1,10 +1,7 @@
 package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model
 
 import akka.http.scaladsl.model.Uri
-import akka.stream.alpakka.s3
-import akka.stream.alpakka.s3.{ApiVersion, MemoryBufferType}
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.Digest
-import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.StorageTypeConfig
 import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords
 import ch.epfl.bluebrain.nexus.delta.sdk.implicits._
 import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission
@@ -13,9 +10,7 @@ import io.circe.generic.extras.Configuration
 import io.circe.generic.extras.semiauto.{deriveConfiguredCodec, deriveConfiguredEncoder}
 import io.circe.syntax._
 import io.circe.{Codec, Decoder, Encoder}
-import software.amazon.awssdk.auth.credentials.{AnonymousCredentialsProvider, AwsBasicCredentials, StaticCredentialsProvider}
 import software.amazon.awssdk.regions.Region
-import software.amazon.awssdk.regions.providers.AwsRegionProvider
 
 import java.io.File
 import java.nio.file.Path
@@ -146,38 +141,6 @@ object StorageValue {
 
     override val tpe: StorageType = StorageType.S3Storage
     override val capacity: Option[Long] = None
-
-    def address(bucket: String): Uri =
-      endpoint match {
-        case Some(host) if host.scheme.trim.isEmpty => Uri(s"https://$bucket.$host")
-        case Some(e) => e.withHost(s"$bucket.${e.authority.host}")
-        case None => region.fold(s"https://$bucket.s3.amazonaws.com")(r => s"https://$bucket.s3.$r.amazonaws.com")
-      }
-
-    /**
-      * @return
-      *   these settings converted to an instance of [[akka.stream.alpakka.s3.S3Settings]]
-      */
-    def alpakkaSettings(config: StorageTypeConfig): s3.S3Settings = {
-
-      val keys = for {
-        cfg <- config.amazon
-      } yield cfg.defaultAccessKey.value -> cfg.defaultSecretKey.value
-
-      val credsProvider = keys match {
-        case Some((accessKey, secretKey)) =>
-          StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKey, secretKey))
-        case _ =>
-          StaticCredentialsProvider.create(AnonymousCredentialsProvider.create().resolveCredentials())
-      }
-
-      val regionProvider: AwsRegionProvider = new AwsRegionProvider {
-        val getRegion: Region = region.getOrElse(Region.US_EAST_1)
-      }
-
-      s3.S3Settings(MemoryBufferType, credsProvider, regionProvider, ApiVersion.ListBucketVersion2)
-        .withEndpointUrl(address(bucket).toString())
-    }
   }
 
   object S3StorageValue {
diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/MinioSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/MinioSpec.scala
deleted file mode 100644
index 963e7df1c5..0000000000
--- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/MinioSpec.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-//package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3
-//
-//import akka.actor.ActorSystem
-//import akka.stream.alpakka.s3.scaladsl.S3
-//import akka.stream.alpakka.s3.{BucketAccess, S3Attributes}
-//import cats.effect.IO
-//import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.StorageTypeConfig
-//import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue.S3StorageValue
-//import ch.epfl.bluebrain.nexus.testkit.minio.MinioDocker
-//import org.scalatest.{Suite, Suites}
-//
-//import java.net.URLDecoder
-//import java.nio.charset.StandardCharsets.UTF_8
-//
-//class MinioSpec extends Suites with MinioDocker {
-//  override val nestedSuites: IndexedSeq[Suite] = Vector(
-//    new S3StorageSaveAndFetchFileSpec(this)
-//  )
-//}
-//
-//object MinioSpec {
-//  def createBucket(
-//      value: S3StorageValue
-//  )(implicit config: StorageTypeConfig, system: ActorSystem): IO[Unit] = {
-//    implicit val attributes = S3Attributes.settings(value.alpakkaSettings(config))
-//
-//    IO.fromFuture(IO.delay(S3.checkIfBucketExists(value.bucket))).flatMap {
-//      case BucketAccess.NotExists => IO.delay(S3.makeBucket(value.bucket)).void
-//      case _ => IO.unit
-//    }
-//  }
-//
-//  def deleteBucket(
-//      value: S3StorageValue
-//  )(implicit config: StorageTypeConfig, system: ActorSystem): IO[Unit] = {
-//    implicit val attributes = S3Attributes.settings(value.alpakkaSettings(config))
-//
-//    IO.fromFuture(
-//      IO.delay(
-//        S3.listBucket(value.bucket, None)
-//          .withAttributes(attributes)
-//          .flatMapConcat { content =>
-//            S3.deleteObject(value.bucket, URLDecoder.decode(content.getKey, UTF_8.toString))
-//              .withAttributes(attributes)
-//          }
-//          .run()
-//      )
-//    ) >> IO.fromFuture(IO.delay(S3.deleteBucket(value.bucket))).void
-//  }
-//}
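
Note: the retained `awsSdk` dependency ("software.amazon.awssdk" % "s3") indicates the alpakka-based settings are being replaced by a plain AWS SDK v2 client. For orientation only (not part of this diff), below is a minimal sketch of how the removed `alpakkaSettings` logic could map onto the SDK v2 builder. The `s3Client` helper and its `accessKey`/`secretKey`/`region`/`endpoint` parameters are hypothetical stand-ins for the old `StorageTypeConfig` plumbing; the builder calls themselves are standard AWS SDK v2 API.

import software.amazon.awssdk.auth.credentials.{AnonymousCredentialsProvider, AwsBasicCredentials, AwsCredentialsProvider, StaticCredentialsProvider}
import software.amazon.awssdk.regions.Region
import software.amazon.awssdk.services.s3.S3AsyncClient

import java.net.URI

object S3ClientSketch {

  // Hypothetical replacement for the removed `alpakkaSettings`: same
  // credential fallback (static keys when configured, anonymous otherwise),
  // same region default, expressed against the AWS SDK v2 builder.
  def s3Client(
      accessKey: Option[String],
      secretKey: Option[String],
      region: Option[Region],
      endpoint: Option[URI]
  ): S3AsyncClient = {
    val credentials: AwsCredentialsProvider = (accessKey, secretKey) match {
      case (Some(ak), Some(sk)) => StaticCredentialsProvider.create(AwsBasicCredentials.create(ak, sk))
      case _                    => AnonymousCredentialsProvider.create()
    }
    val builder = S3AsyncClient
      .builder()
      .credentialsProvider(credentials)
      .region(region.getOrElse(Region.US_EAST_1))
    // endpointOverride plays the role of the virtual-host style
    // `address(bucket)` URL the removed code passed to withEndpointUrl.
    endpoint.fold(builder)(builder.endpointOverride).build()
  }
}

Keeping the credential fallback identical to the removed code preserves anonymous access against MinIO-style endpoints, which the deleted MinioSpec relied on for its test buckets.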