diff --git a/project/Versions.scala b/project/Versions.scala index 1dbd3295d..88cbf1a4a 100644 --- a/project/Versions.scala +++ b/project/Versions.scala @@ -41,5 +41,5 @@ object Versions { val sbtLucuma = "0.12.4" val scalaCollectionContrib = "0.4.0" val scalaJsDom = "2.8.0" - val scalaJsReact = "3.0.0-beta6" + val scalaJsReact = "3.0.0-beta7" } diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryData.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData.scala new file mode 100644 index 000000000..357e09abd --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData.scala @@ -0,0 +1,242 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +import japgolly.webapputil.general.ErrorMsg +import java.io.OutputStream +import java.lang.{StringBuilder => JStringBuilder} +import java.nio.ByteBuffer +import java.nio.charset.StandardCharsets +import java.util.{Arrays, Base64} +import scala.collection.immutable.ArraySeq +import cats.Eq + +object BinaryData extends BinaryData_PlatformSpecific_Object { + + implicit def univEq: Eq[BinaryData] = + Eq.fromUniversalEquals + + final val DefaultByteLimitInDesc = 50 + + def empty: BinaryData = + unsafeFromArray(new Array(0)) + + def byte(b: Byte): BinaryData = { + val a = new Array[Byte](1) + a(0) = b + unsafeFromArray(a) + } + + def fromArray(a: Array[Byte]): BinaryData = { + val a2 = Arrays.copyOf(a, a.length) + unsafeFromArray(a2) + } + + def fromArraySeq(a: ArraySeq[Byte]): BinaryData = + unsafeFromArray(a.unsafeArray.asInstanceOf[Array[Byte]]) + + def fromBase64(base64: String): Either[ErrorMsg, BinaryData] = + try + Right(fromBase64OrThrow(base64)) + catch { + case e: IllegalArgumentException => + Left(ErrorMsg("Invalid base64 data: " + e.getMessage)) + } + + def fromBase64OrThrow(base64: String): BinaryData = + unsafeFromArray(Base64.getDecoder.decode(base64)) + + def fromByteBuffer(bb: ByteBuffer): BinaryData = + if (bb.hasArray) { + val offset = bb.arrayOffset() + val a = Arrays.copyOfRange(bb.array(), offset, offset + bb.limit()) + unsafeFromArray(a) + } else { + val a = new Array[Byte](bb.remaining) + bb.get(a) + unsafeFromArray(a) + } + + def fromHex(hex: String): BinaryData = { + assert((hex.length & 1) == 0, "Hex strings must have an even length.") + var i = hex.length >> 1 + val bytes = new Array[Byte](i) + while (i > 0) { + i -= 1 + val si = i << 1 + val byteStr = hex.substring(si, si + 2) + val byte = java.lang.Integer.parseUnsignedInt(byteStr, 16).byteValue() + bytes(i) = byte + } + unsafeFromArray(bytes) + } + + /** + * unsafe because the array could be modified later and affect the underlying array we use here + */ + def unsafeFromArray(a: Array[Byte]): BinaryData = + new BinaryData(a, 0, a.length) + + /** + * unsafe because the ByteBuffer could be modified later and affect the underlying array we use + * here + */ + def unsafeFromByteBuffer(bb: ByteBuffer): BinaryData = + if (bb.hasArray) + new BinaryData(bb.array(), bb.arrayOffset(), bb.limit()) + else + fromByteBuffer(bb) + + def fromStringAsUtf8(str: String): BinaryData = + unsafeFromArray(str.getBytes(StandardCharsets.UTF_8)) +} + +/** Immutable blob of binary data. 
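+ *
+ * A small usage sketch (illustrative only; byte values assume standard UTF-8 and Base64):
+ * {{{
+ * val bin = BinaryData.fromStringAsUtf8("hi") // 2 bytes: 0x68, 0x69
+ * bin.hex                                     // "6869"
+ * bin.toBase64                                // "aGk="
+ * (bin ++ bin).length                         // 4
+ * bin.take(1).toStringAsUtf8                  // "h"
+ * }}}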
*/ +final class BinaryData( + private[BinaryData] val bytes: Array[Byte], + private[BinaryData] val offset: Int, + val length: Int +) extends BinaryData_PlatformSpecific_Instance { + + private val lastIndExcl = offset + length + + // Note: It's acceptable to have excess bytes beyond the declared length + assert(lastIndExcl <= bytes.length, + s"offset($offset) + length ($length) exceeds number of bytes (${bytes.length})" + ) + + override def toString = s"BinaryData(${describe()})" + + override def hashCode = + // Should use Arrays.hashCode() but have to copy to use provided length instead of array.length + offset * -947 + length + + override def equals(o: Any): Boolean = + o match { + case b: BinaryData => + @inline def sameRef = this eq b + @inline def sameLen = length == b.length + @inline def sameBin = + (0 until length).forall(i => bytes(offset + i) == b.bytes(b.offset + i)) + sameRef || (sameLen && sameBin) + case _ => + false + } + + @inline def isEmpty: Boolean = + length == 0 + + @inline def nonEmpty: Boolean = + length != 0 + + def duplicate: BinaryData = + BinaryData.unsafeFromArray(toNewArray) + + def describe(byteLimit: Int = BinaryData.DefaultByteLimitInDesc, sep: String = ",") = { + val byteDesc = describeBytes(byteLimit, sep) + val len = "%,d".format(length) + s"$len bytes: $byteDesc" + } + + def describeBytes(limit: Int = BinaryData.DefaultByteLimitInDesc, sep: String = ",") = { + var i = bytes.iterator.drop(offset).map(b => "%02X".format(b & 0xff)) + if (length > limit) + i = i.take(limit) ++ Iterator.single("…") + else + i = i.take(length) + i.mkString(sep) + } + + def writeTo(os: OutputStream): Unit = + os.write(bytes, offset, length) + + // Note: the below must remain a `def` because ByteBuffers themselves have mutable state + /** unsafe in that the underlying bytes could be modified via access to unsafeArray */ + def unsafeByteBuffer: ByteBuffer = + if (offset > 0) + ByteBuffer.wrap(bytes, 0, lastIndExcl).position(offset).slice() + else + ByteBuffer.wrap(bytes, 0, length) + + def toNewByteBuffer: ByteBuffer = + ByteBuffer.wrap(toNewArray, 0, length) + + def toNewArray: Array[Byte] = + Arrays.copyOfRange(bytes, offset, lastIndExcl) + + /** unsafe in that you might get back the underlying array which is mutable */ + lazy val unsafeArray: Array[Byte] = + if (offset == 0 && length == bytes.length) + bytes + else + toNewArray + + def binaryLikeString: String = { + val chars = new Array[Char](length) + var j = length + while (j > 0) { + j -= 1 + val b = bytes(offset + j) + val i = b.toInt & 0xff + chars.update(j, i.toChar) + } + String.valueOf(chars) + } + + def hex: String = + bytes.iterator + .slice(offset, lastIndExcl) + .map(b => "%02X".format(b & 0xff)) + .mkString + + def ++(that: BinaryData): BinaryData = + if (this.isEmpty) + that + else if (that.isEmpty) + this + else { + val a = new Array[Byte](length + that.length) + Array.copy(this.bytes, this.offset, a, 0, this.length) + Array.copy(that.bytes, that.offset, a, this.length, that.length) + BinaryData.unsafeFromArray(a) + } + + def drop(n: Int): BinaryData = { + val m = n.min(length) + new BinaryData(bytes, offset + m, length - m) + } + + def take(n: Int): BinaryData = { + val m = n.min(length) + new BinaryData(bytes, offset, m) + } + + def dropRight(n: Int): BinaryData = { + val m = n.min(length) + take(length - m) + } + + def takeRight(n: Int): BinaryData = { + val m = n.min(length) + drop(length - m) + } + + def toBase64: String = + Base64.getEncoder.encodeToString(unsafeArray) + + def appendBase64(sb: 
JStringBuilder): Unit = { + val b64 = Base64.getEncoder.encode(unsafeArray) + var i = 0 + while (i < b64.length) { + sb.append(b64(i).toChar) + i += 1 + } + } + + @inline def appendBase64(sb: StringBuilder): Unit = + appendBase64(sb.underlying) + + def toStringAsUtf8: String = + new String(unsafeArray, StandardCharsets.UTF_8) +} diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryData_PlatformSpecific.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData_PlatformSpecific.scala new file mode 100644 index 000000000..21792b7f5 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData_PlatformSpecific.scala @@ -0,0 +1,57 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +// ********** +// * * +// * JS * +// * * +// ********** + +import org.scalajs.dom.Blob +import scala.scalajs.js +import scala.scalajs.js.JSConverters._ +import scala.scalajs.js.typedarray.{ArrayBuffer, Uint8Array} + +trait BinaryData_PlatformSpecific_Object { self: BinaryData.type => + + def fromArrayBuffer(ab: ArrayBuffer): BinaryData = + BinaryData.fromByteBuffer(BinaryJs.arrayBufferToByteBuffer(ab)) + + def fromUint8Array(a: Uint8Array): BinaryData = + fromArrayBuffer(BinaryJs.uint8ArrayToArrayBuffer(a)) + + def unsafeFromArrayBuffer(ab: ArrayBuffer): BinaryData = + BinaryData.unsafeFromByteBuffer(BinaryJs.arrayBufferToByteBuffer(ab)) + + def unsafeFromUint8Array(a: Uint8Array): BinaryData = + unsafeFromArrayBuffer(BinaryJs.uint8ArrayToArrayBuffer(a)) +} + +trait BinaryData_PlatformSpecific_Instance { self: BinaryData => + + def toArrayBuffer: ArrayBuffer = + BinaryJs.byteBufferToArrayBuffer(self.unsafeByteBuffer) + + def toUint8Array: Uint8Array = + new Uint8Array(toArrayBuffer) + + def toBlob: Blob = + BinaryJs.byteBufferToBlob(self.unsafeByteBuffer) + + def toNewJsArray: js.Array[Byte] = + self.toNewArray.toJSArray + + def unsafeArrayBuffer: js.typedarray.ArrayBufferView = + BinaryJs.byteBufferToInt8Array(self.unsafeByteBuffer) + + def unsafeUint8Array: Uint8Array = + new Uint8Array(toArrayBuffer) + + def unsafeBlob: Blob = + BinaryJs.byteBufferToBlob(self.unsafeByteBuffer) + + def unsafeJsArray: js.Array[Byte] = + self.unsafeArray.toJSArray +} diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryFormat.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryFormat.scala new file mode 100644 index 000000000..709572c5e --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryFormat.scala @@ -0,0 +1,46 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +import japgolly.scalajs.react.AsyncCallback + +/** A means of converting instances of type `A` to a binary format and back. 
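+ *
+ * For example, a UTF-8 `String` format can be derived from the identity format via `xmap` (a sketch):
+ * {{{
+ * val utf8String: BinaryFormat[String] =
+ *   BinaryFormat.id.xmap(_.toStringAsUtf8)(BinaryData.fromStringAsUtf8)
+ * }}}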
*/ +final class BinaryFormat[A]( + val encode: A => AsyncCallback[BinaryData], + val decode: BinaryData => AsyncCallback[A] +) { + + def xmap[B](onDecode: A => B)(onEncode: B => A): BinaryFormat[B] = + // Delegating because decoding can fail and must be wrapped to be pure + xmapAsync(a => AsyncCallback.delay(onDecode(a)))(b => AsyncCallback.delay(onEncode(b))) + + def xmapAsync[B](onDecode: A => AsyncCallback[B])( + onEncode: B => AsyncCallback[A] + ): BinaryFormat[B] = + BinaryFormat.async(decode(_).flatMap(onDecode))(onEncode(_).flatMap(encode)) + + type ThisIsBinary = BinaryFormat[A] =:= BinaryFormat[BinaryData] + + // def encrypt(e: Encryption)(implicit ev: ThisIsBinary): BinaryFormat[BinaryData] = + // ev(this).xmapAsync(e.decrypt)(e.encrypt) + + // def compress(c: Compression)(implicit ev: ThisIsBinary): BinaryFormat[BinaryData] = + // ev(this).xmap(c.decompressOrThrow)(c.compress) +} + +object BinaryFormat { + + val id: BinaryFormat[BinaryData] = { + val f: BinaryData => AsyncCallback[BinaryData] = AsyncCallback.pure + async(f)(f) + } + + def apply[A](decode: BinaryData => A)(encode: A => BinaryData): BinaryFormat[A] = + async(b => AsyncCallback.delay(decode(b)))(a => AsyncCallback.delay(encode(a))) + + def async[A](decode: BinaryData => AsyncCallback[A])( + encode: A => AsyncCallback[BinaryData] + ): BinaryFormat[A] = + new BinaryFormat(encode, decode) +} diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryJs.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryJs.scala new file mode 100644 index 000000000..592e5148d --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryJs.scala @@ -0,0 +1,80 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +import java.nio.ByteBuffer +import org.scalajs.dom.{Blob, FileReader, window} +import scala.scalajs.js +import scala.scalajs.js.JSConverters._ +import scala.scalajs.js.typedarray.TypedArrayBufferOps._ +import scala.scalajs.js.typedarray._ + +object BinaryJs extends BinaryJs + +trait BinaryJs { + + final def arrayBufferToBlob(a: ArrayBuffer): Blob = + new Blob(js.Array(a)) + + @inline final def arrayBufferToByteBuffer(a: ArrayBuffer): ByteBuffer = + TypedArrayBuffer.wrap(a) + + final def base64ToByteBuffer(base64: String): ByteBuffer = { + val binstr = window.atob(base64) + val buf = new Int8Array(binstr.length) + var i = 0 + binstr.foreach { ch => + buf(i) = ch.toByte + i += 1 + } + TypedArrayBuffer.wrap(buf) + } + + final def blobToArrayBuffer(blob: Blob): ArrayBuffer = { + var arrayBuffer: ArrayBuffer = null + val fileReader = new FileReader() + fileReader.onload = e => arrayBuffer = e.target.asInstanceOf[js.Dynamic].result.asInstanceOf[ArrayBuffer] + fileReader.readAsArrayBuffer(blob) + assert(arrayBuffer != null) + arrayBuffer + } + + final def byteBufferToArrayBuffer(bb: ByteBuffer): ArrayBuffer = + int8ArrayToArrayBuffer(byteBufferToInt8Array(bb)) + + final def byteBufferToBlob(bb: ByteBuffer): Blob = + arrayBufferToBlob(byteBufferToArrayBuffer(bb)) + + final def byteBufferToInt8Array(bb: ByteBuffer): Int8Array = { + val limit = bb.limit() + if (bb.hasTypedArray()) + bb.typedArray() + else if (bb.hasArray) { + var array = bb.array() + val offset = bb.arrayOffset() + if (limit != array.length) + array = array.slice(offset, offset + limit) + new Int8Array(array.toJSArray) + } else { + val array = 
BinaryData.unsafeFromByteBuffer(bb).unsafeJsArray + new Int8Array(array) + } + } + + final def int8ArrayToArrayBuffer(v: Int8Array): ArrayBuffer = + arrayBufferViewToArrayBuffer(v) + + final def uint8ArrayToArrayBuffer(v: Uint8Array): ArrayBuffer = + arrayBufferViewToArrayBuffer(v) + + final private def arrayBufferViewToArrayBuffer(v: ArrayBufferView): ArrayBuffer = { + val off = v.byteOffset + val len = v.byteLength + if (len == v.buffer.byteLength) + v.buffer + else + v.buffer.slice(off, off + len) + } + +} diff --git a/workers/src/main/scala/japgolly/webapputil/boopickle/BinaryFormatExt.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/BinaryFormatExt.scala new file mode 100644 index 000000000..21a510f7b --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/BinaryFormatExt.scala @@ -0,0 +1,96 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import boopickle.{PickleImpl, Pickler, UnpickleImpl} +import japgolly.scalajs.react.callback.AsyncCallback +import japgolly.webapputil.binary._ +import java.nio.ByteBuffer +import scala.scalajs.js + +object BinaryFormatExt { + + trait Implicits { + + @inline final implicit def BinaryFormatBoopickleExt[A]( + self: BinaryFormat[A] + ): Implicits.BinaryFormatBoopickleExt[A] = + new Implicits.BinaryFormatBoopickleExt[A](self) + + @inline final implicit def BinaryFormatBoopickleStaticExt( + self: BinaryFormat.type + ): Implicits.BinaryFormatBoopickleStaticExt = + new Implicits.BinaryFormatBoopickleStaticExt(self) + } + + object Implicits extends Implicits { + + final class BinaryFormatBoopickleExt[A](private val self: BinaryFormat[A]) extends AnyVal { + type ThisIsBinary = BinaryFormat[A] =:= BinaryFormat[BinaryData] + + def pickle[B](implicit pickler: SafePickler[B], ev: ThisIsBinary): BinaryFormat[B] = + ev(self).xmap(pickler.decodeOrThrow)(pickler.encode) + + def pickleBasic[B](implicit pickler: Pickler[B], ev: ThisIsBinary): BinaryFormat[B] = { + val unpickle = UnpickleImpl[B] + ev(self) + .xmap[ByteBuffer](_.unsafeByteBuffer)(BinaryData.unsafeFromByteBuffer) + .xmap(unpickle.fromBytes(_))(PickleImpl.intoBytes(_)) + } + } + + final class BinaryFormatBoopickleStaticExt(private val self: BinaryFormat.type) extends AnyVal { + // @inline def pickleCompressEncrypt[A](c: Compression, e: Encryption)(implicit pickler: SafePickler[A]): BinaryFormat[A] = + // BinaryFormatExt.pickleCompressEncrypt(c, e) + + @inline def versioned[A]( + oldest: BinaryFormat[A], + toLatest: BinaryFormat[A]* + ): BinaryFormat[A] = + BinaryFormatExt.versioned(oldest, toLatest*) + } + } + + // =================================================================================================================== + + def versioned[A](oldest: BinaryFormat[A], toLatest: BinaryFormat[A]*): BinaryFormat[A] = { + val layers = oldest +: toLatest.toArray + val decoders = layers + val decoderIndices = decoders.indices + val latestVer = decoders.length - 1 + val latestVerHeader = BinaryData.byte(latestVer.toByte) + val encoder = layers.last + + def encode(a: A): AsyncCallback[BinaryData] = + encoder.encode(a).map(latestVerHeader ++ _) + + def decode(bin: BinaryData): AsyncCallback[A] = + AsyncCallback.suspend { + + if (bin.isEmpty) + throw js.JavaScriptException("No data") + + val ver = bin.unsafeArray(0).toInt + + if (decoderIndices.contains(ver)) { + val binBody = bin.drop(1) + 
decoders(ver).decode(binBody) + } else if (ver < 0) + throw js.JavaScriptException("Bad data") + else + SafePicklerUtil.unsupportedVer(ver, latestVer) + } + + BinaryFormat.async(decode)(encode) + } + + // def pickleCompressEncrypt[A](c: Compression, e: Encryption)(implicit + // pickler: SafePickler[A] + // ): BinaryFormat[A] = + // BinaryFormat.id + // .encrypt(e) // 3. Encryption is the very last step + // .compress(c) // 2. Compress the binary *before* encrypting + // .pickle[A] // 1. Generate binary first + +} diff --git a/workers/src/main/scala/japgolly/webapputil/boopickle/PicklerUtil.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/PicklerUtil.scala new file mode 100644 index 000000000..182411707 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/PicklerUtil.scala @@ -0,0 +1,305 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import boopickle.Decoder +import boopickle.DefaultBasic._ +import cats.data.Ior +// import japgolly.microlibs.nonempty._ +// import japgolly.microlibs.recursion._ +// import japgolly.microlibs.stdlib_ext.StdlibExt._ +// import japgolly.microlibs.utils.SafeBool +// import japgolly.univeq.UnivEq +import japgolly.webapputil.binary.BinaryData +import japgolly.webapputil.general.ErrorMsg +import java.nio.ByteBuffer +import java.time.Instant +import scala.collection.immutable.ArraySeq +import scala.reflect.ClassTag +import cats.Eq +import cats.data.NonEmptyVector +import cats.data.NonEmptySet +import scala.collection.immutable.SortedSet + +object PicklerUtil { + + // =================================================================================================================== + // Extension classes + + object Implicits { + + @inline implicit def boopickleUtilAnyRefPicklerExt[A <: AnyRef]( + a: Pickler[A] + ): AnyRefPicklerExt[A] = + new AnyRefPicklerExt[A](a) + + @inline implicit def boopickleUtilDecoderExt(a: Decoder): DecoderExt = + new DecoderExt(a) + } + + implicit final class AnyRefPicklerExt[A <: AnyRef](private val p: Pickler[A]) extends AnyVal { + + def reuseByEq(implicit ev: Eq[A]) = + new PickleWithReuse[A](p, true) + + def reuseByRef = + new PickleWithReuse[A](p, false) + + def narrow[B <: A: ClassTag]: Pickler[B] = + p.xmap[B] { + case b: B => b + case a => throw new IllegalArgumentException("Illegal supertype: " + a) + }(b => b) + } + + final class PickleWithReuse[A <: AnyRef](p: Pickler[A], byUnivEq: Boolean) extends Pickler[A] { + private val getP: (PickleState, A) => Option[Int] = + if (byUnivEq) _ immutableRefFor _ else _ identityRefFor _ + private val getU: (UnpickleState, Int) => A = + if (byUnivEq) _.immutableFor[A](_) else _.identityFor[A](_) + private val setP: (PickleState, A) => Unit = + if (byUnivEq) _ addImmutableRef _ else _ addIdentityRef _ + private val setU: (UnpickleState, A) => Unit = + if (byUnivEq) _ addImmutableRef _ else _ addIdentityRef _ + + override def pickle(value: A)(implicit state: PickleState): Unit = { + val ref = getP(state, value) + if (ref.isDefined) + state.enc.writeInt(-ref.get) + () + else { + state.enc.writeInt(0) + p.pickle(value) + setP(state, value) + } + } + override def unpickle(implicit state: UnpickleState): A = + state.dec.readIntCode match { + case Right(i) => + if (i == 0) { + val value = p.unpickle + setU(state, value) + value + } else + getU(state, -i) + case Left(_) => + throw new 
IllegalArgumentException("Unknown coding") + } + } + + implicit final class DecoderExt(private val self: Decoder) extends AnyVal { + def buf: ByteBuffer = + self match { + case a: boopickle.DecoderSpeed => a.buf + case a: boopickle.DecoderSize => a.buf + } + + def peek[A](f: Decoder => A): A = { + val b = buf + val p = b.position() + try f(self) + finally + b.position(p) + () + } + } + + // =================================================================================================================== + // Polymorphic definitions + // (non-implicit, "pickle" prefix) + + def pickleArraySeq[A](implicit pa: Pickler[A], ct: ClassTag[A]): Pickler[ArraySeq[A]] = + // Can't use boopickle.BasicPicklers.ArrayPickler here because internally, it uses writeRawInt to write length, + // where as IterablePickler uses writeInt. We need to be compatible because we're switching out a Vector for an + // ArraySeq in some impls without affecting the codec. + boopickle.BasicPicklers.IterablePickler[A, ArraySeq] + + def pickleEither[L: Pickler, R: Pickler]: Pickler[Either[L, R]] = + new Pickler[Either[L, R]] { + private final val KeyR = 0 + private final val KeyL = 1 + override def pickle(a: Either[L, R])(implicit state: PickleState): Unit = + a match { + case Right(r) => state.enc.writeByte(KeyR); state.pickle(r); () + case Left(l) => state.enc.writeByte(KeyL); state.pickle(l); () + } + override def unpickle(implicit state: UnpickleState): Either[L, R] = + state.dec.readByte match { + case KeyR => Right(state.unpickle[R]) + case KeyL => Left(state.unpickle[L]) + } + } + + def pickleEnum[V: Eq](nev: NonEmptyVector[V], firstValue: Int = 0): Pickler[V] = + new Pickler[V] { + private val fromInt = nev.toVector + private val toInt = fromInt.iterator.zipWithIndex.toMap + assert(toInt.size == nev.length, s"Duplicates found in $nev") + override def pickle(v: V)(implicit state: PickleState): Unit = { + val i = toInt(v) + firstValue + state.enc.writeInt(i) + () + } + override def unpickle(implicit state: UnpickleState): V = + state.dec.readIntCode match { + case Right(i) => fromInt(i - firstValue) + case Left(_) => throw new IllegalArgumentException("Invalid coding") + } + } + + // def pickleFix[F[_]: Functor](implicit p: Pickler[F[Unit]]): Pickler[Fix[F]] = + // new Pickler[Fix[F]] { + // override def pickle(f: Fix[F])(implicit state: PickleState): Unit = { + + // // val fUnit = Functor[F].void(f.unfix) + // // p.pickle(fUnit) + // // Functor[F].map(f.unfix)(pickle) + + // // Compared to ↑, this ↓ is generally on-par for small trees, and around 30% faster for larger, deeper trees + + // val fields = new collection.mutable.ArrayBuffer[Fix[F]](32) + // val fUnit = Functor[F].map(f.unfix) { a => + // fields += a + // () + // } + // p.pickle(fUnit) + // fields.foreach(pickle) + + // () + // } + + // override def unpickle(implicit state: UnpickleState) = { + // val fUnit = p.unpickle + // Fix(Functor[F].map(fUnit)(_ => unpickle)) + // } + // } + + def pickleIor[A: Pickler, B: Pickler]: Pickler[A Ior B] = + new Pickler[A Ior B] { + import Ior._ + private final val KeyLeft = 0 + private final val KeyRight = 1 + private final val KeyBoth = 2 + override def pickle(i: A Ior B)(implicit state: PickleState): Unit = + i match { + case Left(a) => state.enc.writeByte(KeyLeft); state.pickle(a); () + case Right(b) => state.enc.writeByte(KeyRight); state.pickle(b); () + case Both(a, b) => state.enc.writeByte(KeyBoth); state.pickle(a); state.pickle(b); () + } + override def unpickle(implicit state: UnpickleState): A Ior B = + 
state.dec.readByte match { + case KeyLeft => Left(state.unpickle[A]) + case KeyRight => Right(state.unpickle[B]) + case KeyBoth => + val a = state.unpickle[A] + val b = state.unpickle[B] + Both(a, b) + } + } + + def pickleLazily[A](f: => Pickler[A]): Pickler[A] = { + lazy val p = f + new Pickler[A] { + override def pickle(a: A)(implicit state: PickleState): Unit = p.pickle(a) + override def unpickle(implicit state: UnpickleState): A = p.unpickle + } + } + + def pickleMap[K: Pickler, V: Pickler]: Pickler[Map[K, V]] = + mapPickler[K, V, Map] + + // def pickleNEA[A](implicit p: Pickler[ArraySeq[A]]): Pickler[NonEmptyArraySeq[A]] = + // pickleNonEmpty(_.whole) + + def pickleNES[A: Eq](implicit p: Pickler[SortedSet[A]]): Pickler[NonEmptySet[A]] = + p.xmap(NonEmptySet.fromSetUnsafe)(_.toSortedSet) + + def pickleNEV[A](implicit p: Pickler[Vector[A]]): Pickler[NonEmptyVector[A]] = + p.xmap(NonEmptyVector.fromVectorUnsafe)(_.toVector) + + // def pickleNonEmpty[N, E]( + // f: N => E + // )(implicit p: Pickler[E], proof: NonEmpty.Proof[E, N]): Pickler[N] = + // p.xmap(NonEmpty require_! _)(f) + + // def pickleNonEmptyMono[A](implicit + // p: Pickler[A], + // proof: NonEmpty.ProofMono[A] + // ): Pickler[NonEmpty[A]] = + // pickleNonEmpty(_.value) + + private object _pickleNothing extends Pickler[AnyRef] { + override def pickle(obj: AnyRef)(implicit state: PickleState): Unit = () + override def unpickle(implicit state: UnpickleState): Nothing = throw new RuntimeException( + "This case is illegal." + ) + } + + def pickleNothing[A <: AnyRef]: Pickler[A] = + _pickleNothing.asInstanceOf[Pickler[A]] + + // def pickleSafeBoolValues[B <: SafeBool[B], A: Pickler]: Pickler[SafeBool.Values[B, A]] = + // transformPickler[SafeBool.Values[B, A], (A, A)](x => SafeBool.Values(pos = x._1, neg = x._2))( + // x => (x.pos, x.neg) + // ) + + // =================================================================================================================== + // Concrete picklers for base data type + // (implicit lazy vals, "pickler" prefix) + + implicit lazy val picklerBinaryData: Pickler[BinaryData] = + transformPickler(BinaryData.unsafeFromArray)(_.unsafeArray) + + def picklerBinaryDataFixedLength(len: Int): Pickler[BinaryData] = + new Pickler[BinaryData] { + + override def pickle(bin: BinaryData)(implicit state: PickleState): Unit = { + assert(bin.length == len) + val enc = state.enc + val bytes = bin.unsafeArray + var i = 0 + while (i < len) { + enc.writeByte(bytes(i)) + i += 1 + } + } + + override def unpickle(implicit state: UnpickleState): BinaryData = { + val dec = state.dec + val bytes = new Array[Byte](len) + var i = 0 + while (i < len) { + bytes(i) = dec.readByte + i += 1 + } + BinaryData.unsafeFromArray(bytes) + } + } + + implicit lazy val picklerErrorMsg: Pickler[ErrorMsg] = + transformPickler(ErrorMsg.apply)(_.value) + + implicit lazy val picklerErrorMsgOrUnit: Pickler[Either[ErrorMsg, Unit]] = + pickleEither + + implicit lazy val picklerInstant: Pickler[Instant] = + new Pickler[Instant] { + // EpochSecond is stored as a packed long (typically 5 bytes instead of 8 raw) + // Nano is stored as a raw int (4 bytes instead of typically 5 packed, P(27%) 4 packed) + override def pickle(i: Instant)(implicit state: PickleState): Unit = { + state.enc.writeLong(i.getEpochSecond) + state.enc.writeRawInt(i.getNano) + () + } + override def unpickle(implicit state: UnpickleState): Instant = { + val epochSecond = state.dec.readLong + val nano = state.dec.readRawInt + Instant.ofEpochSecond(epochSecond, nano) + } + } + + 
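+  // Usage sketch for the polymorphic picklers above, assuming a hypothetical application
+  // `Status` type with a `cats.Eq` instance:
+  //
+  //   implicit val picklerStatus: Pickler[Status] =
+  //     pickleEnum(NonEmptyVector.of(Status.Active, Status.Disabled))
+  //
+  //   implicit val picklerStatusOrError: Pickler[Either[ErrorMsg, Status]] =
+  //     pickleEither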
implicit lazy val picklerNonEmptyVectorInt: Pickler[NonEmptyVector[Int]] = + pickleNEV + +} diff --git a/workers/src/main/scala/japgolly/webapputil/boopickle/SafePickler.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePickler.scala new file mode 100644 index 000000000..b541391da --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePickler.scala @@ -0,0 +1,234 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import cats.syntax.eq.* +import boopickle.{PickleState, Pickler, UnpickleState} +import japgolly.webapputil.binary.BinaryData +import japgolly.webapputil.general.Version +import japgolly.webapputil.general.Version.ordering.mkOrderingOps +import scala.annotation.elidable +import scala.util.control.NonFatal + +/** + * Binary codec (pickler). Differs from out-of-the-box [[Pickler]] in the following ways: + * + * - decoding is pure: an error value is returned on failure + * - supports magic numbers at header and footer as a partial message integrity check + * - supports protocol versioning and evolution + */ +final case class SafePickler[A]( + header: Option[MagicNumber], + footer: Option[MagicNumber], + version: Version, + body: Pickler[A] +) { + + type Data = A + + import boopickle.{PickleImpl, UnpickleImpl} + import SafePickler._ + + // def i1 = "0x%08X".format(new util.Random().nextInt); def i2 = s"$i1, $i1"; def i4 = s"$i2 $i2" + def withMagicNumbers(header: Int, footer: Int): SafePickler[A] = + copy(Some(MagicNumber(header)), Some(MagicNumber(footer))) + + def withMagicNumberFooter(footer: Int): SafePickler[A] = + copy(footer = Some(MagicNumber(footer))) + + def map[B](f: Pickler[A] => Pickler[B]): SafePickler[B] = + copy(body = f(body)) + + private val picklerHeader = header.map(pickleMagicNumber(version, _)) + private val picklerFooter = footer.map(pickleMagicNumber(version, _)) + private val picklerVersion = pickleVersion(version) + + private val picklerCombined: Pickler[A] = + new Pickler[A] { + + override def pickle(a: A)(implicit state: PickleState): Unit = { + picklerHeader.foreach(_.pickle(())) + picklerVersion.pickle(version) + body.pickle(a) + picklerFooter.foreach(_.pickle(())) + } + + override def unpickle(implicit state: UnpickleState): A = { + picklerHeader.foreach(_.unpickle) + val v = picklerVersion.unpickle + if (v.major =!= version.major) + throw DecodingFailure.UnsupportedMajorVer(localVer = version, actual = v) + try { + val a = body.unpickle + picklerFooter.foreach(_.unpickle) + a + } catch { + case e: Throwable => throw new VerAndErr(v, e) + } + } + } + + def encode(a: A): BinaryData = { + val bb = PickleImpl.intoBytes(a)(implicitly, picklerCombined) + BinaryData.unsafeFromByteBuffer(bb) + } + + private def wrapRead(unpickle: => A): SafePickler.Result[A] = + try + Right(unpickle) + catch { + case e: EmbeddedFailure => Left(e.failure) + case e: VerAndErr => DecodingFailure.fromException(version, e.err, Some(e.ver)) + case e: Throwable => DecodingFailure.fromException(version, e, None) + } + + def decode(bin: BinaryData): SafePickler.Result[A] = + wrapRead(UnpickleImpl(picklerCombined).fromBytes(bin.unsafeByteBuffer)) + + def decodeOrThrow(bin: BinaryData): A = + decode(bin).fold(throw _, identity) + + val decodeBytes: Array[Byte] => SafePickler.Result[A] = + bytes => decode(BinaryData.unsafeFromArray(bytes)) + + def embeddedWrite(a: A)(implicit 
state: PickleState): Unit = + picklerCombined.pickle(a) + + def embeddedRead(implicit state: UnpickleState): A = + wrapRead(picklerCombined.unpickle) match { + case Right(a) => a + case Left(e) => throw new EmbeddedFailure(e) + } +} + +object SafePickler { + + type Result[+A] = Either[DecodingFailure, A] + + def success[A](a: A): Result[A] = + Right(a) + + sealed trait DecodingFailure extends RuntimeException { + val localVer: Version + val upstreamVer: Option[Version] + + def isLocalKnownToBeOutOfDate: Boolean = + upstreamVer.exists(localVer < _) + + def isUpstreamKnownToBeOutOfDate: Boolean = + upstreamVer.exists(_ < localVer) + } + + object DecodingFailure { + + final case class UnsupportedMajorVer(localVer: Version, actual: Version) + extends RuntimeException + with DecodingFailure { + override val upstreamVer = Some(actual) + } + + final case class MagicNumberMismatch( + localVer: Version, + actual: MagicNumber, + expected: MagicNumber, + upstreamVer: Option[Version] + ) extends RuntimeException + with DecodingFailure + + final case class InvalidVersion(localVer: Version, major: Int, minor: Int) + extends RuntimeException + with DecodingFailure { + override val upstreamVer = None + } + + final case class ExceptionOccurred( + localVer: Version, + exception: Throwable, + upstreamVer: Option[Version] + ) extends RuntimeException + with DecodingFailure { + + @elidable(elidable.ASSERTION) + private def devOnly(): Unit = + exception.printStackTrace(System.err) + + devOnly() + } + + def fromException( + localVer: Version, + err: Throwable, + upstreamVer: Option[Version] + ): Result[Nothing] = + err match { + case MagicNumberMismatch(_, a, b, None) => + Left(MagicNumberMismatch(localVer, a, b, upstreamVer)) + case e: DecodingFailure => Left(e) + case e: EmbeddedFailure => Left(e.failure) + case e: StackOverflowError => + Left(DecodingFailure.ExceptionOccurred(localVer, e, upstreamVer)) + case NonFatal(e) => Left(DecodingFailure.ExceptionOccurred(localVer, e, upstreamVer)) + case e => throw e + } + } + + private[SafePickler] final class VerAndErr(val ver: Version, val err: Throwable) + extends RuntimeException + + private[SafePickler] final class EmbeddedFailure(val failure: DecodingFailure) + extends RuntimeException + + private[SafePickler] def pickleVersion(localVer: Version): Pickler[Version] = + new Pickler[Version] { + override def pickle(a: Version)(implicit state: PickleState): Unit = { + state.enc.writeInt(a.major.value) + state.enc.writeInt(a.minor.value) + () + } + + override def unpickle(implicit state: UnpickleState): Version = { + val major = state.dec.readInt + val minor = state.dec.readInt + val minOk = major >= 1 && minor >= 0 + val maxOk = major <= 4 && minor <= 100 + if (minOk && maxOk) + Version.fromInts(major, minor) + else + throw DecodingFailure.InvalidVersion(localVer, major, minor) + } + } + + private[SafePickler] def pickleMagicNumber(localVer: Version, real: MagicNumber): Pickler[Unit] = + new Pickler[Unit] { + override def pickle(a: Unit)(implicit state: PickleState): Unit = + state.enc.writeRawInt(real.value) + () + + override def unpickle(implicit state: UnpickleState): Unit = { + val found = state.dec.readRawInt + if (found != real.value) + throw DecodingFailure.MagicNumberMismatch(localVer = localVer, + actual = MagicNumber(found), + expected = real, + upstreamVer = None + ) + } + } + + object ConstructionHelperImplicits { + implicit class SafePickler_PicklerExt[A](private val self: Pickler[A]) extends AnyVal { + @inline def asVersion(major: Int, minor: Int): 
SafePickler[A] = asVersion( + Version.fromInts(major, minor) + ) + def asVersion(v: Version): SafePickler[A] = SafePickler(None, None, v, self) + def asV1(minorVer: Int): SafePickler[A] = asVersion(Version.v1(minorVer)) + def asV2(minorVer: Int): SafePickler[A] = asVersion(Version.v2(minorVer)) + } + } +} + +final case class MagicNumber(value: Int) { + override def toString = s"MagicNumber(0x$hex)" + def hex = "%08X".format(value) +} diff --git a/workers/src/main/scala/japgolly/webapputil/boopickle/SafePicklerUtil.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePicklerUtil.scala new file mode 100644 index 000000000..976893213 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePicklerUtil.scala @@ -0,0 +1,64 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import boopickle.{PickleState, UnpickleState} +import japgolly.webapputil.general.Version + +object SafePicklerUtil { + import PicklerUtil._ + + final case class UnsupportedVersionException(found: Version, maxSupported: Version) + extends RuntimeException( + s"${found.verStr} not supported. ${maxSupported.verStr} is the max supported." + ) + + case object CorruptData extends RuntimeException("Corrupt data.") + + /** + * Used to add a codec version to a binary protocol whilst retaining backwards-compatibility with + * the unversioned case. + */ + final val VersionHeader = -99988999 + + def writeVersion(ver: Int)(implicit state: PickleState): Unit = { + assert(ver > 0) // v1.0 is the default and doesn't need a version header + state.enc.writeInt(VersionHeader) + state.enc.writeInt(ver) + () + } + + def unsupportedVer(ver: Int, maxSupportedVer: Int): Nothing = + throw UnsupportedVersionException(found = Version.v1(ver), + maxSupported = Version.v1(maxSupportedVer) + ) + + def readByVersion[A]( + maxSupportedVer: Int + )(f: PartialFunction[Int, A])(implicit state: UnpickleState): A = { + assert(maxSupportedVer > 0) + + def unsupportedVer(ver: Int): Nothing = + SafePicklerUtil.unsupportedVer(ver, maxSupportedVer) + + def readVer(ver: Int): A = + f.applyOrElse[Int, A](ver, unsupportedVer) + + state.dec.peek(_.readInt) match { + case VersionHeader => + state.dec.readInt + val ver = state.dec.readInt + if (ver <= 0) + throw CorruptData + if ( + ver > maxSupportedVer + ) // preempt using the partial function in case maxSupportedVer is incorrect + unsupportedVer(ver) + readVer(ver) + case _ => + readVer(0) + } + } + +} diff --git a/workers/src/main/scala/japgolly/webapputil/general/ErrorMsg.scala b/workers/src/main/scala/japgolly/webapputil/general/ErrorMsg.scala new file mode 100644 index 000000000..4deb63308 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/general/ErrorMsg.scala @@ -0,0 +1,56 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.general + +import cats.Eq + +final case class ErrorMsg(value: String) { + + // Keep this as a val so that the stack trace points to where the error was created, as opposed to thrown. 
+ val exception: ErrorMsg.Exception = + ErrorMsg.Exception(this) + + def throwException(): Nothing = + throw exception + + def modMsg(f: String => String): ErrorMsg = { + val e = ErrorMsg(f(value)) + e.exception.setStackTrace(exception.getStackTrace) + e + } + + def withPrefix(s: String): ErrorMsg = + modMsg(s + _) +} + +object ErrorMsg { + + implicit def univEq: Eq[ErrorMsg] = + Eq.by(_.value) + + def fromThrowable(t: Throwable): ErrorMsg = + apply(Option(t.getMessage).getOrElse(t.toString).trim) + + def errorOccurred(t: Throwable): ErrorMsg = + Option(t.getMessage).map(_.trim).filter(_.nonEmpty) match { + case Some(m) => ErrorMsg("Error occurred: " + m) + case None => ErrorMsg("Error occurred.") + } + + object ClientSide { + def errorContactingServer = ErrorMsg("Error contacting server. Please try again.") + def failedToParseResponse = ErrorMsg("Failed to understand the response from the server.") + def noCompatibleServer = ErrorMsg( + "Failed to find a compatible server. Please try again, or try reloading the page." + ) + def serverCallTimeout = ErrorMsg( + "Server didn't respond. Please check your internet connectivity." + ) + def serverProtocolIsNewer = ErrorMsg( + "Our servers have been upgraded to a newer version. Please reload this page and try again." + ) + } + + final case class Exception(msg: ErrorMsg) extends RuntimeException(msg.value) +} diff --git a/workers/src/main/scala/japgolly/webapputil/general/Version.scala b/workers/src/main/scala/japgolly/webapputil/general/Version.scala new file mode 100644 index 000000000..93c1591ed --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/general/Version.scala @@ -0,0 +1,59 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.general + +import cats.Eq +import cats.syntax.option.* +import scala.collection.mutable + +final case class Version(major: Version.Major, minor: Version.Minor) { + override def toString = verStr + def verNum = s"${major.value}.${minor.value}" + def verStr = "v" + verNum +} + +object Version { + + def fromInts(major: Int, minor: Int): Version = + Version(Major(major), Minor(minor)) + + final case class Major(value: Int) { + assert(value >= 1) + } + + final case class Minor(value: Int) { + assert(value >= 0) + } + + implicit val univEqMajor: Eq[Major] = Eq.by(_.value) + implicit val univEqMinor: Eq[Minor] = Eq.by(_.value) + implicit val univEq: Eq[Version] = Eq.by(v => (v.major, v.minor)) + + implicit val ordering: Ordering[Version] = + new Ordering[Version] { + override def compare(x: Version, y: Version): Int = { + val i = x.major.value - y.major.value + if (i != 0) + i + else + x.minor.value - y.minor.value + } + } + + private val memoV1: mutable.Map[Int, Version] = mutable.Map.empty + def v1(minorVer: Int): Version = + memoV1 + .updateWith(minorVer): + case None => Version.fromInts(1, minorVer).some + case other => other + .get + + private val memoV2: mutable.Map[Int, Version] = mutable.Map.empty + def v2(minorVer: Int): Version = + memoV2 + .updateWith(minorVer): + case None => Version.fromInts(2, minorVer).some + case other => other + .get +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDb.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDb.scala index 5f59af66b..e54e4f1ce 100644 --- a/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDb.scala +++ 
b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDb.scala @@ -6,7 +6,6 @@ package japgolly.webapputil.indexeddb import cats.kernel.Eq import japgolly.scalajs.react._ import japgolly.scalajs.react.util.Util.{identity => identityFn} -import japgolly.univeq.UnivEqCats._ import japgolly.webapputil.indexeddb.TxnMode._ import org.scalajs.dom._ import scala.annotation.elidable @@ -32,7 +31,7 @@ final class IndexedDb(raw: IDBFactory) { // r.onblocked = callbacks.blocked.toJsFn1 r.onupgradeneeded = e => { - val db = new DatabaseInVersionChange(r.result) + val db = new DatabaseInVersionChange(r.result) val args = versionChange(db, e) callbacks.upgradeNeeded(args).runNow() } @@ -43,7 +42,7 @@ final class IndexedDb(raw: IDBFactory) { asyncRequest(create()) { r => val rawDb = r.result - rawDb.onversionchange = e => { + rawDb.onversionchange = e => try { val args = versionChange(new DatabaseInVersionChange(rawDb), e) callbacks.versionChange(args).runNow() @@ -57,11 +56,8 @@ final class IndexedDb(raw: IDBFactory) { // https://developer.mozilla.org/en-US/docs/Web/API/IDBDatabase/onclose callbacks.closed.runNow() } - } - rawDb.onclose = _ => { - callbacks.closed.runNow() - } + rawDb.onclose = _ => callbacks.closed.runNow() new Database(rawDb, onClose = callbacks.closed) } @@ -79,9 +75,9 @@ object IndexedDb { new IndexedDb(raw) def global(): Option[IndexedDb] = - try { + try window.indexedDB.toOption.map(apply) - } catch { + catch { case _: Throwable => None } @@ -94,21 +90,25 @@ object IndexedDb { type OpenResult = OpenCallbacks => AsyncCallback[Database] - /** Callbacks to install when opening a DB. + /** + * Callbacks to install when opening a DB. * * Note 1: On `versionChange`, the DB connection will be closed automatically. * - * Note 2: There's no `blocked` handler because we currently don't allow blocking. To quote the idb spec: - * if "there are open connections that don’t close in response to a versionchange event, the request will be - * blocked until all they close". + * Note 2: There's no `blocked` handler because we currently don't allow blocking. To quote the + * idb spec: if "there are open connections that don’t close in response to a versionchange event, + * the request will be blocked until all they close". */ - final case class OpenCallbacks(upgradeNeeded: VersionChange => Callback, - versionChange: VersionChange => Callback = _ => Callback.empty, - closed : Callback = Callback.empty) + final case class OpenCallbacks( + upgradeNeeded: VersionChange => Callback, + versionChange: VersionChange => Callback = _ => Callback.empty, + closed: Callback = Callback.empty + ) - final case class Error(event: ErrorEvent) extends RuntimeException( - event.asInstanceOf[js.Dynamic].message.asInstanceOf[js.UndefOr[String]].getOrElse(null) - ) { + final case class Error(event: ErrorEvent) + extends RuntimeException( + event.asInstanceOf[js.Dynamic].message.asInstanceOf[js.UndefOr[String]].getOrElse(null) + ) { // Note: allowing .message to be undefined is presumably only required due to use of fake-indexeddb in tests val msg: String = @@ -118,17 +118,21 @@ object IndexedDb { override def toString = s"IndexedDb.Error($msg)" - def isStoredDatabaseHigherThanRequested: Boolean = { + def isStoredDatabaseHigherThanRequested: Boolean = // Chrome: The requested version (1) is less than the existing version (2). // Firefox: The operation failed because the stored database is a higher version than the version requested. 
msg.contains("version") && (msg.contains("higher") || msg.contains("less than")) - } } - final case class VersionChange(db: DatabaseInVersionChange, oldVersion: Int, newVersion: Option[Int]) { + final case class VersionChange( + db: DatabaseInVersionChange, + oldVersion: Int, + newVersion: Option[Int] + ) { def createObjectStore[K, V](defn: ObjectStoreDef[K, V], createdInDbVer: Int): Callback = Callback.when(oldVersion < createdInDbVer && newVersion.exists(_ >= createdInDbVer))( - db.createObjectStore(defn)) + db.createObjectStore(defn) + ) } final class DatabaseInVersionChange(raw: IDBDatabase) { @@ -153,7 +157,7 @@ object IndexedDb { actuallyClose >> onClose } - def compareAndSet(stores: ObjectStoreDef[_, _]*): CasDsl1 = + def compareAndSet(stores: ObjectStoreDef[?, ?]*): CasDsl1 = new CasDsl1(this, stores) def transactionRO: RunTxnDsl1[RO] = @@ -173,11 +177,15 @@ object IndexedDb { transactionRW(store)(_.objectStore(store).flatMap(_.add(key, value))) /** Note: insert only */ - def addWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def addWhenDefined[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = AsyncCallback.traverseOption_(value)(add(store)(key, _)) /** Note: insert only */ - def addWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def addWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = AsyncCallback.traverseOption_(value)(add(store)(key, _)) def clear[K, V](store: ObjectStoreDef[K, V]): AsyncCallback[Unit] = @@ -210,25 +218,33 @@ object IndexedDb { transactionRW(store)(_.objectStore(store).flatMap(_.put(key, value))) /** aka upsert or delete */ - def putOrDelete[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putOrDelete[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value match { case Some(v) => put(store)(key, v) case None => delete(store)(key) } /** aka upsert or delete */ - def putOrDelete[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putOrDelete[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value match { case Some(v) => put(store)(key, v) case None => delete(store)(key) } /** aka upsert */ - def putWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = AsyncCallback.traverseOption_(value)(put(store)(key, _)) /** aka upsert */ - def putWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putWhenDefined[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = AsyncCallback.traverseOption_(value)(put(store)(key, _)) } // class Database @@ -268,7 +284,7 @@ object IndexedDb { case Some(v1) => val v2 = f(v1) put(key, v2).map(_ => Some(v2)) - case None => + case None => TxnDslRW.none } @@ -302,25 +318,31 @@ object IndexedDb { final class AtomicAsyncDsl[K, V](db: Database, store: ObjectStoreDef.Async[K, V]) { - /** Performs an async modification on a store value. - * - * This only modifies an existing value. Use [[modifyAsyncOption()]] to upsert and/or delete values. 
- * - * This uses [[compareAndSet()]] for atomicity and thread-safety. - * - * @return If the value exists, this returns the previous and updated values - */ + /** + * Performs an async modification on a store value. + * + * This only modifies an existing value. Use [[modifyAsyncOption()]] to upsert and/or delete + * values. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. + * + * @return + * If the value exists, this returns the previous and updated values + */ def modify(key: K)(f: V => V): AsyncCallback[Option[(V, V)]] = modifyAsync(key)(v => AsyncCallback.pure(f(v))) - /** Performs an async modification on a store value. - * - * This only modifies an existing value. Use [[modifyAsyncOption()]] to upsert and/or delete values. - * - * This uses [[compareAndSet()]] for atomicity and thread-safety. - * - * @return If the value exists, this returns the previous and updated values - */ + /** + * Performs an async modification on a store value. + * + * This only modifies an existing value. Use [[modifyAsyncOption()]] to upsert and/or delete + * values. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. + * + * @return + * If the value exists, this returns the previous and updated values + */ def modifyAsync(key: K)(f: V => AsyncCallback[V]): AsyncCallback[Option[(V, V)]] = db .compareAndSet(store) @@ -328,35 +350,50 @@ object IndexedDb { .mapAsync(AsyncCallback.traverseOption(_)(v1 => f(v1).map((v1, _)))) .putResultWhenDefinedBy(store)(key, _.map(_._2)) - /** Performs an async modification on an optional store value. - * - * This uses [[compareAndSet()]] for atomicity and thread-safety. - * - * @return The previous and updated values - */ + /** + * Performs an async modification on an optional store value. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. + * + * @return + * The previous and updated values + */ def modifyOption(key: K)(f: Option[V] => Option[V]): AsyncCallback[(Option[V], Option[V])] = modifyOptionAsync(key)(v => AsyncCallback.pure(f(v))) - /** Performs an async modification on an optional store value. - * - * This uses [[compareAndSet()]] for atomicity and thread-safety. - * - * @return The previous and updated values - */ - def modifyOptionAsync(key: K)(f: Option[V] => AsyncCallback[Option[V]]): AsyncCallback[(Option[V], Option[V])] = + /** + * Performs an async modification on an optional store value. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. 
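+ *
+ * Sketch, given an instance `atomic` of this class for a store whose values are `Int` (names illustrative):
+ * {{{
+ * atomic.modifyOptionAsync(key)(o => AsyncCallback.pure(Some(o.getOrElse(0) + 1)))
+ * }}}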
+ * + * @return + * The previous and updated values + */ + def modifyOptionAsync( + key: K + )(f: Option[V] => AsyncCallback[Option[V]]): AsyncCallback[(Option[V], Option[V])] = db .compareAndSet(store) .getValueAsync(store)(key) - .mapAsync { o1 => f(o1).map((o1, _)) } + .mapAsync(o1 => f(o1).map((o1, _))) .putOrDeleteResultBy(store)(key, _._2) } - final class RunTxnDsl1[M <: TxnMode] private[IndexedDb](raw: IDBDatabase, txnDsl: TxnDsl[M], mode: IDBTransactionMode) { - def apply(stores: ObjectStoreDef[_, _]*): RunTxnDsl2[M] = + final class RunTxnDsl1[M <: TxnMode] private[IndexedDb] ( + raw: IDBDatabase, + txnDsl: TxnDsl[M], + mode: IDBTransactionMode + ) { + def apply(stores: ObjectStoreDef[?, ?]*): RunTxnDsl2[M] = new RunTxnDsl2(raw, txnDsl, mode, mkStoreArray(stores)) } - final class RunTxnDsl2[M <: TxnMode] private[IndexedDb](raw: IDBDatabase, txnDsl: TxnDsl[M], mode: IDBTransactionMode, stores: js.Array[String]) { + final class RunTxnDsl2[M <: TxnMode] private[IndexedDb] ( + raw: IDBDatabase, + txnDsl: TxnDsl[M], + mode: IDBTransactionMode, + stores: js.Array[String] + ) { def apply[A](f: TxnDsl[M] => Txn[M, A]): AsyncCallback[A] = { val x = CallbackTo.pure(f(txnDsl)) @@ -368,9 +405,7 @@ object IndexedDb { @inline def startRawTxn(complete: Try[Unit] => Callback) = { val txn = raw.transaction(stores, mode) - txn.onerror = event => { - complete(Failure(Error(event))).runNow() - } + txn.onerror = event => complete(Failure(Error(event))).runNow() txn.oncomplete = complete(success_).toJsFn1 @@ -383,9 +418,9 @@ object IndexedDb { (awaitTxnCompletion, complete) <- AsyncCallback.promise[Unit].asAsyncCallback result <- AsyncCallback.suspend { - val txn = startRawTxn(complete) - interpretTxn(txn, dsl) - } + val txn = startRawTxn(complete) + interpretTxn(txn, dsl) + } _ <- awaitTxnCompletion @@ -401,7 +436,7 @@ object IndexedDb { dsl(txnDsl).flatMap(txnA => apply(_ => txnA)) } - final class CasDsl1(db: Database, stores: Seq[ObjectStoreDef[_, _]]) { + final class CasDsl1(db: Database, stores: Seq[ObjectStoreDef[?, ?]]) { def get[A](f: TxnDsl[RO] => Txn[RO, A])(implicit e: Eq[A]) = getAndCompareBy(f)(e.eqv) @@ -413,22 +448,32 @@ object IndexedDb { new CasDsl2[A](db, stores, f(TxnDslRO), eql) /** Note: CAS comparison is on the raw IDB value, i.e. the result prior to async decoding */ - def getValueAsync[K, V](store: ObjectStoreDef.Async[K, V])(key: K): CasDsl3[Option[store.Value], Option[V]] = + def getValueAsync[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K): CasDsl3[Option[store.Value], Option[V]] = get(_.objectStore(store).flatMap(_.get(key))) .mapAsync(AsyncCallback.traverseOption(_)(_.decode)) /** Note: CAS comparison is on `Option[V]`, i.e. 
the decoded result */ - def getValueSync[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit e: Eq[Option[V]]): CasDsl2[Option[V]] = + def getValueSync[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + e: Eq[Option[V]] + ): CasDsl2[Option[V]] = get(_.objectStore(store).flatMap(_.get(key))) - def getAllKeys[K, V](store: ObjectStoreDef[K, V])(implicit e: Eq[Vector[K]]): CasDsl2[Vector[K]] = + def getAllKeys[K, V](store: ObjectStoreDef[K, V])(implicit + e: Eq[Vector[K]] + ): CasDsl2[Vector[K]] = get(_.objectStore(store.sync).flatMap(_.getAllKeys)) - def getAllValuesAsync[K, V](store: ObjectStoreDef.Async[K, V]): CasDsl3[Vector[store.Value], Vector[V]] = + def getAllValuesAsync[K, V]( + store: ObjectStoreDef.Async[K, V] + ): CasDsl3[Vector[store.Value], Vector[V]] = get(_.objectStore(store).flatMap(_.getAllValues)) .mapAsync(AsyncCallback.traverse(_)(_.decode)) - def getAllValuesSync[K, V](store: ObjectStoreDef.Sync[K, V])(implicit e: Eq[Vector[V]]): CasDsl2[Vector[V]] = + def getAllValuesSync[K, V](store: ObjectStoreDef.Sync[K, V])(implicit + e: Eq[Vector[V]] + ): CasDsl2[Vector[V]] = get(_.objectStore(store.sync).flatMap(_.getAllValues)) } @@ -442,7 +487,12 @@ object IndexedDb { mapAsync(f(_).asAsyncCallback) } - final class CasDsl2[A](db: Database, stores: Seq[ObjectStoreDef[_, _]], get: Txn[RO, A], eql: (A, A) => Boolean) extends CasDsl23[A, A] { + final class CasDsl2[A]( + db: Database, + stores: Seq[ObjectStoreDef[?, ?]], + get: Txn[RO, A], + eql: (A, A) => Boolean + ) extends CasDsl23[A, A] { private def next = mapAsync(AsyncCallback.pure) @@ -450,17 +500,23 @@ object IndexedDb { new CasDsl3[A, C](db, stores, get, eql, f) def set[C](set: TxnDsl[RW] => A => Txn[RW, C]): AsyncCallback[C] = - next.set { dsl => (a, _) => set(dsl)(a) } + next.set(dsl => (a, _) => set(dsl)(a)) } - final class CasDsl3[A, B](db: Database, stores: Seq[ObjectStoreDef[_, _]], get: Txn[RO, A], eql: (A, A) => Boolean, prep: A => AsyncCallback[B]) extends CasDsl23[A, B] { + final class CasDsl3[A, B]( + db: Database, + stores: Seq[ObjectStoreDef[?, ?]], + get: Txn[RO, A], + eql: (A, A) => Boolean, + prep: A => AsyncCallback[B] + ) extends CasDsl23[A, B] { override def mapAsync[C](f: B => AsyncCallback[C]) = new CasDsl3[A, C](db, stores, get, eql, prep(_).flatMap(f)) def set[C](set: TxnDsl[RW] => (A, B) => Txn[RW, C]): AsyncCallback[C] = { - val txnRO = db.transactionRO(stores: _*) - val txnRW = db.transactionRW(stores: _*) + val txnRO = db.transactionRO(stores*) + val txnRW = db.transactionRW(stores*) def loopTxn(a: A, b: B): AsyncCallback[Either[A, C]] = txnRW { dsl => @@ -501,53 +557,67 @@ object IndexedDb { /** Note: insert only */ def addResultBy[K, V](store: ObjectStoreDef.Async[K, V])(key: K, f: B => V): AsyncCallback[B] = - mapAsync { b => store.encode(f(b)).map((b, _)) } - .addResultBy(store.sync)(key, _._2) - .map(_._1) + mapAsync(b => store.encode(f(b)).map((b, _))) + .addResultBy(store.sync)(key, _._2) + .map(_._1) /** Note: insert only */ def addResultBy[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, f: B => V): AsyncCallback[B] = - set(dsl => (_, b) => - for { - s <- dsl.objectStore(store) - _ <- s.add(key, f(b)) - } yield b + set(dsl => + (_, b) => + for { + s <- dsl.objectStore(store) + _ <- s.add(key, f(b)) + } yield b ) /** Note: insert only */ - @inline def addResultWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit ev: B => Option[V]): AsyncCallback[B] = + @inline def addResultWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit + ev: B => Option[V] 
+ ): AsyncCallback[B] = addResultWhenDefinedBy(store)(key, ev) /** Note: insert only */ - @inline def addResultWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit ev: B => Option[V]): AsyncCallback[B] = + @inline def addResultWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = addResultWhenDefinedBy(store)(key, ev) /** Note: insert only */ - def addResultWhenDefinedBy[K, V](store: ObjectStoreDef.Async[K, V])(key: K, f: B => Option[V]): AsyncCallback[B] = - mapAsync { b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _)) } - .addResultWhenDefinedBy(store.sync)(key, _._2) - .map(_._1) + def addResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + mapAsync(b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _))) + .addResultWhenDefinedBy(store.sync)(key, _._2) + .map(_._1) /** Note: insert only */ - def addResultWhenDefinedBy[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, f: B => Option[V]): AsyncCallback[B] = - set(dsl => (_, b) => - f(b) match { - case Some(v) => - for { - s <- dsl.objectStore(store) - _ <- s.add(key, v) - } yield b - case None => - dsl.pure(b) - } + def addResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + set(dsl => + (_, b) => + f(b) match { + case Some(v) => + for { + s <- dsl.objectStore(store) + _ <- s.add(key, v) + } yield b + case None => + dsl.pure(b) + } ) /** Note: insert only */ - def addWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def addWhenDefined[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value.fold(AsyncCallback.unit)(add(store)(key, _)) /** Note: insert only */ - def addWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def addWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value.fold(AsyncCallback.unit)(add(store)(key, _)) def clear[K, V](store: ObjectStoreDef[K, V]): AsyncCallback[Unit] = @@ -565,40 +635,53 @@ object IndexedDb { setConst(_.objectStore(store).flatMap(_.put(key, value))) /** aka upsert or delete */ - def putOrDelete[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putOrDelete[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value match { case Some(v) => put(store)(key, v) case None => delete(store)(key) } /** aka upsert or delete */ - def putOrDelete[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putOrDelete[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value match { case Some(v) => put(store)(key, v) case None => delete(store)(key) } /** Note: upsert */ - @inline def putOrDeleteResult[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit ev: B => Option[V]): AsyncCallback[B] = + @inline def putOrDeleteResult[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = putOrDeleteResultBy(store)(key, ev) /** Note: upsert */ - @inline def putOrDeleteResult[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit ev: B => Option[V]): AsyncCallback[B] = + @inline def putOrDeleteResult[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + ev: B => Option[V] 
+ ): AsyncCallback[B] = putOrDeleteResultBy(store)(key, ev) /** Note: upsert */ - def putOrDeleteResultBy[K, V](store: ObjectStoreDef.Async[K, V])(key: K, f: B => Option[V]): AsyncCallback[B] = - mapAsync { b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _)) } - .putOrDeleteResultBy(store.sync)(key, _._2) - .map(_._1) + def putOrDeleteResultBy[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + mapAsync(b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _))) + .putOrDeleteResultBy(store.sync)(key, _._2) + .map(_._1) /** Note: upsert */ - def putOrDeleteResultBy[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, f: B => Option[V]): AsyncCallback[B] = - set(dsl => (_, b) => - for { - s <- dsl.objectStore(store) - _ <- s.putOrDelete(key, f(b)) - } yield b + def putOrDeleteResultBy[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + set(dsl => + (_, b) => + for { + s <- dsl.objectStore(store) + _ <- s.putOrDelete(key, f(b)) + } yield b ) /** Note: upsert */ @@ -617,52 +700,66 @@ object IndexedDb { enc <- store.encode(v) } yield (b, enc) } - .putResultBy(store.sync)(key, _._2) - .map(_._1) + .putResultBy(store.sync)(key, _._2) + .map(_._1) /** Note: upsert */ def putResultBy[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, f: B => V): AsyncCallback[B] = - set(dsl => (_, b) => - for { - s <- dsl.objectStore(store) - _ <- s.put(key, f(b)) - } yield b + set(dsl => + (_, b) => + for { + s <- dsl.objectStore(store) + _ <- s.put(key, f(b)) + } yield b ) /** Note: upsert */ - @inline def putResultWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit ev: B => Option[V]): AsyncCallback[B] = + @inline def putResultWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = putResultWhenDefinedBy(store)(key, ev) /** Note: upsert */ - @inline def putResultWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit ev: B => Option[V]): AsyncCallback[B] = + @inline def putResultWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = putResultWhenDefinedBy(store)(key, ev) /** Note: upsert */ - def putResultWhenDefinedBy[K, V](store: ObjectStoreDef.Async[K, V])(key: K, f: B => Option[V]): AsyncCallback[B] = - mapAsync { b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _)) } - .putResultWhenDefinedBy(store.sync)(key, _._2) - .map(_._1) + def putResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + mapAsync(b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _))) + .putResultWhenDefinedBy(store.sync)(key, _._2) + .map(_._1) /** Note: upsert */ - def putResultWhenDefinedBy[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, f: B => Option[V]): AsyncCallback[B] = - set(dsl => (_, b) => - f(b) match { - case Some(v) => - for { - s <- dsl.objectStore(store) - _ <- s.put(key, v) - } yield b - case None => - dsl.pure(b) - } + def putResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + set(dsl => + (_, b) => + f(b) match { + case Some(v) => + for { + s <- dsl.objectStore(store) + _ <- s.put(key, v) + } yield b + case None => + dsl.pure(b) + } ) /** Note: upsert */ - def putWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putWhenDefined[K, V]( + store: 
ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value.fold(AsyncCallback.unit)(put(store)(key, _)) /** Note: upsert */ - def putWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: Option[V]): AsyncCallback[Unit] = + def putWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = value.fold(AsyncCallback.unit)(put(store)(key, _)) def setConst[C](set: TxnDsl[RW] => Txn[RW, C]): AsyncCallback[C] = @@ -674,20 +771,16 @@ object IndexedDb { val success_ = Success(()) - def asyncRequest_[R <: IDBRequest[Any, _]](act: => R): AsyncCallback[Unit] = + def asyncRequest_[R <: IDBRequest[Any, ?]](act: => R): AsyncCallback[Unit] = asyncRequest(act)(_ => ()) - def asyncRequest[R <: IDBRequest[Any, _], A](act: => R)(onSuccess: R => A): AsyncCallback[A] = + def asyncRequest[R <: IDBRequest[Any, ?], A](act: => R)(onSuccess: R => A): AsyncCallback[A] = AsyncCallback.promise[A].asAsyncCallback.flatMap { case (promise, complete) => val raw = act - raw.onerror = event => { - complete(Failure(Error(event))).runNow() - } + raw.onerror = event => complete(Failure(Error(event))).runNow() - raw.onsuccess = _ => { - complete(Try(onSuccess(raw))).runNow() - } + raw.onsuccess = _ => complete(Try(onSuccess(raw))).runNow() promise } @@ -695,7 +788,7 @@ object IndexedDb { def versionChange(db: DatabaseInVersionChange, e: IDBVersionChangeEvent): VersionChange = VersionChange(db, e.oldVersion.toInt, e.newVersionOption.map(_.toInt)) - def mkStoreArray(stores: Seq[ObjectStoreDef[_, _]]): js.Array[String] = { + def mkStoreArray(stores: Seq[ObjectStoreDef[?, ?]]): js.Array[String] = { val a = new js.Array[String] stores.foreach(s => a.push(s.name)) a @@ -707,10 +800,10 @@ object IndexedDb { val stores = js.Dynamic.literal().asInstanceOf[js.Dictionary[IDBObjectStore]] - def getStore(s: ObjectStore[_, _]) = + def getStore(s: ObjectStore[?, ?]) = AsyncCallback.delay(stores.get(s.defn.name).get) - def interpret[B](step: TxnStep[TxnMode, B]): AsyncCallback[B] = { + def interpret[B](step: TxnStep[TxnMode, B]): AsyncCallback[B] = step match { case FlatMap(fa, f) => @@ -764,7 +857,9 @@ object IndexedDb { val rawKeys = req.result Vector.tabulate(rawKeys.length) { i => val rawKey = rawKeys(i) - keyCodec.decode(IndexedDbKey.fromJs(rawKey)).runNow() // safe in asyncRequest onSuccess + keyCodec + .decode(IndexedDbKey.fromJs(rawKey)) + .runNow() // safe in asyncRequest onSuccess } } } @@ -790,7 +885,6 @@ object IndexedDb { AsyncCallback.tailrec(z)(a => interpret(f(a))) } - } interpret(dsl.step) } diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDbKey.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDbKey.scala index 63959b4aa..7a38d1bc5 100644 --- a/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDbKey.scala +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDbKey.scala @@ -4,9 +4,8 @@ package japgolly.webapputil.indexeddb import org.scalajs.dom.IDBKey -import scala.scalajs.js.| -final class IndexedDbKey private(val asJs: IDBKey) extends AnyVal { +final class IndexedDbKey private (val asJs: IDBKey) extends AnyVal { @inline def value = asJs.asInstanceOf[IndexedDbKey.Typed] } @@ -22,4 +21,4 @@ object IndexedDbKey { def fromJs(k: IDBKey): IndexedDbKey = new IndexedDbKey(k) -} \ No newline at end of file +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/ObjectStoreDef.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/ObjectStoreDef.scala index 
61976e66e..bd2dc7eaf 100644 --- a/workers/src/main/scala/japgolly/webapputil/indexeddb/ObjectStoreDef.scala +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/ObjectStoreDef.scala @@ -4,8 +4,8 @@ package japgolly.webapputil.indexeddb import japgolly.scalajs.react.{AsyncCallback, CallbackTo} -import japgolly.univeq.UnivEq import org.scalajs.dom.IDBValue +import cats.Eq sealed trait ObjectStoreDef[K, V] { val name: String @@ -13,14 +13,13 @@ sealed trait ObjectStoreDef[K, V] { final type Key = K - def sync: ObjectStoreDef.Sync[K, _] + def sync: ObjectStoreDef.Sync[K, ?] } object ObjectStoreDef { - final case class Sync[K, V](name : String, - keyCodec : KeyCodec[K], - valueCodec: ValueCodec[V]) extends ObjectStoreDef[K, V] { + final case class Sync[K, V](name: String, keyCodec: KeyCodec[K], valueCodec: ValueCodec[V]) + extends ObjectStoreDef[K, V] { type Value = V @@ -30,12 +29,11 @@ object ObjectStoreDef { // =================================================================================================================== - final case class Async[K, V](name : String, - keyCodec : KeyCodec[K], - valueCodec: ValueCodec.Async[V]) extends ObjectStoreDef[K, V] { self => + final case class Async[K, V](name: String, keyCodec: KeyCodec[K], valueCodec: ValueCodec.Async[V]) + extends ObjectStoreDef[K, V] { self => type Value = Async.Value { - type KeyType = K + type KeyType = K type ValueType = V val store: self.type } @@ -45,16 +43,16 @@ object ObjectStoreDef { def value(v: IDBValue): Value = new Async.Value { - override type KeyType = K + override type KeyType = K override type ValueType = V override val store: self.type = self - override val value = v + override val value = v } override val sync: Sync[K, Value] = { val syncValueCodec = ValueCodec[Value]( encode = v => CallbackTo.pure(v.value), - decode = v => CallbackTo.pure(value(v)), + decode = v => CallbackTo.pure(value(v)) ) Sync(name, keyCodec, syncValueCodec) } @@ -82,7 +80,7 @@ object ObjectStoreDef { } object Value { - implicit def univEq[V <: Value]: UnivEq[V] = UnivEq.force + implicit def univEq[V <: Value]: Eq[V] = Eq.fromUniversalEquals } } diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/Txn.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/Txn.scala new file mode 100644 index 000000000..c93421e66 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/Txn.scala @@ -0,0 +1,89 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import cats.Monad +import japgolly.webapputil.indexeddb.TxnMode._ + +/** + * Embedded language for safely working with(in) an IndexedDB transaction. + * + * This is necessary because whilst all the transaction methods are async, any other type of + * asynchronicity is not supported and will result in IndexedDB automatically committing and closing + * the transaction, in which case, further interaction with the transaction will result in a runtime + * error. + * + * Therefore, returning [[AsyncCallback]] from within transactions is dangerous because it allows + * composition of both kinds of asynchronicity. To avoid this, we use this embedded language and + * don't publicly expose its interpretation/translation to [[AsyncCallback]]. From the call-site's + * point of view, a `Txn[A]` is completely opaque. 
+ * + * This also has a nice side-effect of ensuring that transaction completion is always awaited + * because we do it in the transaction functions right after interpretation. Otherwise, the + * call-sites would always need to remember to do it if live transaction access were exposed. + * + * @tparam A + * The return type. + */ +final case class Txn[+M <: TxnMode, +A](step: TxnStep[M, A]) { self => + import TxnStep._ + + def map[B](f: A => B): Txn[M, B] = + Txn(Map(step, f)) + + def void: Txn[M, Unit] = + map(_ => ()) +} + +object Txn { + + @inline implicit final class InvariantOps[M <: TxnMode, A](private val self: Txn[M, A]) + extends AnyVal { + import TxnStep._ + + def flatMap[N <: TxnMode, B]( + f: A => Txn[N, B] + )(implicit m: TxnMode.Merge[M, N]): Txn[m.Mode, B] = { + val step = FlatMap[m.Mode, A, B](m.substM(self.step), a => m.substN(f(a).step)) + Txn(step) + } + + @inline def unless(cond: Boolean)(implicit + ev: TxnStep[RO, Option[Nothing]] => Txn[M, Option[Nothing]] + ): Txn[M, Option[A]] = + when(!cond) + + @inline def unless_(cond: Boolean)(implicit + ev: TxnStep[RO, Unit] => Txn[M, Unit] + ): Txn[M, Unit] = + when_(!cond) + + def when(cond: Boolean)(implicit + ev: TxnStep[RO, Option[Nothing]] => Txn[M, Option[Nothing]] + ): Txn[M, Option[A]] = + if (cond) self.map(Some(_)) else ev(none) + + def when_(cond: Boolean)(implicit ev: TxnStep[RO, Unit] => Txn[M, Unit]): Txn[M, Unit] = + if (cond) self.void else ev(unit) + + def >>[N <: TxnMode, B](f: Txn[N, B])(implicit m: TxnMode.Merge[M, N]): Txn[m.Mode, B] = { + val next = m.substN(f.step) + val step = FlatMap[m.Mode, A, B](m.substM(self.step), _ => next) + Txn(step) + } + } + + type CatsInstance[M <: TxnMode] = Monad[Txn[M, *]] + + def catsInstance[M <: TxnMode](dsl: TxnDsl[M]): CatsInstance[M] = + new CatsInstance[M] { + override def pure[A](a: A) = dsl.pure(a) + override def map[A, B](fa: Txn[M, A])(f: A => B) = fa.map(f) + override def flatMap[A, B](fa: Txn[M, A])(f: A => Txn[M, B]) = fa.flatMap(f) + override def tailRecM[A, B](a: A)(f: A => Txn[M, Either[A, B]]) = dsl.tailRec(a)(f) + } + + implicit def catsInstanceRO: CatsInstance[RO] = catsInstance(TxnDslRO) + implicit def catsInstanceRW: CatsInstance[RW] = catsInstance(TxnDslRW) +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnDsl.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnDsl.scala new file mode 100644 index 000000000..2f3cd66d0 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnDsl.scala @@ -0,0 +1,132 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import cats.Traverse +import japgolly.scalajs.react._ +import japgolly.scalajs.react.util.Util.{identity => identityFn} +import japgolly.webapputil.indexeddb.IndexedDb.ObjectStore +import japgolly.webapputil.indexeddb.TxnMode._ +import scala.collection.BuildFrom + +sealed abstract class TxnDsl[M <: TxnMode] { + + implicit def catsInstance: Txn.CatsInstance[M] + + protected implicit def autoWrapStepRO[B](step: TxnStep[RO, B]): Txn[M, B] + + private implicit def autoWrapStepM[B](step: TxnStep[M, B]): Txn[M, B] = + Txn(step) + + // Sync only. Async not allowed by IndexedDB. 
+ final def eval[A](c: CallbackTo[A]): Txn[M, A] = + TxnStep.Eval(c) + + final def pure[A](a: A): Txn[M, A] = + TxnStep.pure(a) + + @inline final def delay[A](a: => A): Txn[M, A] = + eval(CallbackTo(a)) + + final def unit: Txn[M, Unit] = + TxnStep.unit + + @inline final def none: Txn[M, Option[Nothing]] = + pure(None) + + final def suspend[A](a: => Txn[M, A]): Txn[M, A] = + TxnStep.Suspend(CallbackTo(a.step)) + + final def tailRec[A, B](a: A)(f: A => Txn[M, Either[A, B]]): Txn[M, B] = + TxnStep.TailRec(a, f.andThen(_.step)) + + final def objectStore[K, V](s: ObjectStoreDef.Sync[K, V]): Txn[M, ObjectStore[K, V]] = + TxnStep.GetStore(s) + + @inline final def objectStore[K, V](s: ObjectStoreDef.Async[K, V]): Txn[M, ObjectStore[K, s.Value]] = + objectStore(s.sync) + + @inline final def sequence[G[_], A](txns: G[Txn[M, A]])(implicit G: Traverse[G]): Txn[M, G[A]] = + traverse(txns)(identityFn) + + @inline final def sequenceIterable[F[x] <: Iterable[x], A](txns: => F[Txn[M, A]])(implicit cbf: BuildFrom[F[Txn[M, A]], A, F[A]]): Txn[M, F[A]] = + traverseIterable(txns)(identityFn) + + @inline final def sequenceIterable_(txns: => Iterable[Txn[M, Any]]): Txn[M, Unit] = + traverseIterable_(txns)(identityFn) + + @inline final def sequenceOption[A](o: => Option[Txn[M, A]]): Txn[M, Option[A]] = + traverseOption(o)(identityFn) + + @inline final def sequenceOption_(o: Option[Txn[M, Any]]): Txn[M, Unit] = + traverseOption_(o)(identityFn) + + final def traverse[G[_], A, B](ga: G[A])(f: A => Txn[M, B])(implicit G: Traverse[G]): Txn[M, G[B]] = + G.traverse(ga)(f.andThen(_.step)) + + final def traverseIterable[F[x] <: Iterable[x], A, B](fa: => F[A])(f: A => Txn[M, B])(implicit cbf: BuildFrom[F[A], B, F[B]]): Txn[M, F[B]] = + suspend { + val as = fa + val b = cbf.newBuilder(as) + if (as.isEmpty) + pure(b.result()) + else + as.iterator.map(f(_).map(b += _)).reduce(_ >> _) >> delay(b.result()) + } + + final def traverseIterable_[A](fa: => Iterable[A])(f: A => Txn[M, Any]): Txn[M, Unit] = + suspend { + val as = fa + val it = as.iterator + if (it.isEmpty) + unit + else { + val first = f(it.next()) + it.foldLeft(first)(_ >> f(_)).void + } + } + + final def traverseOption[A, B](o: => Option[A])(f: A => Txn[M, B]): Txn[M, Option[B]] = + suspend { + o match { + case Some(a) => f(a).map(Some(_)) + case None => none + } + } + + final def traverseOption_[A, B](o: => Option[A])(f: A => Txn[M, B]): Txn[M, Unit] = + suspend { + o match { + case Some(a) => f(a).void + case None => unit + } + } + + @inline final def unless[A](cond: Boolean)(txn: => Txn[M, A]): Txn[M, Option[A]] = + when(!cond)(txn) + + @inline final def unless_(cond: Boolean)(txn: => Txn[M, Any]): Txn[M, Unit] = + when_(!cond)(txn) + + final def when[A](cond: Boolean)(txn: => Txn[M, A]): Txn[M, Option[A]] = + if (cond) txn.map(Some(_)) else none + + final def when_(cond: Boolean)(txn: => Txn[M, Any]): Txn[M, Unit] = + if (cond) txn.void else unit +} + +// ===================================================================================================================== + +object TxnDsl { + + object RO extends TxnDsl[RO] { + override implicit def catsInstance: Txn.CatsInstance[RO] = Txn.catsInstance(this) + override protected implicit def autoWrapStepRO[B](s: TxnStep[RO, B]): Txn[RO, B] = Txn(s) + } + + object RW extends TxnDsl[RW] { + override implicit def catsInstance: Txn.CatsInstance[RW] = Txn.catsInstance(this) + override protected implicit def autoWrapStepRO[B](s: TxnStep[RO, B]): Txn[RO, B] = Txn(s) + } +} diff --git 
a/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnMode.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnMode.scala new file mode 100644 index 000000000..198d0c644 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnMode.scala @@ -0,0 +1,45 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +sealed trait TxnMode + +object TxnMode { + sealed trait RW extends TxnMode + sealed trait RO extends RW + + // =================================================================================================================== + + trait Merge[M <: TxnMode, N <: TxnMode] { + type Mode <: TxnMode + + def substM[F[+_ <: TxnMode, _], A](f: F[M, A]): F[Mode, A] + def substN[F[+_ <: TxnMode, _], A](f: F[N, A]): F[Mode, A] + } + + object Merge { + type To[M <: TxnMode, N <: TxnMode, R <: TxnMode] = Merge[M, N] { type Mode = R } + + implicit def eql[M <: TxnMode]: To[M, M, M] = + new Merge[M, M] { + override type Mode = M + override def substM[F[+_ <: TxnMode, _], A](f: F[M, A]) = f + override def substN[F[+_ <: TxnMode, _], A](f: F[M, A]) = f + } + + implicit def rorw: To[RO, RW, RW] = + new Merge[RO, RW] { + override type Mode = RW + override def substM[F[+_ <: TxnMode, _], A](f: F[RO, A]) = f + override def substN[F[+_ <: TxnMode, _], A](f: F[RW, A]) = f + } + + implicit def rwro: To[RW, RO, RW] = + new Merge[RW, RO] { + override type Mode = RW + override def substM[F[+_ <: TxnMode, _], A](f: F[RW, A]) = f + override def substN[F[+_ <: TxnMode, _], A](f: F[RO, A]) = f + } + } +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnStep.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnStep.scala new file mode 100644 index 000000000..84a799e96 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnStep.scala @@ -0,0 +1,66 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import japgolly.scalajs.react._ +import japgolly.webapputil.indexeddb.IndexedDb.ObjectStore +import org.scalajs.dom._ + +/** + * Embedded language for safely working with(in) an IndexedDB transaction. + * + * This is necessary because whilst all the transaction methods are async, any other type of + * asynchronicity is not supported and will result in IndexedDB automatically committing and closing + * the transaction, in which case, further interaction with the transaction will result in a runtime + * error. + * + * Therefore, returning [[AsyncCallback]] from within transactions is dangerous because it allows + * composition of both kinds of asynchronicity. To avoid this, we use this embedded language and + * don't publicly expose its interpretation/translation to [[AsyncCallback]]. From the call-site's + * point of view, a `Txn[A]` is completely opaque. + * + * This also has a nice side-effect of ensuring that transaction completion is always awaited + * because we do it in the transaction functions right after interpretation. Otherwise, the + * call-sites would always need to remember to do it if live transaction access were exposed. + * + * @tparam A + * The return type. 
+ */ +sealed trait TxnStep[+M <: TxnMode, +A] + +object TxnStep { + import TxnMode._ + + final case class FlatMap[M <: TxnMode, A, B](from: TxnStep[M, A], f: A => TxnStep[M, B]) + extends TxnStep[M, B] + final case class Map[M <: TxnMode, A, B](from: TxnStep[M, A], f: A => B) extends TxnStep[M, B] + final case class Suspend[M <: TxnMode, A](body: CallbackTo[TxnStep[M, A]]) extends TxnStep[M, A] + final case class TailRec[M <: TxnMode, A, B](a: A, f: A => TxnStep[M, Either[A, B]]) + extends TxnStep[M, B] + + final case class Eval[A](body: CallbackTo[A]) extends TxnStep[RO, A] + final case class GetStore[K, V](defn: ObjectStoreDef.Sync[K, V]) + extends TxnStep[RO, ObjectStore[K, V]] + final case class StoreGet[K, V](store: ObjectStore[K, V], key: IndexedDbKey) + extends TxnStep[RO, Option[V]] + final case class StoreGetAllKeys[K, V](store: ObjectStore[K, V]) extends TxnStep[RO, Vector[K]] + final case class StoreGetAllVals[K, V](store: ObjectStore[K, V]) extends TxnStep[RO, Vector[V]] + + final case class StoreAdd(store: ObjectStore[?, ?], key: IndexedDbKey, value: IDBValue) + extends TxnStep[RW, Unit] + final case class StoreClear(store: ObjectStore[?, ?]) extends TxnStep[RW, Unit] + final case class StoreDelete[K, V](store: ObjectStore[K, V], key: IndexedDbKey) + extends TxnStep[RW, Unit] + final case class StorePut(store: ObjectStore[?, ?], key: IndexedDbKey, value: IDBValue) + extends TxnStep[RW, Unit] + + val none: TxnStep[RO, Option[Nothing]] = + pure(None) + + def pure[A](a: A): TxnStep[RO, A] = + Eval(CallbackTo.pure(a)) + + val unit: TxnStep[RO, Unit] = + Eval(Callback.empty) +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/ValueCodec.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/ValueCodec.scala index 4d68c8e67..182401cad 100644 --- a/workers/src/main/scala/japgolly/webapputil/indexeddb/ValueCodec.scala +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/ValueCodec.scala @@ -11,29 +11,27 @@ import scala.reflect.ClassTag import scala.scalajs.js import scala.scalajs.js.typedarray.ArrayBuffer -final case class ValueCodec[A](encode: A => CallbackTo[IDBValue], - decode: IDBValue => CallbackTo[A]) { +final case class ValueCodec[A]( + encode: A => CallbackTo[IDBValue], + decode: IDBValue => CallbackTo[A] +) { def xmap[B](onDecode: A => B)(onEncode: B => A): ValueCodec[B] = // Delegating because decoding can fail and must be wrapped to be pure - xmapSync( - a => CallbackTo(onDecode(a)))( - b => CallbackTo(onEncode(b))) + xmapSync(a => CallbackTo(onDecode(a)))(b => CallbackTo(onEncode(b))) def xmapSync[B](onDecode: A => CallbackTo[B])(onEncode: B => CallbackTo[A]): ValueCodec[B] = - ValueCodec[B]( - encode = onEncode(_).flatMap(encode), - decode = decode(_).flatMap(onDecode)) + ValueCodec[B](encode = onEncode(_).flatMap(encode), decode = decode(_).flatMap(onDecode)) def async: ValueCodec.Async[A] = - ValueCodec.Async( - encode = encode.andThen(_.asAsyncCallback), - decode = decode.andThen(_.asAsyncCallback)) + ValueCodec.Async(encode = encode.andThen(_.asAsyncCallback), + decode = decode.andThen(_.asAsyncCallback) + ) type ThisIsBinary = ValueCodec[A] =:= ValueCodec[BinaryData] - def compress(c: Compression)(implicit ev: ThisIsBinary): ValueCodec[BinaryData] = - ev(this).xmap(c.decompressOrThrow)(c.compress) + // def compress(c: Compression)(implicit ev: ThisIsBinary): ValueCodec[BinaryData] = + // ev(this).xmap(c.decompressOrThrow)(c.compress) } object ValueCodec { @@ -59,12 +57,14 @@ object ValueCodec { def primative[A: ClassTag](name: String): 
ValueCodec[A] = apply( encode = a => CallbackTo.pure(a), - decode = d => CallbackTo( - (d: Any) match { - case a: A => a - case x => throw new js.JavaScriptException(s"Invalid IDB value found. $name expected, got: $x") - } - ) + decode = d => + CallbackTo( + (d: Any) match { + case a: A => a + case x => + throw new js.JavaScriptException(s"Invalid IDB value found. $name expected, got: $x") + } + ) ) lazy val string: ValueCodec[String] = @@ -75,19 +75,17 @@ object ValueCodec { // =================================================================================================================== - final case class Async[A](encode: A => AsyncCallback[IDBValue], - decode: IDBValue => AsyncCallback[A]) { + final case class Async[A]( + encode: A => AsyncCallback[IDBValue], + decode: IDBValue => AsyncCallback[A] + ) { def xmap[B](onDecode: A => B)(onEncode: B => A): Async[B] = // Delegating because decoding can fail and must be wrapped to be pure - xmapAsync( - a => AsyncCallback.delay(onDecode(a)))( - b => AsyncCallback.delay(onEncode(b))) + xmapAsync(a => AsyncCallback.delay(onDecode(a)))(b => AsyncCallback.delay(onEncode(b))) def xmapAsync[B](onDecode: A => AsyncCallback[B])(onEncode: B => AsyncCallback[A]): Async[B] = - Async[B]( - encode = onEncode(_).flatMap(encode), - decode = decode(_).flatMap(onDecode)) + Async[B](encode = onEncode(_).flatMap(encode), decode = decode(_).flatMap(onDecode)) type ThisIsBinary = Async[A] =:= Async[BinaryData] diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/package.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/package.scala new file mode 100644 index 000000000..fc0385087 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/package.scala @@ -0,0 +1,14 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil + +package object indexeddb { + + type TxnDslRO = TxnDsl.RO.type + type TxnDslRW = TxnDsl.RW.type + + @inline def TxnDslRO: TxnDslRO = TxnDsl.RO + @inline def TxnDslRW: TxnDslRW = TxnDsl.RW + +} diff --git a/workers/src/main/scala/workers/CacheIDBStores.scala b/workers/src/main/scala/workers/CacheIDBStores.scala index 1560cc292..e1a264e28 100644 --- a/workers/src/main/scala/workers/CacheIDBStores.scala +++ b/workers/src/main/scala/workers/CacheIDBStores.scala @@ -12,6 +12,7 @@ import japgolly.webapputil.boopickle.* import japgolly.webapputil.indexeddb.* import japgolly.webapputil.indexeddb.IndexedDb.DatabaseName import lucuma.ags.GuideStarCandidate +import japgolly.webapputil.boopickle.BinaryFormatExt.Implicits.* import scala.annotation.nowarn import scala.scalajs.js
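
Usage sketch (not part of the diff): the Txn / TxnDsl embedded language introduced above keeps every step of an IndexedDB transaction inside `Txn`, so no foreign asynchronicity can sneak in and let the browser auto-commit the transaction mid-flight. The snippet below is a minimal sketch of composing a read-modify-write operation with that API; the "notes" store, `appendSuffix`, and `KeyCodec.string` are illustrative assumptions (only `ValueCodec.string` appears in this change), and the exact call shape of `db.transactionRW(...)` follows the usage visible in the diff rather than anything this sketch defines.

// Sketch only. Assumes a KeyCodec.string analogous to ValueCodec.string,
// and a Database obtained elsewhere (e.g. from IndexedDb.open).
import japgolly.scalajs.react.AsyncCallback
import japgolly.webapputil.indexeddb._

object TxnUsageSketch {

  // Hypothetical synchronous store: String keys, String values.
  val notes: ObjectStoreDef.Sync[String, String] =
    ObjectStoreDef.Sync("notes", KeyCodec.string, ValueCodec.string)

  // Read the current value and write an updated one within a single
  // read-write transaction. Every step stays inside Txn, so the
  // transaction cannot auto-commit between the get and the put.
  def appendSuffix(db: IndexedDb.Database, key: String, suffix: String): AsyncCallback[Unit] =
    db.transactionRW(notes) { dsl =>
      for {
        store <- dsl.objectStore(notes)
        cur   <- store.get(key)                               // Txn[RO, Option[String]]
        _     <- store.put(key, cur.fold(suffix)(_ + suffix)) // Txn[RW, Unit]
      } yield ()
    }
}

Mixing the read-only `get` with the read-write `put` compiles because `TxnMode.Merge` widens the combined mode to RW, which is what `transactionRW` expects.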