diff --git a/build.sbt b/build.sbt index 046e986ca..dede32ede 100644 --- a/build.sbt +++ b/build.sbt @@ -88,8 +88,7 @@ lazy val workers = project .settings( libraryDependencies ++= LucumaCatalog.value ++ Http4sDom.value ++ - Log4Cats.value ++ - ScalaWebAppUtil.value, + Log4Cats.value, Test / scalaJSLinkerConfig ~= { import org.scalajs.linker.interface.OutputPatterns _.withOutputPatterns(OutputPatterns.fromJSFile("%s.mjs")) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index de855fb14..aaf2d71d0 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -335,10 +335,4 @@ object Dependencies { )(scalaJsReact) ) - val ScalaWebAppUtil = Def.setting( - deps( - "com.github.japgolly.webapp-util" %%% "core", - "com.github.japgolly.webapp-util" %%% "core-boopickle" - )(webAppUtil) - ) } diff --git a/project/Versions.scala b/project/Versions.scala index 537b4d45f..88cbf1a4a 100644 --- a/project/Versions.scala +++ b/project/Versions.scala @@ -42,5 +42,4 @@ object Versions { val scalaCollectionContrib = "0.4.0" val scalaJsDom = "2.8.0" val scalaJsReact = "3.0.0-beta7" - val webAppUtil = "2.0.0-RC12" } diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryData.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData.scala new file mode 100644 index 000000000..c43871182 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData.scala @@ -0,0 +1,244 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +import cats.Eq +import japgolly.webapputil.general.ErrorMsg + +import java.io.OutputStream +import java.lang.StringBuilder as JStringBuilder +import java.nio.ByteBuffer +import java.nio.charset.StandardCharsets +import java.util.Arrays +import java.util.Base64 +import scala.collection.immutable.ArraySeq + +object BinaryData extends BinaryData_PlatformSpecific_Object { + + implicit def univEq: Eq[BinaryData] = + Eq.fromUniversalEquals + + final val DefaultByteLimitInDesc = 50 + + def empty: BinaryData = + unsafeFromArray(new Array(0)) + + def byte(b: Byte): BinaryData = { + val a = new Array[Byte](1) + a(0) = b + unsafeFromArray(a) + } + + def fromArray(a: Array[Byte]): BinaryData = { + val a2 = Arrays.copyOf(a, a.length) + unsafeFromArray(a2) + } + + def fromArraySeq(a: ArraySeq[Byte]): BinaryData = + unsafeFromArray(a.unsafeArray.asInstanceOf[Array[Byte]]) + + def fromBase64(base64: String): Either[ErrorMsg, BinaryData] = + try + Right(fromBase64OrThrow(base64)) + catch { + case e: IllegalArgumentException => + Left(ErrorMsg("Invalid base64 data: " + e.getMessage)) + } + + def fromBase64OrThrow(base64: String): BinaryData = + unsafeFromArray(Base64.getDecoder.decode(base64)) + + def fromByteBuffer(bb: ByteBuffer): BinaryData = + if (bb.hasArray) { + val offset = bb.arrayOffset() + val a = Arrays.copyOfRange(bb.array(), offset, offset + bb.limit()) + unsafeFromArray(a) + } else { + val a = new Array[Byte](bb.remaining) + bb.get(a) + unsafeFromArray(a) + } + + def fromHex(hex: String): BinaryData = { + assert((hex.length & 1) == 0, "Hex strings must have an even length.") + var i = hex.length >> 1 + val bytes = new Array[Byte](i) + while (i > 0) { + i -= 1 + val si = i << 1 + val byteStr = hex.substring(si, si + 2) + val byte = java.lang.Integer.parseUnsignedInt(byteStr, 16).byteValue() + bytes(i) = byte + } + unsafeFromArray(bytes) + } + + /** + * 
unsafe because the array could be modified later and affect the underlying array we use here + */ + def unsafeFromArray(a: Array[Byte]): BinaryData = + new BinaryData(a, 0, a.length) + + /** + * unsafe because the ByteBuffer could be modified later and affect the underlying array we use + * here + */ + def unsafeFromByteBuffer(bb: ByteBuffer): BinaryData = + if (bb.hasArray) + new BinaryData(bb.array(), bb.arrayOffset(), bb.limit()) + else + fromByteBuffer(bb) + + def fromStringAsUtf8(str: String): BinaryData = + unsafeFromArray(str.getBytes(StandardCharsets.UTF_8)) +} + +/** Immutable blob of binary data. */ +final class BinaryData( + private[BinaryData] val bytes: Array[Byte], + private[BinaryData] val offset: Int, + val length: Int +) extends BinaryData_PlatformSpecific_Instance { + + private val lastIndExcl = offset + length + + // Note: It's acceptable to have excess bytes beyond the declared length + assert(lastIndExcl <= bytes.length, + s"offset($offset) + length ($length) exceeds number of bytes (${bytes.length})" + ) + + override def toString = s"BinaryData(${describe()})" + + override def hashCode = + // Should use Arrays.hashCode() but have to copy to use provided length instead of array.length + offset * -947 + length + + override def equals(o: Any): Boolean = + o match { + case b: BinaryData => + @inline def sameRef = this eq b + @inline def sameLen = length == b.length + @inline def sameBin = + (0 until length).forall(i => bytes(offset + i) == b.bytes(b.offset + i)) + sameRef || (sameLen && sameBin) + case _ => + false + } + + @inline def isEmpty: Boolean = + length == 0 + + @inline def nonEmpty: Boolean = + length != 0 + + def duplicate: BinaryData = + BinaryData.unsafeFromArray(toNewArray) + + def describe(byteLimit: Int = BinaryData.DefaultByteLimitInDesc, sep: String = ",") = { + val byteDesc = describeBytes(byteLimit, sep) + val len = "%,d".format(length) + s"$len bytes: $byteDesc" + } + + def describeBytes(limit: Int = BinaryData.DefaultByteLimitInDesc, sep: String = ",") = { + var i = bytes.iterator.drop(offset).map(b => "%02X".format(b & 0xff)) + if (length > limit) + i = i.take(limit) ++ Iterator.single("…") + else + i = i.take(length) + i.mkString(sep) + } + + def writeTo(os: OutputStream): Unit = + os.write(bytes, offset, length) + + // Note: the below must remain a `def` because ByteBuffers themselves have mutable state + /** unsafe in that the underlying bytes could be modified via access to unsafeArray */ + def unsafeByteBuffer: ByteBuffer = + if (offset > 0) + ByteBuffer.wrap(bytes, 0, lastIndExcl).position(offset).slice() + else + ByteBuffer.wrap(bytes, 0, length) + + def toNewByteBuffer: ByteBuffer = + ByteBuffer.wrap(toNewArray, 0, length) + + def toNewArray: Array[Byte] = + Arrays.copyOfRange(bytes, offset, lastIndExcl) + + /** unsafe in that you might get back the underlying array which is mutable */ + lazy val unsafeArray: Array[Byte] = + if (offset == 0 && length == bytes.length) + bytes + else + toNewArray + + def binaryLikeString: String = { + val chars = new Array[Char](length) + var j = length + while (j > 0) { + j -= 1 + val b = bytes(offset + j) + val i = b.toInt & 0xff + chars.update(j, i.toChar) + } + String.valueOf(chars) + } + + def hex: String = + bytes.iterator + .slice(offset, lastIndExcl) + .map(b => "%02X".format(b & 0xff)) + .mkString + + def ++(that: BinaryData): BinaryData = + if (this.isEmpty) + that + else if (that.isEmpty) + this + else { + val a = new Array[Byte](length + that.length) + Array.copy(this.bytes, this.offset, a, 0, 
this.length) + Array.copy(that.bytes, that.offset, a, this.length, that.length) + BinaryData.unsafeFromArray(a) + } + + def drop(n: Int): BinaryData = { + val m = n.min(length) + new BinaryData(bytes, offset + m, length - m) + } + + def take(n: Int): BinaryData = { + val m = n.min(length) + new BinaryData(bytes, offset, m) + } + + def dropRight(n: Int): BinaryData = { + val m = n.min(length) + take(length - m) + } + + def takeRight(n: Int): BinaryData = { + val m = n.min(length) + drop(length - m) + } + + def toBase64: String = + Base64.getEncoder.encodeToString(unsafeArray) + + def appendBase64(sb: JStringBuilder): Unit = { + val b64 = Base64.getEncoder.encode(unsafeArray) + var i = 0 + while (i < b64.length) { + sb.append(b64(i).toChar) + i += 1 + } + } + + @inline def appendBase64(sb: StringBuilder): Unit = + appendBase64(sb.underlying) + + def toStringAsUtf8: String = + new String(unsafeArray, StandardCharsets.UTF_8) +} diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryData_PlatformSpecific.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData_PlatformSpecific.scala new file mode 100644 index 000000000..f606a99e1 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryData_PlatformSpecific.scala @@ -0,0 +1,59 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +// ********** +// * * +// * JS * +// * * +// ********** + +import org.scalajs.dom.Blob + +import scala.scalajs.js +import scala.scalajs.js.JSConverters.* +import scala.scalajs.js.typedarray.ArrayBuffer +import scala.scalajs.js.typedarray.Uint8Array + +trait BinaryData_PlatformSpecific_Object { self: BinaryData.type => + + def fromArrayBuffer(ab: ArrayBuffer): BinaryData = + BinaryData.fromByteBuffer(BinaryJs.arrayBufferToByteBuffer(ab)) + + def fromUint8Array(a: Uint8Array): BinaryData = + fromArrayBuffer(BinaryJs.uint8ArrayToArrayBuffer(a)) + + def unsafeFromArrayBuffer(ab: ArrayBuffer): BinaryData = + BinaryData.unsafeFromByteBuffer(BinaryJs.arrayBufferToByteBuffer(ab)) + + def unsafeFromUint8Array(a: Uint8Array): BinaryData = + unsafeFromArrayBuffer(BinaryJs.uint8ArrayToArrayBuffer(a)) +} + +trait BinaryData_PlatformSpecific_Instance { self: BinaryData => + + def toArrayBuffer: ArrayBuffer = + BinaryJs.byteBufferToArrayBuffer(self.unsafeByteBuffer) + + def toUint8Array: Uint8Array = + new Uint8Array(toArrayBuffer) + + def toBlob: Blob = + BinaryJs.byteBufferToBlob(self.unsafeByteBuffer) + + def toNewJsArray: js.Array[Byte] = + self.toNewArray.toJSArray + + def unsafeArrayBuffer: js.typedarray.ArrayBufferView = + BinaryJs.byteBufferToInt8Array(self.unsafeByteBuffer) + + def unsafeUint8Array: Uint8Array = + new Uint8Array(toArrayBuffer) + + def unsafeBlob: Blob = + BinaryJs.byteBufferToBlob(self.unsafeByteBuffer) + + def unsafeJsArray: js.Array[Byte] = + self.unsafeArray.toJSArray +} diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryFormat.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryFormat.scala new file mode 100644 index 000000000..709572c5e --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryFormat.scala @@ -0,0 +1,46 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. 
(AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +import japgolly.scalajs.react.AsyncCallback + +/** A means of converting instances of type `A` to a binary format and back. */ +final class BinaryFormat[A]( + val encode: A => AsyncCallback[BinaryData], + val decode: BinaryData => AsyncCallback[A] +) { + + def xmap[B](onDecode: A => B)(onEncode: B => A): BinaryFormat[B] = + // Delegating because decoding can fail and must be wrapped to be pure + xmapAsync(a => AsyncCallback.delay(onDecode(a)))(b => AsyncCallback.delay(onEncode(b))) + + def xmapAsync[B](onDecode: A => AsyncCallback[B])( + onEncode: B => AsyncCallback[A] + ): BinaryFormat[B] = + BinaryFormat.async(decode(_).flatMap(onDecode))(onEncode(_).flatMap(encode)) + + type ThisIsBinary = BinaryFormat[A] =:= BinaryFormat[BinaryData] + + // def encrypt(e: Encryption)(implicit ev: ThisIsBinary): BinaryFormat[BinaryData] = + // ev(this).xmapAsync(e.decrypt)(e.encrypt) + + // def compress(c: Compression)(implicit ev: ThisIsBinary): BinaryFormat[BinaryData] = + // ev(this).xmap(c.decompressOrThrow)(c.compress) +} + +object BinaryFormat { + + val id: BinaryFormat[BinaryData] = { + val f: BinaryData => AsyncCallback[BinaryData] = AsyncCallback.pure + async(f)(f) + } + + def apply[A](decode: BinaryData => A)(encode: A => BinaryData): BinaryFormat[A] = + async(b => AsyncCallback.delay(decode(b)))(a => AsyncCallback.delay(encode(a))) + + def async[A](decode: BinaryData => AsyncCallback[A])( + encode: A => AsyncCallback[BinaryData] + ): BinaryFormat[A] = + new BinaryFormat(encode, decode) +} diff --git a/workers/src/main/scala/japgolly/webapputil/binary/BinaryJs.scala b/workers/src/main/scala/japgolly/webapputil/binary/BinaryJs.scala new file mode 100644 index 000000000..ec7a7b062 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/binary/BinaryJs.scala @@ -0,0 +1,84 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. 
(AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.binary + +import org.scalajs.dom.Blob +import org.scalajs.dom.FileReader +import org.scalajs.dom.window + +import java.nio.ByteBuffer +import scala.scalajs.js +import scala.scalajs.js.JSConverters.* +import scala.scalajs.js.typedarray.* +import scala.scalajs.js.typedarray.TypedArrayBufferOps.* + +object BinaryJs extends BinaryJs + +trait BinaryJs { + + final def arrayBufferToBlob(a: ArrayBuffer): Blob = + new Blob(js.Array(a)) + + @inline final def arrayBufferToByteBuffer(a: ArrayBuffer): ByteBuffer = + TypedArrayBuffer.wrap(a) + + final def base64ToByteBuffer(base64: String): ByteBuffer = { + val binstr = window.atob(base64) + val buf = new Int8Array(binstr.length) + var i = 0 + binstr.foreach { ch => + buf(i) = ch.toByte + i += 1 + } + TypedArrayBuffer.wrap(buf) + } + + final def blobToArrayBuffer(blob: Blob): ArrayBuffer = { + var arrayBuffer: ArrayBuffer = null + val fileReader = new FileReader() + fileReader.onload = e => + arrayBuffer = e.target.asInstanceOf[js.Dynamic].result.asInstanceOf[ArrayBuffer] + fileReader.readAsArrayBuffer(blob) + assert(arrayBuffer != null) + arrayBuffer + } + + final def byteBufferToArrayBuffer(bb: ByteBuffer): ArrayBuffer = + int8ArrayToArrayBuffer(byteBufferToInt8Array(bb)) + + final def byteBufferToBlob(bb: ByteBuffer): Blob = + arrayBufferToBlob(byteBufferToArrayBuffer(bb)) + + final def byteBufferToInt8Array(bb: ByteBuffer): Int8Array = { + val limit = bb.limit() + if (bb.hasTypedArray()) + bb.typedArray() + else if (bb.hasArray) { + var array = bb.array() + val offset = bb.arrayOffset() + if (limit != array.length) + array = array.slice(offset, offset + limit) + new Int8Array(array.toJSArray) + } else { + val array = BinaryData.unsafeFromByteBuffer(bb).unsafeJsArray + new Int8Array(array) + } + } + + final def int8ArrayToArrayBuffer(v: Int8Array): ArrayBuffer = + arrayBufferViewToArrayBuffer(v) + + final def uint8ArrayToArrayBuffer(v: Uint8Array): ArrayBuffer = + arrayBufferViewToArrayBuffer(v) + + final private def arrayBufferViewToArrayBuffer(v: ArrayBufferView): ArrayBuffer = { + val off = v.byteOffset + val len = v.byteLength + if (len == v.buffer.byteLength) + v.buffer + else + v.buffer.slice(off, off + len) + } + +} diff --git a/workers/src/main/scala/japgolly/webapputil/boopickle/BinaryFormatExt.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/BinaryFormatExt.scala new file mode 100644 index 000000000..5b0cd95b3 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/BinaryFormatExt.scala @@ -0,0 +1,99 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. 
(AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import boopickle.PickleImpl +import boopickle.Pickler +import boopickle.UnpickleImpl +import japgolly.scalajs.react.callback.AsyncCallback +import japgolly.webapputil.binary.* + +import java.nio.ByteBuffer +import scala.scalajs.js + +object BinaryFormatExt { + + trait Implicits { + + @inline final implicit def BinaryFormatBoopickleExt[A]( + self: BinaryFormat[A] + ): Implicits.BinaryFormatBoopickleExt[A] = + new Implicits.BinaryFormatBoopickleExt[A](self) + + @inline final implicit def BinaryFormatBoopickleStaticExt( + self: BinaryFormat.type + ): Implicits.BinaryFormatBoopickleStaticExt = + new Implicits.BinaryFormatBoopickleStaticExt(self) + } + + object Implicits extends Implicits { + + final class BinaryFormatBoopickleExt[A](private val self: BinaryFormat[A]) extends AnyVal { + type ThisIsBinary = BinaryFormat[A] =:= BinaryFormat[BinaryData] + + def pickle[B](implicit pickler: SafePickler[B], ev: ThisIsBinary): BinaryFormat[B] = + ev(self).xmap(pickler.decodeOrThrow)(pickler.encode) + + def pickleBasic[B](implicit pickler: Pickler[B], ev: ThisIsBinary): BinaryFormat[B] = { + val unpickle = UnpickleImpl[B] + ev(self) + .xmap[ByteBuffer](_.unsafeByteBuffer)(BinaryData.unsafeFromByteBuffer) + .xmap(unpickle.fromBytes(_))(PickleImpl.intoBytes(_)) + } + } + + final class BinaryFormatBoopickleStaticExt(private val self: BinaryFormat.type) extends AnyVal { + // @inline def pickleCompressEncrypt[A](c: Compression, e: Encryption)(implicit pickler: SafePickler[A]): BinaryFormat[A] = + // BinaryFormatExt.pickleCompressEncrypt(c, e) + + @inline def versioned[A]( + oldest: BinaryFormat[A], + toLatest: BinaryFormat[A]* + ): BinaryFormat[A] = + BinaryFormatExt.versioned(oldest, toLatest*) + } + } + + // =================================================================================================================== + + def versioned[A](oldest: BinaryFormat[A], toLatest: BinaryFormat[A]*): BinaryFormat[A] = { + val layers = oldest +: toLatest.toArray + val decoders = layers + val decoderIndices = decoders.indices + val latestVer = decoders.length - 1 + val latestVerHeader = BinaryData.byte(latestVer.toByte) + val encoder = layers.last + + def encode(a: A): AsyncCallback[BinaryData] = + encoder.encode(a).map(latestVerHeader ++ _) + + def decode(bin: BinaryData): AsyncCallback[A] = + AsyncCallback.suspend { + + if (bin.isEmpty) + throw js.JavaScriptException("No data") + + val ver = bin.unsafeArray(0).toInt + + if (decoderIndices.contains(ver)) { + val binBody = bin.drop(1) + decoders(ver).decode(binBody) + } else if (ver < 0) + throw js.JavaScriptException("Bad data") + else + SafePicklerUtil.unsupportedVer(ver, latestVer) + } + + BinaryFormat.async(decode)(encode) + } + + // def pickleCompressEncrypt[A](c: Compression, e: Encryption)(implicit + // pickler: SafePickler[A] + // ): BinaryFormat[A] = + // BinaryFormat.id + // .encrypt(e) // 3. Encryption is the very last step + // .compress(c) // 2. Compress the binary *before* encrypting + // .pickle[A] // 1. 
Generate binary first + +} diff --git a/workers/src/main/scala/japgolly/webapputil/boopickle/PicklerUtil.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/PicklerUtil.scala new file mode 100644 index 000000000..af9296d4d --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/PicklerUtil.scala @@ -0,0 +1,301 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import boopickle.Decoder +import boopickle.DefaultBasic.* +import cats.Eq +import cats.data.Ior +import cats.data.NonEmptySet +import cats.data.NonEmptyVector +import japgolly.webapputil.binary.BinaryData +import japgolly.webapputil.general.ErrorMsg + +import java.nio.ByteBuffer +import java.time.Instant +import scala.collection.immutable.ArraySeq +import scala.collection.immutable.SortedSet +import scala.reflect.ClassTag + +object PicklerUtil { + + // =================================================================================================================== + // Extension classes + + object Implicits { + + @inline implicit def boopickleUtilAnyRefPicklerExt[A <: AnyRef]( + a: Pickler[A] + ): AnyRefPicklerExt[A] = + new AnyRefPicklerExt[A](a) + + @inline implicit def boopickleUtilDecoderExt(a: Decoder): DecoderExt = + new DecoderExt(a) + } + + implicit final class AnyRefPicklerExt[A <: AnyRef](private val p: Pickler[A]) extends AnyVal { + + def reuseByEq(implicit ev: Eq[A]) = + new PickleWithReuse[A](p, true) + + def reuseByRef = + new PickleWithReuse[A](p, false) + + def narrow[B <: A: ClassTag]: Pickler[B] = + p.xmap[B] { + case b: B => b + case a => throw new IllegalArgumentException("Illegal supertype: " + a) + }(b => b) + } + + final class PickleWithReuse[A <: AnyRef](p: Pickler[A], byUnivEq: Boolean) extends Pickler[A] { + private val getP: (PickleState, A) => Option[Int] = + if (byUnivEq) _ immutableRefFor _ else _ identityRefFor _ + private val getU: (UnpickleState, Int) => A = + if (byUnivEq) _.immutableFor[A](_) else _.identityFor[A](_) + private val setP: (PickleState, A) => Unit = + if (byUnivEq) _ addImmutableRef _ else _ addIdentityRef _ + private val setU: (UnpickleState, A) => Unit = + if (byUnivEq) _ addImmutableRef _ else _ addIdentityRef _ + + override def pickle(value: A)(implicit state: PickleState): Unit = { + val ref = getP(state, value) + if (ref.isDefined) + state.enc.writeInt(-ref.get) + () + else { + state.enc.writeInt(0) + p.pickle(value) + setP(state, value) + } + } + override def unpickle(implicit state: UnpickleState): A = + state.dec.readIntCode match { + case Right(i) => + if (i == 0) { + val value = p.unpickle + setU(state, value) + value + } else + getU(state, -i) + case Left(_) => + throw new IllegalArgumentException("Unknown coding") + } + } + + implicit final class DecoderExt(private val self: Decoder) extends AnyVal { + def buf: ByteBuffer = + self match { + case a: boopickle.DecoderSpeed => a.buf + case a: boopickle.DecoderSize => a.buf + } + + def peek[A](f: Decoder => A): A = { + val b = buf + val p = b.position() + try f(self) + finally + b.position(p) + () + } + } + + // =================================================================================================================== + // Polymorphic definitions + // (non-implicit, "pickle" prefix) + + def pickleArraySeq[A](implicit pa: Pickler[A], ct: ClassTag[A]): Pickler[ArraySeq[A]] = + // Can't use 
boopickle.BasicPicklers.ArrayPickler here because internally, it uses writeRawInt to write length, + // where as IterablePickler uses writeInt. We need to be compatible because we're switching out a Vector for an + // ArraySeq in some impls without affecting the codec. + boopickle.BasicPicklers.IterablePickler[A, ArraySeq] + + def pickleEither[L: Pickler, R: Pickler]: Pickler[Either[L, R]] = + new Pickler[Either[L, R]] { + private final val KeyR = 0 + private final val KeyL = 1 + override def pickle(a: Either[L, R])(implicit state: PickleState): Unit = + a match { + case Right(r) => state.enc.writeByte(KeyR); state.pickle(r); () + case Left(l) => state.enc.writeByte(KeyL); state.pickle(l); () + } + override def unpickle(implicit state: UnpickleState): Either[L, R] = + state.dec.readByte match { + case KeyR => Right(state.unpickle[R]) + case KeyL => Left(state.unpickle[L]) + } + } + + def pickleEnum[V: Eq](nev: NonEmptyVector[V], firstValue: Int = 0): Pickler[V] = + new Pickler[V] { + private val fromInt = nev.toVector + private val toInt = fromInt.iterator.zipWithIndex.toMap + assert(toInt.size == nev.length, s"Duplicates found in $nev") + override def pickle(v: V)(implicit state: PickleState): Unit = { + val i = toInt(v) + firstValue + state.enc.writeInt(i) + () + } + override def unpickle(implicit state: UnpickleState): V = + state.dec.readIntCode match { + case Right(i) => fromInt(i - firstValue) + case Left(_) => throw new IllegalArgumentException("Invalid coding") + } + } + + // def pickleFix[F[_]: Functor](implicit p: Pickler[F[Unit]]): Pickler[Fix[F]] = + // new Pickler[Fix[F]] { + // override def pickle(f: Fix[F])(implicit state: PickleState): Unit = { + + // // val fUnit = Functor[F].void(f.unfix) + // // p.pickle(fUnit) + // // Functor[F].map(f.unfix)(pickle) + + // // Compared to ↑, this ↓ is generally on-par for small trees, and around 30% faster for larger, deeper trees + + // val fields = new collection.mutable.ArrayBuffer[Fix[F]](32) + // val fUnit = Functor[F].map(f.unfix) { a => + // fields += a + // () + // } + // p.pickle(fUnit) + // fields.foreach(pickle) + + // () + // } + + // override def unpickle(implicit state: UnpickleState) = { + // val fUnit = p.unpickle + // Fix(Functor[F].map(fUnit)(_ => unpickle)) + // } + // } + + def pickleIor[A: Pickler, B: Pickler]: Pickler[A Ior B] = + new Pickler[A Ior B] { + import Ior._ + private final val KeyLeft = 0 + private final val KeyRight = 1 + private final val KeyBoth = 2 + override def pickle(i: A Ior B)(implicit state: PickleState): Unit = + i match { + case Left(a) => state.enc.writeByte(KeyLeft); state.pickle(a); () + case Right(b) => state.enc.writeByte(KeyRight); state.pickle(b); () + case Both(a, b) => state.enc.writeByte(KeyBoth); state.pickle(a); state.pickle(b); () + } + override def unpickle(implicit state: UnpickleState): A Ior B = + state.dec.readByte match { + case KeyLeft => Left(state.unpickle[A]) + case KeyRight => Right(state.unpickle[B]) + case KeyBoth => + val a = state.unpickle[A] + val b = state.unpickle[B] + Both(a, b) + } + } + + def pickleLazily[A](f: => Pickler[A]): Pickler[A] = { + lazy val p = f + new Pickler[A] { + override def pickle(a: A)(implicit state: PickleState): Unit = p.pickle(a) + override def unpickle(implicit state: UnpickleState): A = p.unpickle + } + } + + def pickleMap[K: Pickler, V: Pickler]: Pickler[Map[K, V]] = + mapPickler[K, V, Map] + + // def pickleNEA[A](implicit p: Pickler[ArraySeq[A]]): Pickler[NonEmptyArraySeq[A]] = + // pickleNonEmpty(_.whole) + + def pickleNES[A: 
Eq](implicit p: Pickler[SortedSet[A]]): Pickler[NonEmptySet[A]] = + p.xmap(NonEmptySet.fromSetUnsafe)(_.toSortedSet) + + def pickleNEV[A](implicit p: Pickler[Vector[A]]): Pickler[NonEmptyVector[A]] = + p.xmap(NonEmptyVector.fromVectorUnsafe)(_.toVector) + + // def pickleNonEmpty[N, E]( + // f: N => E + // )(implicit p: Pickler[E], proof: NonEmpty.Proof[E, N]): Pickler[N] = + // p.xmap(NonEmpty require_! _)(f) + + // def pickleNonEmptyMono[A](implicit + // p: Pickler[A], + // proof: NonEmpty.ProofMono[A] + // ): Pickler[NonEmpty[A]] = + // pickleNonEmpty(_.value) + + private object _pickleNothing extends Pickler[AnyRef] { + override def pickle(obj: AnyRef)(implicit state: PickleState): Unit = () + override def unpickle(implicit state: UnpickleState): Nothing = throw new RuntimeException( + "This case is illegal." + ) + } + + def pickleNothing[A <: AnyRef]: Pickler[A] = + _pickleNothing.asInstanceOf[Pickler[A]] + + // def pickleSafeBoolValues[B <: SafeBool[B], A: Pickler]: Pickler[SafeBool.Values[B, A]] = + // transformPickler[SafeBool.Values[B, A], (A, A)](x => SafeBool.Values(pos = x._1, neg = x._2))( + // x => (x.pos, x.neg) + // ) + + // =================================================================================================================== + // Concrete picklers for base data type + // (implicit lazy vals, "pickler" prefix) + + implicit lazy val picklerBinaryData: Pickler[BinaryData] = + transformPickler(BinaryData.unsafeFromArray)(_.unsafeArray) + + def picklerBinaryDataFixedLength(len: Int): Pickler[BinaryData] = + new Pickler[BinaryData] { + + override def pickle(bin: BinaryData)(implicit state: PickleState): Unit = { + assert(bin.length == len) + val enc = state.enc + val bytes = bin.unsafeArray + var i = 0 + while (i < len) { + enc.writeByte(bytes(i)) + i += 1 + } + } + + override def unpickle(implicit state: UnpickleState): BinaryData = { + val dec = state.dec + val bytes = new Array[Byte](len) + var i = 0 + while (i < len) { + bytes(i) = dec.readByte + i += 1 + } + BinaryData.unsafeFromArray(bytes) + } + } + + implicit lazy val picklerErrorMsg: Pickler[ErrorMsg] = + transformPickler(ErrorMsg.apply)(_.value) + + implicit lazy val picklerErrorMsgOrUnit: Pickler[Either[ErrorMsg, Unit]] = + pickleEither + + implicit lazy val picklerInstant: Pickler[Instant] = + new Pickler[Instant] { + // EpochSecond is stored as a packed long (typically 5 bytes instead of 8 raw) + // Nano is stored as a raw int (4 bytes instead of typically 5 packed, P(27%) 4 packed) + override def pickle(i: Instant)(implicit state: PickleState): Unit = { + state.enc.writeLong(i.getEpochSecond) + state.enc.writeRawInt(i.getNano) + () + } + override def unpickle(implicit state: UnpickleState): Instant = { + val epochSecond = state.dec.readLong + val nano = state.dec.readRawInt + Instant.ofEpochSecond(epochSecond, nano) + } + } + + implicit lazy val picklerNonEmptyVectorInt: Pickler[NonEmptyVector[Int]] = + pickleNEV + +} diff --git a/workers/src/main/scala/japgolly/webapputil/boopickle/SafePickler.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePickler.scala new file mode 100644 index 000000000..c9ee7e4a3 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePickler.scala @@ -0,0 +1,237 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. 
(AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import boopickle.PickleState +import boopickle.Pickler +import boopickle.UnpickleState +import cats.syntax.eq.* +import japgolly.webapputil.binary.BinaryData +import japgolly.webapputil.general.Version +import japgolly.webapputil.general.Version.ordering.mkOrderingOps + +import scala.annotation.elidable +import scala.util.control.NonFatal + +/** + * Binary codec (pickler). Differs from out-of-the-box [[Pickler]] in the following ways: + * + * - decoding is pure: an error value is returned on failure + * - supports magic numbers at header and footer as a partial message integrity check + * - supports protocol versioning and evolution + */ +final case class SafePickler[A]( + header: Option[MagicNumber], + footer: Option[MagicNumber], + version: Version, + body: Pickler[A] +) { + + type Data = A + + import boopickle.{PickleImpl, UnpickleImpl} + import SafePickler._ + + // def i1 = "0x%08X".format(new util.Random().nextInt); def i2 = s"$i1, $i1"; def i4 = s"$i2 $i2" + def withMagicNumbers(header: Int, footer: Int): SafePickler[A] = + copy(Some(MagicNumber(header)), Some(MagicNumber(footer))) + + def withMagicNumberFooter(footer: Int): SafePickler[A] = + copy(footer = Some(MagicNumber(footer))) + + def map[B](f: Pickler[A] => Pickler[B]): SafePickler[B] = + copy(body = f(body)) + + private val picklerHeader = header.map(pickleMagicNumber(version, _)) + private val picklerFooter = footer.map(pickleMagicNumber(version, _)) + private val picklerVersion = pickleVersion(version) + + private val picklerCombined: Pickler[A] = + new Pickler[A] { + + override def pickle(a: A)(implicit state: PickleState): Unit = { + picklerHeader.foreach(_.pickle(())) + picklerVersion.pickle(version) + body.pickle(a) + picklerFooter.foreach(_.pickle(())) + } + + override def unpickle(implicit state: UnpickleState): A = { + picklerHeader.foreach(_.unpickle) + val v = picklerVersion.unpickle + if (v.major =!= version.major) + throw DecodingFailure.UnsupportedMajorVer(localVer = version, actual = v) + try { + val a = body.unpickle + picklerFooter.foreach(_.unpickle) + a + } catch { + case e: Throwable => throw new VerAndErr(v, e) + } + } + } + + def encode(a: A): BinaryData = { + val bb = PickleImpl.intoBytes(a)(implicitly, picklerCombined) + BinaryData.unsafeFromByteBuffer(bb) + } + + private def wrapRead(unpickle: => A): SafePickler.Result[A] = + try + Right(unpickle) + catch { + case e: EmbeddedFailure => Left(e.failure) + case e: VerAndErr => DecodingFailure.fromException(version, e.err, Some(e.ver)) + case e: Throwable => DecodingFailure.fromException(version, e, None) + } + + def decode(bin: BinaryData): SafePickler.Result[A] = + wrapRead(UnpickleImpl(picklerCombined).fromBytes(bin.unsafeByteBuffer)) + + def decodeOrThrow(bin: BinaryData): A = + decode(bin).fold(throw _, identity) + + val decodeBytes: Array[Byte] => SafePickler.Result[A] = + bytes => decode(BinaryData.unsafeFromArray(bytes)) + + def embeddedWrite(a: A)(implicit state: PickleState): Unit = + picklerCombined.pickle(a) + + def embeddedRead(implicit state: UnpickleState): A = + wrapRead(picklerCombined.unpickle) match { + case Right(a) => a + case Left(e) => throw new EmbeddedFailure(e) + } +} + +object SafePickler { + + type Result[+A] = Either[DecodingFailure, A] + + def success[A](a: A): Result[A] = + Right(a) + + sealed trait DecodingFailure extends RuntimeException { + val localVer: Version + val upstreamVer: 
Option[Version] + + def isLocalKnownToBeOutOfDate: Boolean = + upstreamVer.exists(localVer < _) + + def isUpstreamKnownToBeOutOfDate: Boolean = + upstreamVer.exists(_ < localVer) + } + + object DecodingFailure { + + final case class UnsupportedMajorVer(localVer: Version, actual: Version) + extends RuntimeException + with DecodingFailure { + override val upstreamVer = Some(actual) + } + + final case class MagicNumberMismatch( + localVer: Version, + actual: MagicNumber, + expected: MagicNumber, + upstreamVer: Option[Version] + ) extends RuntimeException + with DecodingFailure + + final case class InvalidVersion(localVer: Version, major: Int, minor: Int) + extends RuntimeException + with DecodingFailure { + override val upstreamVer = None + } + + final case class ExceptionOccurred( + localVer: Version, + exception: Throwable, + upstreamVer: Option[Version] + ) extends RuntimeException + with DecodingFailure { + + @elidable(elidable.ASSERTION) + private def devOnly(): Unit = + exception.printStackTrace(System.err) + + devOnly() + } + + def fromException( + localVer: Version, + err: Throwable, + upstreamVer: Option[Version] + ): Result[Nothing] = + err match { + case MagicNumberMismatch(_, a, b, None) => + Left(MagicNumberMismatch(localVer, a, b, upstreamVer)) + case e: DecodingFailure => Left(e) + case e: EmbeddedFailure => Left(e.failure) + case e: StackOverflowError => + Left(DecodingFailure.ExceptionOccurred(localVer, e, upstreamVer)) + case NonFatal(e) => Left(DecodingFailure.ExceptionOccurred(localVer, e, upstreamVer)) + case e => throw e + } + } + + private[SafePickler] final class VerAndErr(val ver: Version, val err: Throwable) + extends RuntimeException + + private[SafePickler] final class EmbeddedFailure(val failure: DecodingFailure) + extends RuntimeException + + private[SafePickler] def pickleVersion(localVer: Version): Pickler[Version] = + new Pickler[Version] { + override def pickle(a: Version)(implicit state: PickleState): Unit = { + state.enc.writeInt(a.major.value) + state.enc.writeInt(a.minor.value) + () + } + + override def unpickle(implicit state: UnpickleState): Version = { + val major = state.dec.readInt + val minor = state.dec.readInt + val minOk = major >= 1 && minor >= 0 + val maxOk = major <= 4 && minor <= 100 + if (minOk && maxOk) + Version.fromInts(major, minor) + else + throw DecodingFailure.InvalidVersion(localVer, major, minor) + } + } + + private[SafePickler] def pickleMagicNumber(localVer: Version, real: MagicNumber): Pickler[Unit] = + new Pickler[Unit] { + override def pickle(a: Unit)(implicit state: PickleState): Unit = + state.enc.writeRawInt(real.value) + () + + override def unpickle(implicit state: UnpickleState): Unit = { + val found = state.dec.readRawInt + if (found != real.value) + throw DecodingFailure.MagicNumberMismatch(localVer = localVer, + actual = MagicNumber(found), + expected = real, + upstreamVer = None + ) + } + } + + object ConstructionHelperImplicits { + implicit class SafePickler_PicklerExt[A](private val self: Pickler[A]) extends AnyVal { + @inline def asVersion(major: Int, minor: Int): SafePickler[A] = asVersion( + Version.fromInts(major, minor) + ) + def asVersion(v: Version): SafePickler[A] = SafePickler(None, None, v, self) + def asV1(minorVer: Int): SafePickler[A] = asVersion(Version.v1(minorVer)) + def asV2(minorVer: Int): SafePickler[A] = asVersion(Version.v2(minorVer)) + } + } +} + +final case class MagicNumber(value: Int) { + override def toString = s"MagicNumber(0x$hex)" + def hex = "%08X".format(value) +} diff --git 
a/workers/src/main/scala/japgolly/webapputil/boopickle/SafePicklerUtil.scala b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePicklerUtil.scala new file mode 100644 index 000000000..01050e220 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/boopickle/SafePicklerUtil.scala @@ -0,0 +1,65 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.boopickle + +import boopickle.PickleState +import boopickle.UnpickleState +import japgolly.webapputil.general.Version + +object SafePicklerUtil { + import PicklerUtil._ + + final case class UnsupportedVersionException(found: Version, maxSupported: Version) + extends RuntimeException( + s"${found.verStr} not supported. ${maxSupported.verStr} is the max supported." + ) + + case object CorruptData extends RuntimeException("Corrupt data.") + + /** + * Used to add a codec version to a binary protocol whilst retaining backwards-compatibility with + * the unversioned case. + */ + final val VersionHeader = -99988999 + + def writeVersion(ver: Int)(implicit state: PickleState): Unit = { + assert(ver > 0) // v1.0 is the default and doesn't need a version header + state.enc.writeInt(VersionHeader) + state.enc.writeInt(ver) + () + } + + def unsupportedVer(ver: Int, maxSupportedVer: Int): Nothing = + throw UnsupportedVersionException(found = Version.v1(ver), + maxSupported = Version.v1(maxSupportedVer) + ) + + def readByVersion[A]( + maxSupportedVer: Int + )(f: PartialFunction[Int, A])(implicit state: UnpickleState): A = { + assert(maxSupportedVer > 0) + + def unsupportedVer(ver: Int): Nothing = + SafePicklerUtil.unsupportedVer(ver, maxSupportedVer) + + def readVer(ver: Int): A = + f.applyOrElse[Int, A](ver, unsupportedVer) + + state.dec.peek(_.readInt) match { + case VersionHeader => + state.dec.readInt + val ver = state.dec.readInt + if (ver <= 0) + throw CorruptData + if ( + ver > maxSupportedVer + ) // preempt using the partial function in case maxSupportedVer is incorrect + unsupportedVer(ver) + readVer(ver) + case _ => + readVer(0) + } + } + +} diff --git a/workers/src/main/scala/japgolly/webapputil/general/ErrorMsg.scala b/workers/src/main/scala/japgolly/webapputil/general/ErrorMsg.scala new file mode 100644 index 000000000..4deb63308 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/general/ErrorMsg.scala @@ -0,0 +1,56 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.general + +import cats.Eq + +final case class ErrorMsg(value: String) { + + // Keep this as a val so that the stack trace points to where the error was created, as opposed to thrown. 
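+  // For illustration (hedged; `fetchTargets` is a made-up prefix, not part of this change): given
+  //   val err = ErrorMsg("connection refused")
+  //   err.withPrefix("fetchTargets: ").throwException()
+  // the thrown ErrorMsg.Exception carries the stack trace of the `ErrorMsg("connection refused")`
+  // construction site, because `modMsg` below copies this exception's stack trace onto the derived error.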
+ val exception: ErrorMsg.Exception = + ErrorMsg.Exception(this) + + def throwException(): Nothing = + throw exception + + def modMsg(f: String => String): ErrorMsg = { + val e = ErrorMsg(f(value)) + e.exception.setStackTrace(exception.getStackTrace) + e + } + + def withPrefix(s: String): ErrorMsg = + modMsg(s + _) +} + +object ErrorMsg { + + implicit def univEq: Eq[ErrorMsg] = + Eq.by(_.value) + + def fromThrowable(t: Throwable): ErrorMsg = + apply(Option(t.getMessage).getOrElse(t.toString).trim) + + def errorOccurred(t: Throwable): ErrorMsg = + Option(t.getMessage).map(_.trim).filter(_.nonEmpty) match { + case Some(m) => ErrorMsg("Error occurred: " + m) + case None => ErrorMsg("Error occurred.") + } + + object ClientSide { + def errorContactingServer = ErrorMsg("Error contacting server. Please try again.") + def failedToParseResponse = ErrorMsg("Failed to understand the response from the server.") + def noCompatibleServer = ErrorMsg( + "Failed to find a compatible server. Please try again, or try reloading the page." + ) + def serverCallTimeout = ErrorMsg( + "Server didn't respond. Please check your internet connectivity." + ) + def serverProtocolIsNewer = ErrorMsg( + "Our servers have been upgraded to a newer version. Please reload this page and try again." + ) + } + + final case class Exception(msg: ErrorMsg) extends RuntimeException(msg.value) +} diff --git a/workers/src/main/scala/japgolly/webapputil/general/Version.scala b/workers/src/main/scala/japgolly/webapputil/general/Version.scala new file mode 100644 index 000000000..fda99d1cd --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/general/Version.scala @@ -0,0 +1,60 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.general + +import cats.Eq +import cats.syntax.option.* + +import scala.collection.mutable + +final case class Version(major: Version.Major, minor: Version.Minor) { + override def toString = verStr + def verNum = s"${major.value}.${minor.value}" + def verStr = "v" + verNum +} + +object Version { + + def fromInts(major: Int, minor: Int): Version = + Version(Major(major), Minor(minor)) + + final case class Major(value: Int) { + assert(value >= 1) + } + + final case class Minor(value: Int) { + assert(value >= 0) + } + + implicit val univEqMajor: Eq[Major] = Eq.by(_.value) + implicit val univEqMinor: Eq[Minor] = Eq.by(_.value) + implicit val univEq: Eq[Version] = Eq.by(v => (v.major, v.minor)) + + implicit val ordering: Ordering[Version] = + new Ordering[Version] { + override def compare(x: Version, y: Version): Int = { + val i = x.major.value - y.major.value + if (i != 0) + i + else + x.minor.value - y.minor.value + } + } + + private val memoV1: mutable.Map[Int, Version] = mutable.Map.empty + def v1(minorVer: Int): Version = + memoV1 + .updateWith(minorVer): + case None => Version.fromInts(1, minorVer).some + case other => other + .get + + private val memoV2: mutable.Map[Int, Version] = mutable.Map.empty + def v2(minorVer: Int): Version = + memoV2 + .updateWith(minorVer): + case None => Version.fromInts(2, minorVer).some + case other => other + .get +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDb.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDb.scala new file mode 100644 index 000000000..4178cc550 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDb.scala @@ -0,0 
+1,896 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import cats.kernel.Eq +import japgolly.scalajs.react.* +import japgolly.scalajs.react.util.Util.identity as identityFn +import japgolly.webapputil.indexeddb.TxnMode.* +import org.scalajs.dom.* + +import scala.annotation.elidable +import scala.scalajs.js +import scala.util.Failure +import scala.util.Success +import scala.util.Try + +final class IndexedDb(raw: IDBFactory) { + import IndexedDb._ + import IndexedDb.Internals._ + + def open(name: DatabaseName): OpenResult = + _open(raw.open(name.value)) + + def open(name: DatabaseName, version: Int): OpenResult = + _open(raw.open(name.value, version)) + + private def _open(rawOpen: => IDBOpenDBRequest[IDBDatabase]): OpenResult = + callbacks => { + + def create(): IDBOpenDBRequest[IDBDatabase] = { + val r = rawOpen + + // r.onblocked = callbacks.blocked.toJsFn1 + + r.onupgradeneeded = e => { + val db = new DatabaseInVersionChange(r.result) + val args = versionChange(db, e) + callbacks.upgradeNeeded(args).runNow() + } + + r + } + + asyncRequest(create()) { r => + val rawDb = r.result + + rawDb.onversionchange = e => + try { + val args = versionChange(new DatabaseInVersionChange(rawDb), e) + callbacks.versionChange(args).runNow() + } finally { + // We close the DB at the end of this event on matter what so that other connections don't block and we + // don't have to handle onblocked events. + rawDb.close() + + // The onclose event handler is only fired "when the database is unexpectedly closed". + // Therefore we call it here explicitly. + // https://developer.mozilla.org/en-US/docs/Web/API/IDBDatabase/onclose + callbacks.closed.runNow() + } + + rawDb.onclose = _ => callbacks.closed.runNow() + + new Database(rawDb, onClose = callbacks.closed) + } + } + + def deleteDatabase(name: DatabaseName): AsyncCallback[Unit] = + asyncRequest_(raw.deleteDatabase(name.value)) +} + +// ===================================================================================================================== + +object IndexedDb { + + def apply(raw: IDBFactory): IndexedDb = + new IndexedDb(raw) + + def global(): Option[IndexedDb] = + try + window.indexedDB.toOption.map(apply) + catch { + case _: Throwable => None + } + + // =================================================================================================================== + // Main types and classes + + import Internals._ + + final case class DatabaseName(value: String) + + type OpenResult = OpenCallbacks => AsyncCallback[Database] + + /** + * Callbacks to install when opening a DB. + * + * Note 1: On `versionChange`, the DB connection will be closed automatically. + * + * Note 2: There's no `blocked` handler because we currently don't allow blocking. To quote the + * idb spec: if "there are open connections that don’t close in response to a versionchange event, + * the request will be blocked until all they close". 
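+   *
+   * An illustrative sketch of wiring these callbacks (hedged: `myStore` and the database name below are
+   * assumptions, not defined in this change):
+   * {{{
+   *   val openDb: Option[AsyncCallback[IndexedDb.Database]] =
+   *     IndexedDb.global().map { idb =>
+   *       idb.open(IndexedDb.DatabaseName("example-db"), version = 1)(
+   *         IndexedDb.OpenCallbacks(upgradeNeeded = _.createObjectStore(myStore, createdInDbVer = 1))
+   *       )
+   *     }
+   * }}}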
+ */ + final case class OpenCallbacks( + upgradeNeeded: VersionChange => Callback, + versionChange: VersionChange => Callback = _ => Callback.empty, + closed: Callback = Callback.empty + ) + + final case class Error(event: ErrorEvent) + extends RuntimeException( + event.asInstanceOf[js.Dynamic].message.asInstanceOf[js.UndefOr[String]].getOrElse(null) + ) { + + // Note: allowing .message to be undefined is presumably only required due to use of fake-indexeddb in tests + val msg: String = + event.asInstanceOf[js.Dynamic].message.asInstanceOf[js.UndefOr[String]].getOrElse("") + + @elidable(elidable.FINEST) + override def toString = + s"IndexedDb.Error($msg)" + + def isStoredDatabaseHigherThanRequested: Boolean = + // Chrome: The requested version (1) is less than the existing version (2). + // Firefox: The operation failed because the stored database is a higher version than the version requested. + msg.contains("version") && (msg.contains("higher") || msg.contains("less than")) + } + + final case class VersionChange( + db: DatabaseInVersionChange, + oldVersion: Int, + newVersion: Option[Int] + ) { + def createObjectStore[K, V](defn: ObjectStoreDef[K, V], createdInDbVer: Int): Callback = + Callback.when(oldVersion < createdInDbVer && newVersion.exists(_ >= createdInDbVer))( + db.createObjectStore(defn) + ) + } + + final class DatabaseInVersionChange(raw: IDBDatabase) { + def createObjectStore[K, V](defn: ObjectStoreDef[K, V]): Callback = + Callback { + raw.createObjectStore(defn.name) + } + } + + final class Database(raw: IDBDatabase, onClose: Callback) { + + def atomic[K, V](store: ObjectStoreDef.Async[K, V]): AtomicAsyncDsl[K, V] = + new AtomicAsyncDsl(this, store) + + def close: Callback = { + val actuallyClose = + Callback(raw.close()).attempt + + // The onclose event handler is only fired "when the database is unexpectedly closed". + // Therefore we call it here explicitly. 
+ // https://developer.mozilla.org/en-US/docs/Web/API/IDBDatabase/onclose + actuallyClose >> onClose + } + + def compareAndSet(stores: ObjectStoreDef[?, ?]*): CasDsl1 = + new CasDsl1(this, stores) + + def transactionRO: RunTxnDsl1[RO] = + new RunTxnDsl1(raw, TxnDslRO, IDBTransactionMode.readonly) + + def transactionRW: RunTxnDsl1[RW] = + new RunTxnDsl1(raw, TxnDslRW, IDBTransactionMode.readwrite) + + // Convenience methods + + /** Note: insert only */ + def add[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: V): AsyncCallback[Unit] = + store.encode(value).flatMap(add(store.sync)(key, _)) + + /** Note: insert only */ + def add[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: V): AsyncCallback[Unit] = + transactionRW(store)(_.objectStore(store).flatMap(_.add(key, value))) + + /** Note: insert only */ + def addWhenDefined[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + AsyncCallback.traverseOption_(value)(add(store)(key, _)) + + /** Note: insert only */ + def addWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + AsyncCallback.traverseOption_(value)(add(store)(key, _)) + + def clear[K, V](store: ObjectStoreDef[K, V]): AsyncCallback[Unit] = + transactionRW(store)(_.objectStore(store.sync).flatMap(_.clear)) + + def delete[K, V](store: ObjectStoreDef[K, V])(key: K): AsyncCallback[Unit] = + transactionRW(store)(_.objectStore(store.sync).flatMap(_.delete(key))) + + def get[K, V](store: ObjectStoreDef.Async[K, V])(key: K): AsyncCallback[Option[V]] = + get(store.sync)(key).flatMap(AsyncCallback.traverseOption(_)(_.decode)) + + def get[K, V](store: ObjectStoreDef.Sync[K, V])(key: K): AsyncCallback[Option[V]] = + transactionRO(store)(_.objectStore(store).flatMap(_.get(key))) + + def getAllKeys[K, V](store: ObjectStoreDef[K, V]): AsyncCallback[Vector[K]] = + transactionRO(store)(_.objectStore(store.sync).flatMap(_.getAllKeys)) + + def getAllValues[K, V](store: ObjectStoreDef.Async[K, V]): AsyncCallback[Vector[V]] = + getAllValues(store.sync).flatMap(AsyncCallback.traverse(_)(_.decode)) + + def getAllValues[K, V](store: ObjectStoreDef.Sync[K, V]): AsyncCallback[Vector[V]] = + transactionRO(store)(_.objectStore(store).flatMap(_.getAllValues)) + + /** aka upsert */ + def put[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: V): AsyncCallback[Unit] = + store.encode(value).flatMap(put(store.sync)(key, _)) + + /** aka upsert */ + def put[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: V): AsyncCallback[Unit] = + transactionRW(store)(_.objectStore(store).flatMap(_.put(key, value))) + + /** aka upsert or delete */ + def putOrDelete[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value match { + case Some(v) => put(store)(key, v) + case None => delete(store)(key) + } + + /** aka upsert or delete */ + def putOrDelete[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value match { + case Some(v) => put(store)(key, v) + case None => delete(store)(key) + } + + /** aka upsert */ + def putWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + AsyncCallback.traverseOption_(value)(put(store)(key, _)) + + /** aka upsert */ + def putWhenDefined[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + AsyncCallback.traverseOption_(value)(put(store)(key, _)) + } // class Database + + final class 
ObjectStore[K, V](val defn: ObjectStoreDef.Sync[K, V]) { + import defn.{keyCodec, valueCodec} + + private implicit def autoWrap[M <: TxnMode, A](s: TxnStep[M, A]): Txn[M, A] = + Txn(s) + + /** Note: insert only */ + def add(key: K, value: V): Txn[RW, Unit] = { + val k = keyCodec.encode(key) + TxnDslRW.eval(valueCodec.encode(value)).flatMap(TxnStep.StoreAdd(this, k, _)) + } + + /** Note: insert only */ + def addWhenDefined(key: K, value: Option[V]): Txn[RW, Unit] = + value.fold[Txn[RW, Unit]](TxnStep.unit)(add(key, _)) + + def clear: Txn[RW, Unit] = + TxnStep.StoreClear(this) + + def delete(key: K): Txn[RW, Unit] = + TxnStep.StoreDelete(this, keyCodec.encode(key)) + + def get(key: K): Txn[RO, Option[V]] = + TxnStep.StoreGet(this, keyCodec.encode(key)) + + def getAllKeys: Txn[RO, Vector[K]] = + TxnStep.StoreGetAllKeys(this) + + def getAllValues: Txn[RO, Vector[V]] = + TxnStep.StoreGetAllVals(this) + + def modify(key: K)(f: V => V): Txn[RW, Option[V]] = + get(key).flatMap { + case Some(v1) => + val v2 = f(v1) + put(key, v2).map(_ => Some(v2)) + case None => + TxnDslRW.none + } + + def modifyOption(key: K)(f: Option[V] => Option[V]): Txn[RW, Option[V]] = + for { + o1 <- get(key) + o2 = f(o1) + _ <- putOrDelete(key, o2) + } yield o2 + + /** aka upsert */ + def put(key: K, value: V): Txn[RW, Unit] = { + val k = keyCodec.encode(key) + TxnDslRW.eval(valueCodec.encode(value)).flatMap(TxnStep.StorePut(this, k, _)) + } + + /** aka upsert or delete */ + def putOrDelete(key: K, value: Option[V]): Txn[RW, Unit] = + value match { + case Some(v) => put(key, v) + case None => delete(key) + } + + /** aka upsert */ + def putWhenDefined(key: K, value: Option[V]): Txn[RW, Unit] = + value.fold[Txn[RW, Unit]](TxnStep.unit)(put(key, _)) + } + + // =================================================================================================================== + // DSLs + + final class AtomicAsyncDsl[K, V](db: Database, store: ObjectStoreDef.Async[K, V]) { + + /** + * Performs an async modification on a store value. + * + * This only modifies an existing value. Use [[modifyAsyncOption()]] to upsert and/or delete + * values. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. + * + * @return + * If the value exists, this returns the previous and updated values + */ + def modify(key: K)(f: V => V): AsyncCallback[Option[(V, V)]] = + modifyAsync(key)(v => AsyncCallback.pure(f(v))) + + /** + * Performs an async modification on a store value. + * + * This only modifies an existing value. Use [[modifyAsyncOption()]] to upsert and/or delete + * values. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. + * + * @return + * If the value exists, this returns the previous and updated values + */ + def modifyAsync(key: K)(f: V => AsyncCallback[V]): AsyncCallback[Option[(V, V)]] = + db + .compareAndSet(store) + .getValueAsync(store)(key) + .mapAsync(AsyncCallback.traverseOption(_)(v1 => f(v1).map((v1, _)))) + .putResultWhenDefinedBy(store)(key, _.map(_._2)) + + /** + * Performs an async modification on an optional store value. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. + * + * @return + * The previous and updated values + */ + def modifyOption(key: K)(f: Option[V] => Option[V]): AsyncCallback[(Option[V], Option[V])] = + modifyOptionAsync(key)(v => AsyncCallback.pure(f(v))) + + /** + * Performs an async modification on an optional store value. + * + * This uses [[compareAndSet()]] for atomicity and thread-safety. 
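+     *
+     * Illustrative sketch (hedged), assuming a hypothetical `counterStore: ObjectStoreDef.Async[String, Int]`
+     * and an already-open `db: Database`:
+     * {{{
+     *   db.atomic(counterStore).modifyOption("hits")(_.map(_ + 1).orElse(Some(1)))
+     * }}}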
+ * + * @return + * The previous and updated values + */ + def modifyOptionAsync( + key: K + )(f: Option[V] => AsyncCallback[Option[V]]): AsyncCallback[(Option[V], Option[V])] = + db + .compareAndSet(store) + .getValueAsync(store)(key) + .mapAsync(o1 => f(o1).map((o1, _))) + .putOrDeleteResultBy(store)(key, _._2) + } + + final class RunTxnDsl1[M <: TxnMode] private[IndexedDb] ( + raw: IDBDatabase, + txnDsl: TxnDsl[M], + mode: IDBTransactionMode + ) { + def apply(stores: ObjectStoreDef[?, ?]*): RunTxnDsl2[M] = + new RunTxnDsl2(raw, txnDsl, mode, mkStoreArray(stores)) + } + + final class RunTxnDsl2[M <: TxnMode] private[IndexedDb] ( + raw: IDBDatabase, + txnDsl: TxnDsl[M], + mode: IDBTransactionMode, + stores: js.Array[String] + ) { + + def apply[A](f: TxnDsl[M] => Txn[M, A]): AsyncCallback[A] = { + val x = CallbackTo.pure(f(txnDsl)) + sync(_ => x) + } + + def sync[A](dslCB: TxnDsl[M] => CallbackTo[Txn[M, A]]): AsyncCallback[A] = { + + @inline def startRawTxn(complete: Try[Unit] => Callback) = { + val txn = raw.transaction(stores, mode) + + txn.onerror = event => complete(Failure(Error(event))).runNow() + + txn.oncomplete = complete(success_).toJsFn1 + + txn + } + + for { + dsl <- dslCB(txnDsl).asAsyncCallback + + (awaitTxnCompletion, complete) <- AsyncCallback.promise[Unit].asAsyncCallback + + result <- AsyncCallback.suspend { + val txn = startRawTxn(complete) + interpretTxn(txn, dsl) + } + + _ <- awaitTxnCompletion + + } yield result + } + + def async[A](dsl: TxnDsl[M] => AsyncCallback[Txn[M, A]]): AsyncCallback[A] = + // Note: This is safer than it looks. + // 1) This is `Dsl => AsyncCallback[Txn[A]]` + // and not `Dsl => Txn[AsyncCallback[A]]` + // 2) Everything within Txn is still synchronous and lawful + // 3) Only one transaction is created (i.e. only one call to `apply`) + dsl(txnDsl).flatMap(txnA => apply(_ => txnA)) + } + + final class CasDsl1(db: Database, stores: Seq[ObjectStoreDef[?, ?]]) { + + def get[A](f: TxnDsl[RO] => Txn[RO, A])(implicit e: Eq[A]) = + getAndCompareBy(f)(e.eqv) + + def getAndCompareBy_==[A](f: TxnDsl[RO] => Txn[RO, A]) = + getAndCompareBy(f)(_ == _) + + def getAndCompareBy[A](f: TxnDsl[RO] => Txn[RO, A])(eql: (A, A) => Boolean) = + new CasDsl2[A](db, stores, f(TxnDslRO), eql) + + /** Note: CAS comparison is on the raw IDB value, i.e. the result prior to async decoding */ + def getValueAsync[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K): CasDsl3[Option[store.Value], Option[V]] = + get(_.objectStore(store).flatMap(_.get(key))) + .mapAsync(AsyncCallback.traverseOption(_)(_.decode)) + + /** Note: CAS comparison is on `Option[V]`, i.e. 
the decoded result */ + def getValueSync[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + e: Eq[Option[V]] + ): CasDsl2[Option[V]] = + get(_.objectStore(store).flatMap(_.get(key))) + + def getAllKeys[K, V](store: ObjectStoreDef[K, V])(implicit + e: Eq[Vector[K]] + ): CasDsl2[Vector[K]] = + get(_.objectStore(store.sync).flatMap(_.getAllKeys)) + + def getAllValuesAsync[K, V]( + store: ObjectStoreDef.Async[K, V] + ): CasDsl3[Vector[store.Value], Vector[V]] = + get(_.objectStore(store).flatMap(_.getAllValues)) + .mapAsync(AsyncCallback.traverse(_)(_.decode)) + + def getAllValuesSync[K, V](store: ObjectStoreDef.Sync[K, V])(implicit + e: Eq[Vector[V]] + ): CasDsl2[Vector[V]] = + get(_.objectStore(store.sync).flatMap(_.getAllValues)) + } + + sealed trait CasDsl23[A, B] { + def mapAsync[C](f: B => AsyncCallback[C]): CasDsl3[A, C] + + final def map[C](f: B => C) = + mapAsync(b => AsyncCallback.pure(f(b))) + + final def mapSync[C](f: B => CallbackTo[C]) = + mapAsync(f(_).asAsyncCallback) + } + + final class CasDsl2[A]( + db: Database, + stores: Seq[ObjectStoreDef[?, ?]], + get: Txn[RO, A], + eql: (A, A) => Boolean + ) extends CasDsl23[A, A] { + + private def next = mapAsync(AsyncCallback.pure) + + override def mapAsync[C](f: A => AsyncCallback[C]) = + new CasDsl3[A, C](db, stores, get, eql, f) + + def set[C](set: TxnDsl[RW] => A => Txn[RW, C]): AsyncCallback[C] = + next.set(dsl => (a, _) => set(dsl)(a)) + } + + final class CasDsl3[A, B]( + db: Database, + stores: Seq[ObjectStoreDef[?, ?]], + get: Txn[RO, A], + eql: (A, A) => Boolean, + prep: A => AsyncCallback[B] + ) extends CasDsl23[A, B] { + + override def mapAsync[C](f: B => AsyncCallback[C]) = + new CasDsl3[A, C](db, stores, get, eql, prep(_).flatMap(f)) + + def set[C](set: TxnDsl[RW] => (A, B) => Txn[RW, C]): AsyncCallback[C] = { + val txnRO = db.transactionRO(stores*) + val txnRW = db.transactionRW(stores*) + + def loopTxn(a: A, b: B): AsyncCallback[Either[A, C]] = + txnRW { dsl => + (get: Txn[RW, A]).flatMap { a2 => + if (eql(a, a2)) + set(dsl)(a, b).map(Right(_)) + else + dsl.pure(Left(a2)) + } + } + + def loop(a: A): AsyncCallback[Either[A, C]] = + for { + b <- prep(a) + e <- loopTxn(a, b) + } yield e + + txnRO(_ => get).flatMap(AsyncCallback.tailrec(_)(loop)) + } + + // Convenience methods + + /** Note: insert only */ + def add[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: V): AsyncCallback[Unit] = + store.encode(value).flatMap(add(store.sync)(key, _)) + + /** Note: insert only */ + def add[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: V): AsyncCallback[Unit] = + setConst(_.objectStore(store).flatMap(_.add(key, value))) + + /** Note: insert only */ + def addResult[K](store: ObjectStoreDef.Async[K, B])(key: K): AsyncCallback[B] = + addResultBy(store)(key, identityFn) + + /** Note: insert only */ + def addResult[K](store: ObjectStoreDef.Sync[K, B])(key: K): AsyncCallback[B] = + addResultBy(store)(key, identityFn) + + /** Note: insert only */ + def addResultBy[K, V](store: ObjectStoreDef.Async[K, V])(key: K, f: B => V): AsyncCallback[B] = + mapAsync(b => store.encode(f(b)).map((b, _))) + .addResultBy(store.sync)(key, _._2) + .map(_._1) + + /** Note: insert only */ + def addResultBy[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, f: B => V): AsyncCallback[B] = + set(dsl => + (_, b) => + for { + s <- dsl.objectStore(store) + _ <- s.add(key, f(b)) + } yield b + ) + + /** Note: insert only */ + @inline def addResultWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit + ev: B => Option[V] + ): 
AsyncCallback[B] = + addResultWhenDefinedBy(store)(key, ev) + + /** Note: insert only */ + @inline def addResultWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = + addResultWhenDefinedBy(store)(key, ev) + + /** Note: insert only */ + def addResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + mapAsync(b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _))) + .addResultWhenDefinedBy(store.sync)(key, _._2) + .map(_._1) + + /** Note: insert only */ + def addResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + set(dsl => + (_, b) => + f(b) match { + case Some(v) => + for { + s <- dsl.objectStore(store) + _ <- s.add(key, v) + } yield b + case None => + dsl.pure(b) + } + ) + + /** Note: insert only */ + def addWhenDefined[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value.fold(AsyncCallback.unit)(add(store)(key, _)) + + /** Note: insert only */ + def addWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value.fold(AsyncCallback.unit)(add(store)(key, _)) + + def clear[K, V](store: ObjectStoreDef[K, V]): AsyncCallback[Unit] = + setConst(_.objectStore(store.sync).flatMap(_.clear)) + + def delete[K, V](store: ObjectStoreDef[K, V])(key: K): AsyncCallback[Unit] = + setConst(_.objectStore(store.sync).flatMap(_.delete(key))) + + /** aka upsert */ + def put[K, V](store: ObjectStoreDef.Async[K, V])(key: K, value: V): AsyncCallback[Unit] = + store.encode(value).flatMap(put(store.sync)(key, _)) + + /** aka upsert */ + def put[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, value: V): AsyncCallback[Unit] = + setConst(_.objectStore(store).flatMap(_.put(key, value))) + + /** aka upsert or delete */ + def putOrDelete[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value match { + case Some(v) => put(store)(key, v) + case None => delete(store)(key) + } + + /** aka upsert or delete */ + def putOrDelete[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value match { + case Some(v) => put(store)(key, v) + case None => delete(store)(key) + } + + /** Note: upsert */ + @inline def putOrDeleteResult[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = + putOrDeleteResultBy(store)(key, ev) + + /** Note: upsert */ + @inline def putOrDeleteResult[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = + putOrDeleteResultBy(store)(key, ev) + + /** Note: upsert */ + def putOrDeleteResultBy[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + mapAsync(b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _))) + .putOrDeleteResultBy(store.sync)(key, _._2) + .map(_._1) + + /** Note: upsert */ + def putOrDeleteResultBy[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + set(dsl => + (_, b) => + for { + s <- dsl.objectStore(store) + _ <- s.putOrDelete(key, f(b)) + } yield b + ) + + /** Note: upsert */ + def putResult[K](store: ObjectStoreDef.Async[K, B])(key: K): AsyncCallback[B] = + putResultBy(store)(key, identityFn) + + /** Note: upsert */ + def putResult[K](store: ObjectStoreDef.Sync[K, B])(key: K): AsyncCallback[B] = + 
putResultBy(store)(key, identityFn) + + /** Note: upsert */ + def putResultBy[K, V](store: ObjectStoreDef.Async[K, V])(key: K, f: B => V): AsyncCallback[B] = + mapAsync { b => + val v = f(b) + for { + enc <- store.encode(v) + } yield (b, enc) + } + .putResultBy(store.sync)(key, _._2) + .map(_._1) + + /** Note: upsert */ + def putResultBy[K, V](store: ObjectStoreDef.Sync[K, V])(key: K, f: B => V): AsyncCallback[B] = + set(dsl => + (_, b) => + for { + s <- dsl.objectStore(store) + _ <- s.put(key, f(b)) + } yield b + ) + + /** Note: upsert */ + @inline def putResultWhenDefined[K, V](store: ObjectStoreDef.Async[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = + putResultWhenDefinedBy(store)(key, ev) + + /** Note: upsert */ + @inline def putResultWhenDefined[K, V](store: ObjectStoreDef.Sync[K, V])(key: K)(implicit + ev: B => Option[V] + ): AsyncCallback[B] = + putResultWhenDefinedBy(store)(key, ev) + + /** Note: upsert */ + def putResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + mapAsync(b => AsyncCallback.traverseOption(f(b))(store.encode(_)).map((b, _))) + .putResultWhenDefinedBy(store.sync)(key, _._2) + .map(_._1) + + /** Note: upsert */ + def putResultWhenDefinedBy[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, f: B => Option[V]): AsyncCallback[B] = + set(dsl => + (_, b) => + f(b) match { + case Some(v) => + for { + s <- dsl.objectStore(store) + _ <- s.put(key, v) + } yield b + case None => + dsl.pure(b) + } + ) + + /** Note: upsert */ + def putWhenDefined[K, V]( + store: ObjectStoreDef.Async[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value.fold(AsyncCallback.unit)(put(store)(key, _)) + + /** Note: upsert */ + def putWhenDefined[K, V]( + store: ObjectStoreDef.Sync[K, V] + )(key: K, value: Option[V]): AsyncCallback[Unit] = + value.fold(AsyncCallback.unit)(put(store)(key, _)) + + def setConst[C](set: TxnDsl[RW] => Txn[RW, C]): AsyncCallback[C] = + this.set(dsl => (_, _) => set(dsl)) + } + + // =================================================================================================================== + private object Internals { + + val success_ = Success(()) + + def asyncRequest_[R <: IDBRequest[Any, ?]](act: => R): AsyncCallback[Unit] = + asyncRequest(act)(_ => ()) + + def asyncRequest[R <: IDBRequest[Any, ?], A](act: => R)(onSuccess: R => A): AsyncCallback[A] = + AsyncCallback.promise[A].asAsyncCallback.flatMap { case (promise, complete) => + val raw = act + + raw.onerror = event => complete(Failure(Error(event))).runNow() + + raw.onsuccess = _ => complete(Try(onSuccess(raw))).runNow() + + promise + } + + def versionChange(db: DatabaseInVersionChange, e: IDBVersionChangeEvent): VersionChange = + VersionChange(db, e.oldVersion.toInt, e.newVersionOption.map(_.toInt)) + + def mkStoreArray(stores: Seq[ObjectStoreDef[?, ?]]): js.Array[String] = { + val a = new js.Array[String] + stores.foreach(s => a.push(s.name)) + a + } + + def interpretTxn[M <: TxnMode, A](txn: IDBTransaction, dsl: Txn[M, A]): AsyncCallback[A] = + AsyncCallback.suspend { + import TxnStep._ + + val stores = js.Dynamic.literal().asInstanceOf[js.Dictionary[IDBObjectStore]] + + def getStore(s: ObjectStore[?, ?]) = + AsyncCallback.delay(stores.get(s.defn.name).get) + + def interpret[B](step: TxnStep[TxnMode, B]): AsyncCallback[B] = + step match { + + case FlatMap(fa, f) => + interpret(fa).flatMap(a => interpret(f(a))) + + case StoreGet(s, k) => + getStore(s).flatMap { store => + 
asyncRequest(store.get(k.asJs))(_.result).flatMapSync { result => + if (js.isUndefined(result)) + CallbackTo.pure(None) + else + s.defn.valueCodec.decode(result).map(Some(_)) + } + } + + case Eval(c) => + c.asAsyncCallback + + case Suspend(s) => + s.asAsyncCallback.flatMap(interpret(_)) + + case GetStore(sd) => + AsyncCallback.delay { + val s = txn.objectStore(sd.name) + stores.put(sd.name, s) + new ObjectStore(sd) + } + + case StoreAdd(s, k, v) => + getStore(s).flatMap { store => + asyncRequest_(store.add(v, k.asJs)) + } + + case StorePut(s, k, v) => + getStore(s).flatMap { store => + asyncRequest_(store.put(v, k.asJs)) + } + + case Map(fa, f) => + interpret(fa).map(f) + + case StoreDelete(s, k) => + getStore(s).flatMap { store => + asyncRequest_(store.delete(k.asJs)) + } + + case StoreGetAllKeys(s) => + import s.defn.keyCodec + getStore(s).flatMap { store => + asyncRequest(store.getAllKeys()) { req => + val rawKeys = req.result + Vector.tabulate(rawKeys.length) { i => + val rawKey = rawKeys(i) + keyCodec + .decode(IndexedDbKey.fromJs(rawKey)) + .runNow() // safe in asyncRequest onSuccess + } + } + } + + case StoreGetAllVals(s) => + import s.defn.valueCodec + getStore(s).flatMap { store => + asyncRequest(store.getAll()) { req => + val rawVals = req.result + Vector.tabulate(rawVals.length) { i => + val rawVal = rawVals(i) + valueCodec.decode(rawVal).runNow() // safe in asyncRequest onSuccess + } + } + } + + case StoreClear(s) => + getStore(s).flatMap { store => + asyncRequest_(store.clear()) + } + + case TailRec(z, f) => + AsyncCallback.tailrec(z)(a => interpret(f(a))) + + } + + interpret(dsl.step) + } + + } // object Internals +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDbKey.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDbKey.scala new file mode 100644 index 000000000..7a38d1bc5 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/IndexedDbKey.scala @@ -0,0 +1,24 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import org.scalajs.dom.IDBKey + +final class IndexedDbKey private (val asJs: IDBKey) extends AnyVal { + @inline def value = asJs.asInstanceOf[IndexedDbKey.Typed] +} + +object IndexedDbKey { + + // https://w3c.github.io/IndexedDB/#key-construct + // A key has an associated type which is one of: number, date, string, binary, or array. + + type Typed = String | Double + + @inline def apply(t: Typed): IndexedDbKey = + fromJs(t) + + def fromJs(k: IDBKey): IndexedDbKey = + new IndexedDbKey(k) +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/KeyCodec.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/KeyCodec.scala new file mode 100644 index 000000000..eedfeea8f --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/KeyCodec.scala @@ -0,0 +1,54 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. 
(AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import japgolly.scalajs.react.CallbackTo + +import java.util.UUID +import scala.reflect.ClassTag +import scala.scalajs.js + +final case class KeyCodec[A](encode: A => IndexedDbKey, decode: IndexedDbKey => CallbackTo[A]) { + + def xmap[B](onDecode: A => B)(onEncode: B => A): KeyCodec[B] = + // Delegating because decoding can fail and must be wrapped to be pure + xmapSync(a => CallbackTo(onDecode(a)))(onEncode) + + def xmapSync[B](onDecode: A => CallbackTo[B])(onEncode: B => A): KeyCodec[B] = + KeyCodec[B](encode = encode.compose(onEncode), decode = decode(_).flatMap(onDecode)) +} + +object KeyCodec { + + lazy val double: KeyCodec[Double] = + primative("Double") + + lazy val int: KeyCodec[Int] = + primative("Int") + + lazy val long: KeyCodec[Long] = + string.xmap(_.toLong)(_.toString) + + def primative[A]( + name: String + )(implicit ev: A => IndexedDbKey.Typed, ct: ClassTag[A]): KeyCodec[A] = + apply[A]( + a => IndexedDbKey(ev(a)), + k => + CallbackTo( + (k.value: Any) match { + case a: A => a + case x => + throw new js.JavaScriptException(s"Invalid IDB key found. $name expected, got: $x") + } + ) + ) + + lazy val string: KeyCodec[String] = + primative("String") + + lazy val uuid: KeyCodec[UUID] = + string.xmap(UUID.fromString)(_.toString) + +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/ObjectStoreDef.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/ObjectStoreDef.scala new file mode 100644 index 000000000..8eef15b93 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/ObjectStoreDef.scala @@ -0,0 +1,88 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import cats.Eq +import japgolly.scalajs.react.AsyncCallback +import japgolly.scalajs.react.CallbackTo +import org.scalajs.dom.IDBValue + +sealed trait ObjectStoreDef[K, V] { + val name: String + val keyCodec: KeyCodec[K] + + final type Key = K + + def sync: ObjectStoreDef.Sync[K, ?] 
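+
+  // A minimal sketch of declaring a synchronous store definition (the store name and types are
+  // illustrative, not part of this library):
+  //
+  //   val people: ObjectStoreDef.Sync[java.util.UUID, String] =
+  //     ObjectStoreDef.Sync("people", KeyCodec.uuid, ValueCodec.string)
+  //
+  // An `Async` store is declared the same way but with a `ValueCodec.Async[V]`, so that values
+  // can be encoded/decoded asynchronously (e.g. binary formats that pickle or compress).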
+} + +object ObjectStoreDef { + + final case class Sync[K, V](name: String, keyCodec: KeyCodec[K], valueCodec: ValueCodec[V]) + extends ObjectStoreDef[K, V] { + + type Value = V + + override def sync: this.type = + this + } + + // =================================================================================================================== + + final case class Async[K, V](name: String, keyCodec: KeyCodec[K], valueCodec: ValueCodec.Async[V]) + extends ObjectStoreDef[K, V] { self => + + type Value = Async.Value { + type KeyType = K + type ValueType = V + val store: self.type + } + + def encode(v: V): AsyncCallback[Value] = + valueCodec.encode(v).map(value) + + def value(v: IDBValue): Value = + new Async.Value { + override type KeyType = K + override type ValueType = V + override val store: self.type = self + override val value = v + } + + override val sync: Sync[K, Value] = { + val syncValueCodec = ValueCodec[Value]( + encode = v => CallbackTo.pure(v.value), + decode = v => CallbackTo.pure(value(v)) + ) + Sync(name, keyCodec, syncValueCodec) + } + } + + object Async { + + sealed trait Value { + type KeyType + type ValueType + val store: Async[KeyType, ValueType] + val value: IDBValue + + final def decode: AsyncCallback[ValueType] = + store.valueCodec.decode(value) + + final override def hashCode = + store.name.hashCode * 29 + value.## + + final override def equals(o: Any) = + o match { + case x: Value => store.name == x.store.name && value == x.value + case _ => false + } + } + + object Value { + implicit def univEq[V <: Value]: Eq[V] = Eq.fromUniversalEquals + } + } + +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/Txn.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/Txn.scala new file mode 100644 index 000000000..4202d6bbb --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/Txn.scala @@ -0,0 +1,89 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import cats.Monad +import japgolly.webapputil.indexeddb.TxnMode.* + +/** + * Embedded language for safely working with(in) an IndexedDB transaction. + * + * This is necessary because whilst all the transaction methods are async, any other type of + * asynchronicity is not supported and will result in IndexedDB automatically committing and closing + * the transaction, in which case, further interaction with the transaction will result in a runtime + * error. + * + * Therefore, returning [[AsyncCallback]] from within transactions is dangerous because it allows + * composition of both kinds of asynchronicity. To avoid this, we use this embedded language and + * don't publicly expose its interpretation/translation to [[AsyncCallback]]. From the call-site's + * point of view, a `Txn[A]` is completely opaque. + * + * This also has a nice side-effect of ensuring that transaction completion is always awaited + * because we do it in the transaction functions right after interpretation. Otherwise, the + * call-sites would always need to remember to do it if live transaction access were exposed. + * + * @tparam A + * The return type. 
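+ *
+ * A composition sketch, assuming an open database `db`, a store
+ * `people: ObjectStoreDef.Sync[UUID, String]` and a key `alice: UUID` (all illustrative, not
+ * part of this library):
+ * {{{
+ *   val putThenGet: AsyncCallback[Option[String]] =
+ *     db.transactionRW(people) { dsl =>
+ *       for {
+ *         s <- dsl.objectStore(people)
+ *         _ <- s.put(alice, "Alice")  // RW step
+ *         v <- s.get(alice)           // RO step; the merged transaction mode is RW
+ *       } yield v
+ *     }
+ * }}}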
+ */ +final case class Txn[+M <: TxnMode, +A](step: TxnStep[M, A]) { self => + import TxnStep._ + + def map[B](f: A => B): Txn[M, B] = + Txn(Map(step, f)) + + def void: Txn[M, Unit] = + map(_ => ()) +} + +object Txn { + + @inline implicit final class InvariantOps[M <: TxnMode, A](private val self: Txn[M, A]) + extends AnyVal { + import TxnStep._ + + def flatMap[N <: TxnMode, B]( + f: A => Txn[N, B] + )(implicit m: TxnMode.Merge[M, N]): Txn[m.Mode, B] = { + val step = FlatMap[m.Mode, A, B](m.substM(self.step), a => m.substN(f(a).step)) + Txn(step) + } + + @inline def unless(cond: Boolean)(implicit + ev: TxnStep[RO, Option[Nothing]] => Txn[M, Option[Nothing]] + ): Txn[M, Option[A]] = + when(!cond) + + @inline def unless_(cond: Boolean)(implicit + ev: TxnStep[RO, Unit] => Txn[M, Unit] + ): Txn[M, Unit] = + when_(!cond) + + def when(cond: Boolean)(implicit + ev: TxnStep[RO, Option[Nothing]] => Txn[M, Option[Nothing]] + ): Txn[M, Option[A]] = + if (cond) self.map(Some(_)) else ev(none) + + def when_(cond: Boolean)(implicit ev: TxnStep[RO, Unit] => Txn[M, Unit]): Txn[M, Unit] = + if (cond) self.void else ev(unit) + + def >>[N <: TxnMode, B](f: Txn[N, B])(implicit m: TxnMode.Merge[M, N]): Txn[m.Mode, B] = { + val next = m.substN(f.step) + val step = FlatMap[m.Mode, A, B](m.substM(self.step), _ => next) + Txn(step) + } + } + + type CatsInstance[M <: TxnMode] = Monad[Txn[M, *]] + + def catsInstance[M <: TxnMode](dsl: TxnDsl[M]): CatsInstance[M] = + new CatsInstance[M] { + override def pure[A](a: A) = dsl.pure(a) + override def map[A, B](fa: Txn[M, A])(f: A => B) = fa.map(f) + override def flatMap[A, B](fa: Txn[M, A])(f: A => Txn[M, B]) = fa.flatMap(f) + override def tailRecM[A, B](a: A)(f: A => Txn[M, Either[A, B]]) = dsl.tailRec(a)(f) + } + + implicit def catsInstanceRO: CatsInstance[RO] = catsInstance(TxnDslRO) + implicit def catsInstanceRW: CatsInstance[RW] = catsInstance(TxnDslRW) +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnDsl.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnDsl.scala new file mode 100644 index 000000000..e733e091e --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnDsl.scala @@ -0,0 +1,141 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import cats.Traverse +import japgolly.scalajs.react.* +import japgolly.scalajs.react.util.Util.identity as identityFn +import japgolly.webapputil.indexeddb.IndexedDb.ObjectStore +import japgolly.webapputil.indexeddb.TxnMode.* + +import scala.collection.BuildFrom + +sealed abstract class TxnDsl[M <: TxnMode] { + + implicit def catsInstance: Txn.CatsInstance[M] + + protected implicit def autoWrapStepRO[B](step: TxnStep[RO, B]): Txn[M, B] + + private implicit def autoWrapStepM[B](step: TxnStep[M, B]): Txn[M, B] = + Txn(step) + + // Sync only. Async not allowed by IndexedDB. 
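+  // `eval` (and `delay` below) lift synchronous effects into a transaction; introducing real
+  // asynchronicity here would let IndexedDB auto-commit and close the transaction mid-flight
+  // (see the note on [[Txn]]).
+  //
+  // Illustrative use inside a transaction body (the `people` store is hypothetical):
+  //
+  //   dsl.objectStore(people)
+  //      .flatMap(_.getAllKeys)
+  //      .flatMap(ks => dsl.delay(println(s"loaded ${ks.size} keys")))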
+ final def eval[A](c: CallbackTo[A]): Txn[M, A] = + TxnStep.Eval(c) + + final def pure[A](a: A): Txn[M, A] = + TxnStep.pure(a) + + @inline final def delay[A](a: => A): Txn[M, A] = + eval(CallbackTo(a)) + + final def unit: Txn[M, Unit] = + TxnStep.unit + + @inline final def none: Txn[M, Option[Nothing]] = + pure(None) + + final def suspend[A](a: => Txn[M, A]): Txn[M, A] = + TxnStep.Suspend(CallbackTo(a.step)) + + final def tailRec[A, B](a: A)(f: A => Txn[M, Either[A, B]]): Txn[M, B] = + TxnStep.TailRec(a, f.andThen(_.step)) + + final def objectStore[K, V](s: ObjectStoreDef.Sync[K, V]): Txn[M, ObjectStore[K, V]] = + TxnStep.GetStore(s) + + @inline final def objectStore[K, V]( + s: ObjectStoreDef.Async[K, V] + ): Txn[M, ObjectStore[K, s.Value]] = + objectStore(s.sync) + + @inline final def sequence[G[_], A](txns: G[Txn[M, A]])(implicit G: Traverse[G]): Txn[M, G[A]] = + traverse(txns)(identityFn) + + @inline final def sequenceIterable[F[x] <: Iterable[x], A](txns: => F[Txn[M, A]])(implicit + cbf: BuildFrom[F[Txn[M, A]], A, F[A]] + ): Txn[M, F[A]] = + traverseIterable(txns)(identityFn) + + @inline final def sequenceIterable_(txns: => Iterable[Txn[M, Any]]): Txn[M, Unit] = + traverseIterable_(txns)(identityFn) + + @inline final def sequenceOption[A](o: => Option[Txn[M, A]]): Txn[M, Option[A]] = + traverseOption(o)(identityFn) + + @inline final def sequenceOption_(o: Option[Txn[M, Any]]): Txn[M, Unit] = + traverseOption_(o)(identityFn) + + final def traverse[G[_], A, B](ga: G[A])(f: A => Txn[M, B])(implicit + G: Traverse[G] + ): Txn[M, G[B]] = + G.traverse(ga)(f.andThen(_.step)) + + final def traverseIterable[F[x] <: Iterable[x], A, B]( + fa: => F[A] + )(f: A => Txn[M, B])(implicit cbf: BuildFrom[F[A], B, F[B]]): Txn[M, F[B]] = + suspend { + val as = fa + val b = cbf.newBuilder(as) + if (as.isEmpty) + pure(b.result()) + else + as.iterator.map(f(_).map(b += _)).reduce(_ >> _) >> delay(b.result()) + } + + final def traverseIterable_[A](fa: => Iterable[A])(f: A => Txn[M, Any]): Txn[M, Unit] = + suspend { + val as = fa + val it = as.iterator + if (it.isEmpty) + unit + else { + val first = f(it.next()) + it.foldLeft(first)(_ >> f(_)).void + } + } + + final def traverseOption[A, B](o: => Option[A])(f: A => Txn[M, B]): Txn[M, Option[B]] = + suspend { + o match { + case Some(a) => f(a).map(Some(_)) + case None => none + } + } + + final def traverseOption_[A, B](o: => Option[A])(f: A => Txn[M, B]): Txn[M, Unit] = + suspend { + o match { + case Some(a) => f(a).void + case None => unit + } + } + + @inline final def unless[A](cond: Boolean)(txn: => Txn[M, A]): Txn[M, Option[A]] = + when(!cond)(txn) + + @inline final def unless_(cond: Boolean)(txn: => Txn[M, Any]): Txn[M, Unit] = + when_(!cond)(txn) + + final def when[A](cond: Boolean)(txn: => Txn[M, A]): Txn[M, Option[A]] = + if (cond) txn.map(Some(_)) else none + + final def when_(cond: Boolean)(txn: => Txn[M, Any]): Txn[M, Unit] = + if (cond) txn.void else unit +} + +// ===================================================================================================================== + +object TxnDsl { + + object RO extends TxnDsl[RO] { + override implicit def catsInstance: Txn.CatsInstance[RO] = Txn.catsInstance(this) + override protected implicit def autoWrapStepRO[B](s: TxnStep[RO, B]): Txn[RO, B] = Txn(s) + } + + object RW extends TxnDsl[RW] { + override implicit def catsInstance: Txn.CatsInstance[RW] = Txn.catsInstance(this) + override protected implicit def autoWrapStepRO[B](s: TxnStep[RO, B]): Txn[RO, B] = Txn(s) + } +} diff --git 
a/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnMode.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnMode.scala new file mode 100644 index 000000000..198d0c644 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnMode.scala @@ -0,0 +1,45 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +sealed trait TxnMode + +object TxnMode { + sealed trait RW extends TxnMode + sealed trait RO extends RW + + // =================================================================================================================== + + trait Merge[M <: TxnMode, N <: TxnMode] { + type Mode <: TxnMode + + def substM[F[+_ <: TxnMode, _], A](f: F[M, A]): F[Mode, A] + def substN[F[+_ <: TxnMode, _], A](f: F[N, A]): F[Mode, A] + } + + object Merge { + type To[M <: TxnMode, N <: TxnMode, R <: TxnMode] = Merge[M, N] { type Mode = R } + + implicit def eql[M <: TxnMode]: To[M, M, M] = + new Merge[M, M] { + override type Mode = M + override def substM[F[+_ <: TxnMode, _], A](f: F[M, A]) = f + override def substN[F[+_ <: TxnMode, _], A](f: F[M, A]) = f + } + + implicit def rorw: To[RO, RW, RW] = + new Merge[RO, RW] { + override type Mode = RW + override def substM[F[+_ <: TxnMode, _], A](f: F[RO, A]) = f + override def substN[F[+_ <: TxnMode, _], A](f: F[RW, A]) = f + } + + implicit def rwro: To[RW, RO, RW] = + new Merge[RW, RO] { + override type Mode = RW + override def substM[F[+_ <: TxnMode, _], A](f: F[RW, A]) = f + override def substN[F[+_ <: TxnMode, _], A](f: F[RO, A]) = f + } + } +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnStep.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnStep.scala new file mode 100644 index 000000000..d1dbb16ff --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/TxnStep.scala @@ -0,0 +1,66 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import japgolly.scalajs.react.* +import japgolly.webapputil.indexeddb.IndexedDb.ObjectStore +import org.scalajs.dom.* + +/** + * Embedded language for safely working with(in) an IndexedDB transaction. + * + * This is necessary because whilst all the transaction methods are async, any other type of + * asynchronicity is not supported and will result in IndexedDB automatically committing and closing + * the transaction, in which case, further interaction with the transaction will result in a runtime + * error. + * + * Therefore, returning [[AsyncCallback]] from within transactions is dangerous because it allows + * composition of both kinds of asynchronicity. To avoid this, we use this embedded language and + * don't publicly expose its interpretation/translation to [[AsyncCallback]]. From the call-site's + * point of view, a `Txn[A]` is completely opaque. + * + * This also has a nice side-effect of ensuring that transaction completion is always awaited + * because we do it in the transaction functions right after interpretation. Otherwise, the + * call-sites would always need to remember to do it if live transaction access were exposed. + * + * @tparam A + * The return type. 
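+ *
+ * Note: this is the raw instruction set behind [[Txn]]. Each constructor below is a single
+ * primitive operation; the interpreter in `IndexedDb` translates a tree of steps into an
+ * [[AsyncCallback]] against a live `IDBTransaction`. Read-only primitives are typed
+ * `TxnStep[RO, _]` and write primitives `TxnStep[RW, _]`, which is what prevents writes from
+ * appearing in a read-only transaction.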
+ */ +sealed trait TxnStep[+M <: TxnMode, +A] + +object TxnStep { + import TxnMode._ + + final case class FlatMap[M <: TxnMode, A, B](from: TxnStep[M, A], f: A => TxnStep[M, B]) + extends TxnStep[M, B] + final case class Map[M <: TxnMode, A, B](from: TxnStep[M, A], f: A => B) extends TxnStep[M, B] + final case class Suspend[M <: TxnMode, A](body: CallbackTo[TxnStep[M, A]]) extends TxnStep[M, A] + final case class TailRec[M <: TxnMode, A, B](a: A, f: A => TxnStep[M, Either[A, B]]) + extends TxnStep[M, B] + + final case class Eval[A](body: CallbackTo[A]) extends TxnStep[RO, A] + final case class GetStore[K, V](defn: ObjectStoreDef.Sync[K, V]) + extends TxnStep[RO, ObjectStore[K, V]] + final case class StoreGet[K, V](store: ObjectStore[K, V], key: IndexedDbKey) + extends TxnStep[RO, Option[V]] + final case class StoreGetAllKeys[K, V](store: ObjectStore[K, V]) extends TxnStep[RO, Vector[K]] + final case class StoreGetAllVals[K, V](store: ObjectStore[K, V]) extends TxnStep[RO, Vector[V]] + + final case class StoreAdd(store: ObjectStore[?, ?], key: IndexedDbKey, value: IDBValue) + extends TxnStep[RW, Unit] + final case class StoreClear(store: ObjectStore[?, ?]) extends TxnStep[RW, Unit] + final case class StoreDelete[K, V](store: ObjectStore[K, V], key: IndexedDbKey) + extends TxnStep[RW, Unit] + final case class StorePut(store: ObjectStore[?, ?], key: IndexedDbKey, value: IDBValue) + extends TxnStep[RW, Unit] + + val none: TxnStep[RO, Option[Nothing]] = + pure(None) + + def pure[A](a: A): TxnStep[RO, A] = + Eval(CallbackTo.pure(a)) + + val unit: TxnStep[RO, Unit] = + Eval(Callback.empty) +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/ValueCodec.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/ValueCodec.scala new file mode 100644 index 000000000..cdd8dbf1e --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/ValueCodec.scala @@ -0,0 +1,106 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. 
(AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil.indexeddb + +import japgolly.scalajs.react.AsyncCallback +import japgolly.scalajs.react.CallbackTo +import japgolly.webapputil.binary.* +import org.scalajs.dom.IDBValue + +import java.util.UUID +import scala.reflect.ClassTag +import scala.scalajs.js +import scala.scalajs.js.typedarray.ArrayBuffer + +final case class ValueCodec[A]( + encode: A => CallbackTo[IDBValue], + decode: IDBValue => CallbackTo[A] +) { + + def xmap[B](onDecode: A => B)(onEncode: B => A): ValueCodec[B] = + // Delegating because decoding can fail and must be wrapped to be pure + xmapSync(a => CallbackTo(onDecode(a)))(b => CallbackTo(onEncode(b))) + + def xmapSync[B](onDecode: A => CallbackTo[B])(onEncode: B => CallbackTo[A]): ValueCodec[B] = + ValueCodec[B](encode = onEncode(_).flatMap(encode), decode = decode(_).flatMap(onDecode)) + + def async: ValueCodec.Async[A] = + ValueCodec.Async(encode = encode.andThen(_.asAsyncCallback), + decode = decode.andThen(_.asAsyncCallback) + ) + + type ThisIsBinary = ValueCodec[A] =:= ValueCodec[BinaryData] + + // def compress(c: Compression)(implicit ev: ThisIsBinary): ValueCodec[BinaryData] = + // ev(this).xmap(c.decompressOrThrow)(c.compress) +} + +object ValueCodec { + + lazy val binary: ValueCodec[BinaryData] = + apply( + encode = b => CallbackTo.pure(b.unsafeArrayBuffer), + decode = d => CallbackTo(BinaryData.unsafeFromArrayBuffer(d.asInstanceOf[ArrayBuffer])) + ) + + lazy val boolean: ValueCodec[Boolean] = + primative("Boolean") + + lazy val double: ValueCodec[Double] = + primative("Double") + + lazy val int: ValueCodec[Int] = + primative("Int") + + lazy val long: ValueCodec[Long] = + string.xmap(_.toLong)(_.toString) + + def primative[A: ClassTag](name: String): ValueCodec[A] = + apply( + encode = a => CallbackTo.pure(a), + decode = d => + CallbackTo( + (d: Any) match { + case a: A => a + case x => + throw new js.JavaScriptException(s"Invalid IDB value found. 
$name expected, got: $x") + } + ) + ) + + lazy val string: ValueCodec[String] = + primative("String") + + lazy val uuid: ValueCodec[UUID] = + string.xmap(UUID.fromString)(_.toString) + + // =================================================================================================================== + + final case class Async[A]( + encode: A => AsyncCallback[IDBValue], + decode: IDBValue => AsyncCallback[A] + ) { + + def xmap[B](onDecode: A => B)(onEncode: B => A): Async[B] = + // Delegating because decoding can fail and must be wrapped to be pure + xmapAsync(a => AsyncCallback.delay(onDecode(a)))(b => AsyncCallback.delay(onEncode(b))) + + def xmapAsync[B](onDecode: A => AsyncCallback[B])(onEncode: B => AsyncCallback[A]): Async[B] = + Async[B](encode = onEncode(_).flatMap(encode), decode = decode(_).flatMap(onDecode)) + + type ThisIsBinary = Async[A] =:= Async[BinaryData] + + def xmapBinaryFormat[B](fmt: BinaryFormat[B])(implicit ev: ThisIsBinary): Async[B] = + ev(this).xmapAsync(fmt.decode)(fmt.encode) + } + + object Async { + + lazy val binary: ValueCodec.Async[BinaryData] = + ValueCodec.binary.async + + def binary[A](fmt: BinaryFormat[A]): ValueCodec.Async[A] = + binary.xmapBinaryFormat(fmt) + } +} diff --git a/workers/src/main/scala/japgolly/webapputil/indexeddb/package.scala b/workers/src/main/scala/japgolly/webapputil/indexeddb/package.scala new file mode 100644 index 000000000..fc0385087 --- /dev/null +++ b/workers/src/main/scala/japgolly/webapputil/indexeddb/package.scala @@ -0,0 +1,14 @@ +// Copyright (c) 2016-2023 Association of Universities for Research in Astronomy, Inc. (AURA) +// For license information see LICENSE or https://opensource.org/licenses/BSD-3-Clause + +package japgolly.webapputil + +package object indexeddb { + + type TxnDslRO = TxnDsl.RO.type + type TxnDslRW = TxnDsl.RW.type + + @inline def TxnDslRO: TxnDslRO = TxnDsl.RO + @inline def TxnDslRW: TxnDslRW = TxnDsl.RW + +} diff --git a/workers/src/main/scala/workers/CacheIDBStores.scala b/workers/src/main/scala/workers/CacheIDBStores.scala index 1560cc292..b85ba5b4b 100644 --- a/workers/src/main/scala/workers/CacheIDBStores.scala +++ b/workers/src/main/scala/workers/CacheIDBStores.scala @@ -9,6 +9,7 @@ import explore.model.boopickle.CatalogPicklers.given import japgolly.scalajs.react.callback.* import japgolly.webapputil.binary.* import japgolly.webapputil.boopickle.* +import japgolly.webapputil.boopickle.BinaryFormatExt.Implicits.* import japgolly.webapputil.indexeddb.* import japgolly.webapputil.indexeddb.IndexedDb.DatabaseName import lucuma.ags.GuideStarCandidate