From cbe7d39061da9d72e59a34bb4fb5dc56d9a04e21 Mon Sep 17 00:00:00 2001 From: Orion Kindel Date: Tue, 14 May 2024 15:01:46 -0500 Subject: [PATCH] feat: working --- README.md | 80 +------------ package.json | 2 +- spago.lock | 99 ++++++++++----- spago.yaml | 24 ++-- src/Data.CBOR.Record.purs | 50 -------- src/Data.CBOR.purs | 192 +++++++++++++++++++++--------- src/Effect.CBOR.js | 7 ++ src/Effect.CBOR.purs | 21 ++++ src/Node.Stream.CBOR.Decode.js | 8 +- src/Node.Stream.CBOR.Decode.purs | 67 ++++------- src/Node.Stream.CBOR.Encode.js | 10 +- src/Node.Stream.CBOR.Encode.purs | 74 ++++-------- src/Node.Stream.CBOR.Options.js | 11 ++ src/Node.Stream.CBOR.Options.purs | 50 ++++++++ src/Node.Stream.CBOR.purs | 1 - src/Pipes.CBOR.purs | 114 ++++++------------ test/Test/Main.purs | 4 +- test/Test/Pipes.CSV.purs | 97 ++++++++------- 18 files changed, 444 insertions(+), 467 deletions(-) delete mode 100644 src/Data.CBOR.Record.purs create mode 100644 src/Effect.CBOR.js create mode 100644 src/Effect.CBOR.purs create mode 100644 src/Node.Stream.CBOR.Options.js create mode 100644 src/Node.Stream.CBOR.Options.purs delete mode 100644 src/Node.Stream.CBOR.purs diff --git a/README.md b/README.md index ee047d8..fa39d60 100644 --- a/README.md +++ b/README.md @@ -1,81 +1,9 @@ -# purescript-csv-stream +# purescript-cbor-stream -Type-safe bindings for the streaming API of `csv-parse` and `csv-stringify`. +Type-safe bindings for the streaming API of `cbor-x` ## Installing ```bash -spago install csv-stream -{bun|yarn|npm|pnpm} install csv-parse csv-stringify -``` - -## Examples -### Stream -```purescript -module Main where - -import Prelude - -import Effect (Effect) -import Effect.Class (liftEffect) -import Effect.Aff (launchAff_) -import Node.Stream (pipe) -import Node.Stream as Stream -import Node.Stream.CSV.Stringify as CSV.Stringify -import Node.Stream.CSV.Parse as CSV.Parse - -type MyCSVType1 = {a :: Int, b :: Int, bar :: String, baz :: Boolean} -type MyCSVType2 = {ab :: Int, bar :: String, baz :: Boolean} - -atob :: MyCSVType1 -> MyCSVType2 -atob {a, b, bar, baz} = {ab: a + b, bar, baz} - -myCSV :: String -myCSV = "a,b,bar,baz\n1,2,\"hello, world!\",true\n3,3,,f" - -main :: Effect Unit -main = launchAff_ do - parser <- liftEffect $ CSV.Parse.make {} - stringifier <- liftEffect $ CSV.Stringify.make {} - - input <- liftEffect $ Stream.readableFromString myCSV - liftEffect $ Stream.pipe input parser - - records <- CSV.Parse.readAll parser - liftEffect $ for_ records \r -> CSV.Stringify.write $ atob r - liftEffect $ Stream.end stringifier - - -- "ab,bar,baz\n3,\"hello, world!\",true\n6,,false" - csvString <- CSV.Stringify.readAll stringifier - pure unit -``` - -### Synchronous -```purescript -module Main where - -import Prelude - -import Effect (Effect) -import Effect.Class (liftEffect) -import Effect.Aff (launchAff_) -import Node.Stream (pipe) -import Node.Stream as Stream -import Node.Stream.CSV.Stringify as CSV.Stringify -import Node.Stream.CSV.Parse as CSV.Parse - -type MyCSVType1 = {a :: Int, b :: Int, bar :: String, baz :: Boolean} -type MyCSVType2 = {ab :: Int, bar :: String, baz :: Boolean} - -atob :: MyCSVType1 -> MyCSVType2 -atob {a, b, bar, baz} = {ab: a + b, bar, baz} - -myCSV :: String -myCSV = "a,b,bar,baz\n1,2,\"hello, world!\",true\n3,3,,f" - -main :: Effect Unit -main = launchAff_ do - records :: Array MyCSVType1 <- CSV.Parse.parse myCSV - -- "ab,bar,baz\n3,\"hello, world!\",true\n6,,false" - csvString <- CSV.Stringify.stringify (atob <$> records) - pure unit +spago install cbor-stream 
+{bun|yarn|npm|pnpm} install cbor-x ``` diff --git a/package.json b/package.json index 528e5fd..2faadc2 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "purescript-cbor-stream", - "version": "v2.0.1", + "version": "v1.4.0", "type": "module", "dependencies": { "cbor-x": "^1.5.9", diff --git a/spago.lock b/spago.lock index 7cf26ff..3517d58 100644 --- a/spago.lock +++ b/spago.lock @@ -1,38 +1,34 @@ workspace: packages: - csv-stream: + cbor-stream: path: ./ dependencies: - aff: ">=7.1.0 <8.0.0" - - arrays: ">=7.3.0 <8.0.0" + - arrays - bifunctors: ">=6.0.0 <7.0.0" - datetime: ">=6.1.0 <7.0.0" - effect: ">=4.0.0 <5.0.0" + - either - exceptions: ">=6.0.0 <7.0.0" - foldable-traversable: ">=6.0.0 <7.0.0" - foreign: ">=7.0.0 <8.0.0" - foreign-object: ">=4.1.0 <5.0.0" - - integers: ">=6.0.0 <7.0.0" - - lists: ">=7.0.0 <8.0.0" + - js-bigints + - js-date + - js-maps - maybe: ">=6.0.0 <7.0.0" - - newtype: ">=5.0.0 <6.0.0" - node-buffer: ">=9.0.0 <10.0.0" - node-event-emitter: ">=3.0.0 <4.0.0" - - node-stream-pipes: ">=1.3.0 <2.0.0" + - node-stream-pipes - node-streams: ">=9.0.0 <10.0.0" - nullable: ">=6.0.0 <7.0.0" - - numbers: ">=9.0.1 <10.0.0" - ordered-collections: ">=3.2.0 <4.0.0" - pipes: ">=8.0.0 <9.0.0" - - precise-datetime: ">=7.0.0 <8.0.0" - prelude: ">=6.0.1 <7.0.0" - record: ">=4.0.0 <5.0.0" - - record-extra: ">=5.0.1 <6.0.0" - - st: ">=6.2.0 <7.0.0" - - strings: ">=6.0.1 <7.0.0" + - simple-json - tailrec: ">=6.1.0 <7.0.0" - transformers: ">=6.0.0 <7.0.0" - - tuples: ">=7.0.0 <8.0.0" - typelevel-prelude: ">=7.0.0 <8.0.0" - unsafe-coerce: ">=6.0.0 <7.0.0" test_dependencies: @@ -40,6 +36,7 @@ workspace: - gen - node-fs - node-zlib + - precise-datetime - quickcheck - simple-json - spec @@ -76,7 +73,9 @@ workspace: - identity - integers - invariant + - js-bigints - js-date + - js-maps - lazy - lcg - lists @@ -103,10 +102,10 @@ workspace: - precise-datetime - prelude - profunctor + - profunctor-lenses - quickcheck - random - record - - record-extra - refs - safe-coerce - simple-json @@ -123,7 +122,10 @@ workspace: - unordered-collections - unsafe-coerce - variant - extra_packages: {} + extra_packages: + node-stream-pipes: + git: https://git.orionkindel.com/orion/purescript-node-stream-pipes + ref: 657af14 packages: aff: type: registry @@ -482,6 +484,14 @@ packages: dependencies: - control - prelude + js-bigints: + type: registry + version: 2.2.1 + integrity: sha256-hKWZo9NxtsAaHmNXr6B8GY4c0olQbYLXPVGWm4TF2Ss= + dependencies: + - integers + - maybe + - prelude js-date: type: registry version: 8.0.0 @@ -493,6 +503,24 @@ packages: - foreign - integers - now + js-maps: + type: registry + version: 0.1.2 + integrity: sha256-xQDZf88nQEiZNmkCVEi3YQGB19hu6Oju6laEi8Os/oM= + dependencies: + - arrays + - either + - foldable-traversable + - functions + - js-bigints + - maybe + - prelude + - profunctor-lenses + - st + - strings + - tuples + - unfoldable + - unsafe-coerce lazy: type: registry version: 6.0.0 @@ -606,9 +634,9 @@ packages: dependencies: - effect node-stream-pipes: - type: registry - version: 1.3.0 - integrity: sha256-5Jpf0BLn0ExQWYxbTTewai4M8quEmEVHxihc9CM1Juo= + type: git + url: https://git.orionkindel.com/orion/purescript-node-stream-pipes + rev: 657af14bb6f461e5def017d8d1ca6142b00891cb dependencies: - aff - arrays @@ -823,6 +851,31 @@ packages: - newtype - prelude - tuples + profunctor-lenses: + type: registry + version: 8.0.0 + integrity: sha256-K7f29rHRHgVSb2Y/PaSKtfYPriP6n87BJNO7EhsZHas= + dependencies: + - arrays + - bifunctors + - const + - control + - distributive + 
- either + - foldable-traversable + - foreign-object + - functors + - identity + - lists + - maybe + - newtype + - ordered-collections + - partial + - prelude + - profunctor + - record + - transformers + - tuples quickcheck: type: registry version: 8.0.1 @@ -870,18 +923,6 @@ packages: - functions - prelude - unsafe-coerce - record-extra: - type: registry - version: 5.0.1 - integrity: sha256-7vnREK2fpGJ7exswSeA9UpZFuU+UXRt3SA7AFUldT/Y= - dependencies: - - arrays - - functions - - lists - - prelude - - record - - tuples - - typelevel-prelude refs: type: registry version: 6.0.0 diff --git a/spago.yaml b/spago.yaml index 63dbafb..f7c705a 100644 --- a/spago.yaml +++ b/spago.yaml @@ -1,7 +1,7 @@ package: name: cbor-stream publish: - version: '2.0.1' + version: '1.4.0' license: 'GPL-3.0-or-later' location: githubOwner: 'cakekindel' @@ -10,36 +10,32 @@ package: strict: true pedanticPackages: true dependencies: - - node-stream-pipes: ">=1.3.0 <2.0.0" - aff: ">=7.1.0 <8.0.0" - arrays: ">=7.3.0 <8.0.0" - bifunctors: ">=6.0.0 <7.0.0" - datetime: ">=6.1.0 <7.0.0" - effect: ">=4.0.0 <5.0.0" + - either: ">=6.1.0 <7.0.0" - exceptions: ">=6.0.0 <7.0.0" - foldable-traversable: ">=6.0.0 <7.0.0" - foreign: ">=7.0.0 <8.0.0" - foreign-object: ">=4.1.0 <5.0.0" - - integers: ">=6.0.0 <7.0.0" - - lists: ">=7.0.0 <8.0.0" + - js-bigints: ">=2.2.1 <3.0.0" + - js-date: ">=8.0.0 <9.0.0" + - js-maps: ">=0.1.2 <0.2.0" - maybe: ">=6.0.0 <7.0.0" - - newtype: ">=5.0.0 <6.0.0" - node-buffer: ">=9.0.0 <10.0.0" - node-event-emitter: ">=3.0.0 <4.0.0" + - node-stream-pipes: "*" - node-streams: ">=9.0.0 <10.0.0" - nullable: ">=6.0.0 <7.0.0" - - numbers: ">=9.0.1 <10.0.0" - ordered-collections: ">=3.2.0 <4.0.0" - pipes: ">=8.0.0 <9.0.0" - - precise-datetime: ">=7.0.0 <8.0.0" - prelude: ">=6.0.1 <7.0.0" - record: ">=4.0.0 <5.0.0" - - record-extra: ">=5.0.1 <6.0.0" - - st: ">=6.2.0 <7.0.0" - - strings: ">=6.0.1 <7.0.0" + - simple-json: ">=9.0.0 <10.0.0" - tailrec: ">=6.1.0 <7.0.0" - transformers: ">=6.0.0 <7.0.0" - - tuples: ">=7.0.0 <8.0.0" - typelevel-prelude: ">=7.0.0 <8.0.0" - unsafe-coerce: ">=6.0.0 <7.0.0" test: @@ -49,8 +45,12 @@ package: - gen - node-fs - node-zlib + - precise-datetime - quickcheck - simple-json - spec workspace: - extraPackages: {} + extraPackages: + node-stream-pipes: + git: 'https://git.orionkindel.com/orion/purescript-node-stream-pipes' + ref: '657af14' diff --git a/src/Data.CBOR.Record.purs b/src/Data.CBOR.Record.purs deleted file mode 100644 index 8728d3d..0000000 --- a/src/Data.CBOR.Record.purs +++ /dev/null @@ -1,50 +0,0 @@ -module Data.CSV.Record where - -import Prelude - -import Control.Monad.Error.Class (liftMaybe) -import Control.Monad.Except (Except) -import Data.Array as Array -import Data.CSV (class ReadCSV, class WriteCSV, readCSV, writeCSV) -import Data.List.NonEmpty (NonEmptyList) -import Data.Map (Map) -import Data.Map as Map -import Data.Maybe (fromMaybe) -import Data.Symbol (class IsSymbol, reflectSymbol) -import Foreign (ForeignError(..)) -import Prim.Row (class Cons, class Lacks) -import Prim.RowList (class RowToList, Cons, Nil, RowList) -import Record as Record -import Type.Prelude (Proxy(..)) - -class WriteCSVRecord :: Row Type -> RowList Type -> Constraint -class RowToList r rl <= WriteCSVRecord r rl | rl -> r where - writeCSVRecord :: { | r } -> Array String - -instance (RowToList r (Cons k v tailrl), IsSymbol k, WriteCSV v, Lacks k tail, Cons k v tail r, WriteCSVRecord tail tailrl) => WriteCSVRecord r (Cons k v tailrl) where - writeCSVRecord r = - let - val = writeCSV $ Record.get 
(Proxy @k) r - tail = writeCSVRecord @tail @tailrl $ Record.delete (Proxy @k) r - in - [ val ] <> tail - -instance WriteCSVRecord () Nil where - writeCSVRecord _ = [] - -class ReadCSVRecord :: Row Type -> RowList Type -> Constraint -class RowToList r rl <= ReadCSVRecord r rl | rl -> r where - readCSVRecord :: Map String Int -> Array String -> Except (NonEmptyList ForeignError) { | r } - -instance (RowToList r (Cons k v tailrl), IsSymbol k, ReadCSV v, Lacks k tail, Cons k v tail r, ReadCSVRecord tail tailrl) => ReadCSVRecord r (Cons k v tailrl) where - readCSVRecord cols vals = do - let - k = reflectSymbol (Proxy @k) - pos <- liftMaybe (pure $ ForeignError $ "reached end of row looking for column " <> k) $ Map.lookup k cols - let valraw = fromMaybe "" $ Array.index vals pos - val <- readCSV @v valraw - tail <- readCSVRecord @tail @tailrl cols vals - pure $ Record.insert (Proxy @k) val tail - -instance ReadCSVRecord () Nil where - readCSVRecord _ _ = pure {} diff --git a/src/Data.CBOR.purs b/src/Data.CBOR.purs index 56eb679..ce4a7ac 100644 --- a/src/Data.CBOR.purs +++ b/src/Data.CBOR.purs @@ -1,74 +1,150 @@ -module Data.CSV where +module Data.CBOR where import Prelude -import Control.Monad.Error.Class (liftMaybe, throwError) -import Control.Monad.Except (Except) +import Control.Monad.Error.Class (liftMaybe) +import Control.Monad.Except (ExceptT(..), withExcept) +import Control.Monad.Except.Trans (runExceptT) +import Data.Array as Array import Data.DateTime (DateTime) -import Data.Int as Int -import Data.List.NonEmpty (NonEmptyList) -import Data.Maybe (Maybe(..), maybe) -import Data.Newtype (unwrap) -import Data.Number (fromString) as Number -import Data.Number.Format (toString) as Number -import Data.PreciseDateTime (fromDateTime, fromRFC3339String, toDateTimeLossy, toRFC3339String) -import Data.RFC3339String (RFC3339String(..)) -import Data.String as String -import Foreign (ForeignError(..)) +import Data.Either (Either(..)) +import Data.Foldable (class Foldable) +import Data.FoldableWithIndex (foldlWithIndex) +import Data.JSDate (JSDate) +import Data.JSDate as JSDate +import Data.Map (Map) +import Data.Symbol (class IsSymbol, reflectSymbol) +import Data.Traversable (traverse) +import Foreign (F, Foreign, ForeignError(..), readArray, unsafeReadTagged, unsafeToForeign) +import Foreign.Index (readProp) +import JS.BigInt (BigInt) +import JS.Map (Map) as JS +import JS.Map as JS.Map +import Prim.Row as Row +import Prim.RowList (class RowToList, Cons, Nil, RowList) +import Record (get) +import Record.Builder (Builder) +import Record.Builder as Builder +import Simple.JSON (class ReadForeign, class WriteForeign, readImpl, writeImpl) +import Type.Prelude (Proxy(..)) -class ReadCSV a where - readCSV :: String -> Except (NonEmptyList ForeignError) a +class ReadCBOR :: Type -> Constraint +class ReadCBOR a where + readCBOR :: Foreign -> F a -class WriteCSV a where - writeCSV :: a -> String +class WriteCBOR :: Type -> Constraint +class WriteCBOR a where + writeCBOR :: a -> Foreign -instance ReadCSV Int where - readCSV s = liftMaybe (pure $ ForeignError $ "invalid integer: " <> s) $ Int.fromString s +instance ReadCBOR Foreign where + readCBOR = pure +else instance (RowToList r rl, ReadCBORFields rl () r) => ReadCBOR (Record r) where + readCBOR o = do + flip Builder.build {} <$> getFields (Proxy @rl) o +else instance ReadCBOR BigInt where + readCBOR = unsafeReadTagged "BigInt" +else instance ReadCBOR JSDate where + readCBOR = unsafeReadTagged "Date" +else instance ReadCBOR DateTime where + readCBOR a = do + 
date :: JSDate <- readCBOR a + liftMaybe (pure $ ForeignError $ "Invalid DateTime: " <> show date) $ JSDate.toDateTime date +else instance ReadCBOR a => ReadCBOR (Array a) where + readCBOR a = do + raws :: Array Foreign <- readArray a + traverse readCBOR raws +else instance (ReadCBOR v) => ReadCBOR (JS.Map String v) where + readCBOR map = do + map' :: JS.Map String Foreign <- unsafeReadTagged "Map" map + foldlWithIndex (\k b v -> do + map'' <- b + v' <- readCBOR v + pure $ JS.Map.insert k v' map'' + ) (pure JS.Map.empty) map' +else instance (ReadForeign a) => ReadCBOR a where + readCBOR = readImpl -instance ReadCSV Number where - readCSV s = liftMaybe (pure $ ForeignError $ "invalid number: " <> s) $ Number.fromString s +instance WriteCBOR Foreign where + writeCBOR = identity +else instance (RowToList r rl, WriteCBORFields rl r () to) => WriteCBOR (Record r) where + writeCBOR rec = unsafeToForeign $ Builder.build (writeImplFields (Proxy @rl) rec) {} +else instance WriteCBOR BigInt where + writeCBOR = unsafeToForeign +else instance WriteCBOR JSDate where + writeCBOR = unsafeToForeign +else instance WriteCBOR DateTime where + writeCBOR = unsafeToForeign <<< JSDate.fromDateTime +else instance (WriteCBOR k, WriteCBOR v) => WriteCBOR (JS.Map k v) where + writeCBOR = unsafeToForeign +else instance (WriteCBOR a) => WriteCBOR (Array a) where + writeCBOR as = unsafeToForeign $ writeCBOR <$> as +else instance (Foldable f, WriteCBOR a) => WriteCBOR (f a) where + writeCBOR as = unsafeToForeign $ writeCBOR $ Array.fromFoldable as +else instance (JS.Map.EncodeKey k, WriteCBOR k, WriteCBOR v) => WriteCBOR (Map k v) where + writeCBOR map = writeCBOR $ foldlWithIndex (\k m v -> JS.Map.insert k v m) JS.Map.empty map +else instance (WriteForeign a) => WriteCBOR a where + writeCBOR = writeImpl -instance ReadCSV String where - readCSV = pure +applyEither :: forall e a b. Semigroup e => Either e (a -> b) -> Either e a -> Either e b +applyEither (Left e) (Right _) = Left e +applyEither (Left e1) (Left e2) = Left (e1 <> e2) +applyEither (Right _) (Left e) = Left e +applyEither (Right fun) (Right a) = Right (fun a) -instance ReadCSV DateTime where - readCSV s = map toDateTimeLossy $ liftMaybe (pure $ ForeignError $ "invalid ISO date string: " <> s) $ fromRFC3339String $ RFC3339String s +exceptTApply :: forall a b e m. 
Semigroup e => Applicative m => ExceptT e m (a -> b) -> ExceptT e m a -> ExceptT e m b +exceptTApply fun a = ExceptT $ applyEither + <$> runExceptT fun + <*> runExceptT a -instance ReadCSV Boolean where - readCSV s = - let - inner "t" = pure true - inner "true" = pure true - inner "yes" = pure true - inner "y" = pure true - inner "1" = pure true - inner "f" = pure false - inner "false" = pure false - inner "no" = pure false - inner "n" = pure false - inner "0" = pure false - inner _ = throwError $ pure $ ForeignError $ "invalid boolean value: " <> s - in - inner $ String.toLower s +class ReadCBORFields (xs :: RowList Type) (from :: Row Type) (to :: Row Type) + | xs -> from to where + getFields :: Proxy xs + -> Foreign + -> F (Builder (Record from) (Record to)) -instance ReadCSV a => ReadCSV (Maybe a) where - readCSV "" = pure Nothing - readCSV s = Just <$> readCSV s +instance readFieldsCons :: + ( IsSymbol name + , ReadCBOR ty + , ReadCBORFields tail from from' + , Row.Lacks name from' + , Row.Cons name ty from' to + ) => ReadCBORFields (Cons name ty tail) from to where + getFields _ obj = (compose <$> first) `exceptTApply` rest + where + first = do + value <- withExcept' (readCBOR =<< readProp name obj) + pure $ Builder.insert nameP value + rest = getFields tailP obj + nameP = Proxy :: Proxy name + tailP = Proxy :: Proxy tail + name = reflectSymbol nameP + withExcept' = withExcept <<< map $ ErrorAtProperty name -instance WriteCSV Int where - writeCSV = Int.toStringAs Int.decimal +instance readFieldsNil :: + ReadCBORFields Nil () () where + getFields _ _ = + pure identity -instance WriteCSV Number where - writeCSV = Number.toString +class WriteCBORFields (rl :: RowList Type) row (from :: Row Type) (to :: Row Type) + | rl -> row from to where + writeImplFields :: forall g. 
g rl -> Record row -> Builder (Record from) (Record to) -instance WriteCSV String where - writeCSV = identity +instance consWriteCBORFields :: + ( IsSymbol name + , WriteCBOR ty + , WriteCBORFields tail row from from' + , Row.Cons name ty whatever row + , Row.Lacks name from' + , Row.Cons name Foreign from' to + ) => WriteCBORFields (Cons name ty tail) row from to where + writeImplFields _ rec = result + where + namep = Proxy :: Proxy name + value = writeCBOR $ get namep rec + tailp = Proxy :: Proxy tail + rest = writeImplFields tailp rec + result = Builder.insert namep value <<< rest -instance WriteCSV DateTime where - writeCSV = unwrap <<< toRFC3339String <<< fromDateTime - -instance WriteCSV Boolean where - writeCSV = show - -instance WriteCSV a => WriteCSV (Maybe a) where - writeCSV = maybe "" writeCSV +instance nilWriteCBORFields :: + WriteCBORFields Nil row () () where + writeImplFields _ _ = identity diff --git a/src/Effect.CBOR.js b/src/Effect.CBOR.js new file mode 100644 index 0000000..cf999ac --- /dev/null +++ b/src/Effect.CBOR.js @@ -0,0 +1,7 @@ +import {decode, encode} from 'cbor-x' + +/** @type {(a: Buffer) => () => unknown} */ +export const decodeImpl = buf => () => decode(buf) + +/** @type {(a: unknown) => () => Buffer} */ +export const encodeImpl = buf => () => encode(buf) diff --git a/src/Effect.CBOR.purs b/src/Effect.CBOR.purs new file mode 100644 index 0000000..057f8c8 --- /dev/null +++ b/src/Effect.CBOR.purs @@ -0,0 +1,21 @@ +module Effect.CBOR where + +import Prelude + +import Control.Monad.Error.Class (liftEither) +import Control.Monad.Except (runExcept) +import Data.Bifunctor (lmap) +import Data.CBOR (class ReadCBOR, class WriteCBOR, readCBOR, writeCBOR) +import Effect (Effect) +import Effect.Exception (error) +import Foreign (Foreign) +import Node.Buffer (Buffer) + +foreign import decodeImpl :: Buffer -> Effect Foreign +foreign import encodeImpl :: Foreign -> Effect Buffer + +decode :: forall a. ReadCBOR a => Buffer -> Effect a +decode = (liftEither <<< lmap (error <<< show) <<< runExcept <<< readCBOR) <=< decodeImpl + +encode :: forall a. 
WriteCBOR a => a -> Effect Buffer
+encode = encodeImpl <<< writeCBOR
diff --git a/src/Node.Stream.CBOR.Decode.js b/src/Node.Stream.CBOR.Decode.js
index a72708c..ba4d4a9 100644
--- a/src/Node.Stream.CBOR.Decode.js
+++ b/src/Node.Stream.CBOR.Decode.js
@@ -1,7 +1,7 @@
-import { Parser } from "csv-parse";
+import { DecoderStream } from "cbor-x";
 
-/** @type {(s: import('csv-parse').Options) => () => Parser} */
-export const makeImpl = (c) => () => new Parser(c);
+/** @type {(s: import('cbor-x').Options) => () => DecoderStream} */
+export const makeImpl = (c) => () => new DecoderStream({useRecords: false, ...c});
 
-/** @type {(s: Parser) => () => Array | null} */
+/** @type {(s: DecoderStream) => () => unknown | null} */
 export const readImpl = (p) => () => p.read();
diff --git a/src/Node.Stream.CBOR.Decode.purs b/src/Node.Stream.CBOR.Decode.purs
index 943f225..fe74683 100644
--- a/src/Node.Stream.CBOR.Decode.purs
+++ b/src/Node.Stream.CBOR.Decode.purs
@@ -1,76 +1,51 @@
-module Node.Stream.CSV.Parse where
+module Node.Stream.CBOR.Decode where
 
 import Prelude hiding (join)
 
 import Data.Nullable (Nullable)
 import Effect (Effect)
 import Effect.Uncurried (mkEffectFn1)
-import Foreign (Foreign, unsafeToForeign)
+import Foreign (Foreign)
 import Foreign.Object (Object)
-import Foreign.Object (union) as Object
 import Node.Buffer (Buffer)
 import Node.EventEmitter (EventHandle(..))
 import Node.EventEmitter.UtilTypes (EventHandle1)
 import Node.Stream (Read, Stream, Write)
+import Node.Stream.CBOR.Options (F32, Options, prepareOptions)
 import Node.Stream.Object (Transform) as Object
-import Prim.Row (class Union)
+import Prim.Row (class Nub, class Union)
 import Unsafe.Coerce (unsafeCoerce)
 
-data CSVRead
+data CBORDecode
 
--- | Stream transforming chunks of a CSV file
--- | into parsed purescript objects.
+-- | CBOR decoding transform stream
 -- |
--- | The CSV contents may be piped into this stream
--- | as Buffer or String chunks.
+-- | Accepts CBOR-encoded `Buffer` chunks, and transforms them
+-- | to decoded JS values.
-type CSVParser :: Row Type -> Type
-type CSVParser r = Stream (read :: Read, write :: Write, csv :: CSVRead | r)
+type CBORDecoder :: Row Type -> Type
+type CBORDecoder r = Stream (read :: Read, write :: Write, cbor :: CBORDecode | r)
 
--- | https://csv.js.org/parse/options/
-type Config r =
-  ( bom :: Boolean
-  , group_columns_by_name :: Boolean
-  , comment :: String
-  , comment_no_infix :: Boolean
-  , delimiter :: String
-  , encoding :: String
-  , escape :: String
-  , from :: Int
-  , from_line :: Int
-  , ignore_last_delimiters :: Boolean
-  , info :: Boolean
-  , max_record_size :: Int
-  , quote :: String
-  , raw :: Boolean
-  , record_delimiter :: String
-  , relax_column_count :: Boolean
-  , skip_empty_lines :: Boolean
-  , skip_records_with_empty_values :: Boolean
-  , skip_records_with_error :: Boolean
-  , to :: Int
-  , to_line :: Int
-  , trim :: Boolean
-  , ltrim :: Boolean
-  , rtrim :: Boolean
-  | r
-  )
+make
+  :: forall r missing extra minimal minimalExtra
+  . Union r missing (Options extra)
+  => Union r (useFloat32 :: F32) minimal
+  => Nub minimal (useFloat32 :: F32 | minimalExtra)
+  => { | r }
+  -> Effect (CBORDecoder ())
+make = makeImpl <<< prepareOptions @r @missing
 
--- | Create a CSVParser
-make :: forall @config @missing @extra. 
Union config missing (Config extra) => { | config } -> Effect (CSVParser ()) -make = makeImpl <<< unsafeToForeign <<< Object.union (recordToForeign { columns: false, cast: false, cast_date: false }) <<< recordToForeign - -toObjectStream :: CSVParser () -> Object.Transform Buffer (Array String) +toObjectStream :: forall r. CBORDecoder r -> Object.Transform Buffer Foreign toObjectStream = unsafeCoerce -- | `data` event. Emitted when a CSV record has been parsed. -dataH :: forall a. EventHandle1 (CSVParser a) (Array String) +dataH :: forall a. EventHandle1 (CBORDecoder a) Foreign dataH = EventHandle "data" mkEffectFn1 -- | FFI foreign import makeImpl :: forall r. Foreign -> Effect (Stream r) -- | FFI -foreign import readImpl :: forall r. Stream r -> Effect (Nullable (Array String)) +foreign import readImpl :: forall r. Stream r -> Effect (Nullable Foreign) -- | FFI recordToForeign :: forall r. Record r -> Object Foreign diff --git a/src/Node.Stream.CBOR.Encode.js b/src/Node.Stream.CBOR.Encode.js index 99e4ed7..003cfbd 100644 --- a/src/Node.Stream.CBOR.Encode.js +++ b/src/Node.Stream.CBOR.Encode.js @@ -1,7 +1,7 @@ -import { stringify } from "csv-stringify"; +import { EncoderStream } from "cbor-x"; -/** @type {(c: import('csv-stringify').Options) => () => import('csv-stringify').Stringifier} */ -export const makeImpl = (c) => () => stringify(c); +/** @type {(s: import('cbor-x').Options) => () => EncoderStream} */ +export const makeImpl = (c) => () => new EncoderStream({useRecords: false, ...c}); -/** @type {(s: import('csv-stringify').Stringifier) => (vals: Array) => () => void} */ -export const writeImpl = (s) => (vals) => () => s.write(vals); +/** @type {(s: EncoderStream) => (a: unknown) => () => void} */ +export const writeImpl = (s) => (a) => () => s.write(a); diff --git a/src/Node.Stream.CBOR.Encode.purs b/src/Node.Stream.CBOR.Encode.purs index 2a3a605..69c5217 100644 --- a/src/Node.Stream.CBOR.Encode.purs +++ b/src/Node.Stream.CBOR.Encode.purs @@ -1,49 +1,25 @@ -module Node.Stream.CSV.Stringify where +module Node.Stream.CBOR.Encode where import Prelude -import Data.CSV.Record (class WriteCSVRecord, writeCSVRecord) -import Data.String.Regex (Regex) +import Data.CBOR (class WriteCBOR, writeCBOR) import Effect (Effect) -import Foreign (Foreign, unsafeToForeign) +import Foreign (Foreign) import Foreign.Object (Object) -import Foreign.Object (union) as Object +import Node.Buffer (Buffer) import Node.Stream (Read, Stream, Write) +import Node.Stream.CBOR.Options (F32, Options, prepareOptions) import Node.Stream.Object (Transform) as Object -import Prim.Row (class Union) -import Prim.RowList (class RowToList) +import Prim.Row (class Nub, class Union) import Unsafe.Coerce (unsafeCoerce) -data CSVWrite +data CBOREncode --- | Stream transforming rows of stringified CSV values --- | to CSV-formatted rows. --- | --- | Write rows to the stream using `write`. --- | --- | Stringified rows are emitted on the `Readable` end as string --- | chunks, meaning it can be treated as a `Node.Stream.Readable` --- | that has had `setEncoding UTF8` invoked on it. 
-type CSVStringifier :: Row Type -> Type
-type CSVStringifier r = Stream (read :: Read, write :: Write, csv :: CSVWrite | r)
-
--- | https://csv.js.org/stringify/options/
-type Config r =
-  ( bom :: Boolean
-  , delimiter :: String
-  , record_delimiter :: String
-  , escape :: String
-  , escape_formulas :: Boolean
-  , quote :: String
-  , quoted :: Boolean
-  , quoted_empty :: Boolean
-  , quoted_match :: Regex
-  , quoted_string :: Boolean
-  | r
-  )
+type CBOREncoder :: Row Type -> Type
+type CBOREncoder r = Stream (read :: Read, write :: Write, cbor :: CBOREncode | r)
 
 foreign import makeImpl :: forall r. Foreign -> Effect (Stream r)
-foreign import writeImpl :: forall r. Stream r -> Array String -> Effect Unit
+foreign import writeImpl :: forall r. Stream r -> Foreign -> Effect Unit
 
 recordToForeign :: forall r. Record r -> Object Foreign
 recordToForeign = unsafeCoerce
@@ -53,31 +29,21 @@ recordToForeign = unsafeCoerce
 -- |
 -- | Requires an ordered array of column names.
 make
-  :: forall @config @missing @extra
-  . Union config missing (Config extra)
-  => Array String
-  -> { | config }
-  -> Effect (CSVStringifier ())
-make columns =
-  makeImpl
-    <<< unsafeToForeign
-    <<< Object.union (recordToForeign { columns, header: true })
-    <<< recordToForeign
+  :: forall r missing extra minimal minimalExtra
+  . Union r missing (Options extra)
+  => Union r (useFloat32 :: F32) minimal
+  => Nub minimal (useFloat32 :: F32 | minimalExtra)
+  => { | r }
+  -> Effect (CBOREncoder ())
+make = makeImpl <<< prepareOptions @r @missing
 
 -- | Convert the raw stream to a typed ObjectStream
-toObjectStream :: CSVStringifier () -> Object.Transform (Array String) String
+toObjectStream :: CBOREncoder () -> Object.Transform Foreign Buffer
 toObjectStream = unsafeCoerce
 
--- | Write a record to a CSVStringifier.
--- |
--- | The record will be emitted on the `Readable` end
--- | of the stream as a string chunk.
-write :: forall @r rl a. RowToList r rl => WriteCSVRecord r rl => CSVStringifier a -> { | r } -> Effect Unit
-write s = writeImpl s <<< writeCSVRecord @r @rl
-
--- | Write a record to a CSVStringifier.
--- |
--- | The record will be emitted on the `Readable` end
--- | of the stream as a string chunk.
-writeRaw :: forall a. CSVStringifier a -> Array String -> Effect Unit
-writeRaw = writeImpl
+-- | Write a value to a CBOREncoder.
+-- |
+-- | The value will be CBOR-encoded and emitted on the `Readable` end
+-- | of the stream as a `Buffer` chunk.
+write :: forall a r. WriteCBOR a => CBOREncoder r -> a -> Effect Unit
+write s a = writeImpl s $ writeCBOR a
diff --git a/src/Node.Stream.CBOR.Options.js b/src/Node.Stream.CBOR.Options.js
new file mode 100644
index 0000000..73aa0bf
--- /dev/null
+++ b/src/Node.Stream.CBOR.Options.js
@@ -0,0 +1,11 @@
+import {FLOAT32_OPTIONS} from 'cbor-x'
+
+/** @type {(o: {round: (_a: F32) => boolean, fit: (_a: F32) => boolean, always: (_a: F32) => boolean}) => (f: F32) => FLOAT32_OPTIONS} */
+export const f32ToConst = ({round, fit, always}) => a =>
+  always(a)
+    ? FLOAT32_OPTIONS.ALWAYS
+    : fit(a)
+    ? FLOAT32_OPTIONS.DECIMAL_FIT
+    : round(a)
+    ? 
FLOAT32_OPTIONS.DECIMAL_ROUND + : FLOAT32_OPTIONS.NEVER diff --git a/src/Node.Stream.CBOR.Options.purs b/src/Node.Stream.CBOR.Options.purs new file mode 100644 index 0000000..e37b4cb --- /dev/null +++ b/src/Node.Stream.CBOR.Options.purs @@ -0,0 +1,50 @@ +module Node.Stream.CBOR.Options where + +import Prelude + +import Foreign (Foreign, unsafeToForeign) +import Prim.Row (class Nub, class Union) +import Record (merge, modify) +import Type.Prelude (Proxy(..)) + +data F32 + = F32Always + | F32DecimalRound + | F32DecimalFit + | F32Never + +derive instance Eq F32 + +foreign import data CBORStruct :: Type +foreign import f32ToConst :: {always :: F32 -> Boolean, round :: F32 -> Boolean, fit :: F32 -> Boolean} -> F32 -> Foreign + +type Options r = + ( useRecords :: Boolean + , structures :: Array CBORStruct + , structuredClone :: Boolean + , mapsAsObject :: Boolean + , useFloat32 :: F32 + , alwaysUseFloat :: Boolean + , pack :: Boolean + , variableMapSize :: Boolean + , copyBuffers :: Boolean + , bundleStrings :: Boolean + , useTimestamp32 :: Boolean + , largeBigIntToFloat :: Boolean + , useTag259ForMaps :: Boolean + , tagUint8Array :: Boolean + , int64AsNumber :: Boolean + | r + ) + +prepareOptions + :: forall @r @missing extra minimal minimalExtra + . Union r missing (Options extra) + => Union r (useFloat32 :: F32) minimal + => Nub minimal (useFloat32 :: F32 | minimalExtra) + => { | r } + -> Foreign +prepareOptions a = + unsafeToForeign + $ modify (Proxy @"useFloat32") (f32ToConst {fit: eq F32DecimalFit, round: eq F32DecimalRound, always: eq F32Always}) + $ merge a {useFloat32: F32Never} diff --git a/src/Node.Stream.CBOR.purs b/src/Node.Stream.CBOR.purs deleted file mode 100644 index d47d642..0000000 --- a/src/Node.Stream.CBOR.purs +++ /dev/null @@ -1 +0,0 @@ -module Node.Stream.CSV where diff --git a/src/Pipes.CBOR.purs b/src/Pipes.CBOR.purs index 9ac747c..41a71ae 100644 --- a/src/Pipes.CBOR.purs +++ b/src/Pipes.CBOR.purs @@ -1,98 +1,52 @@ -module Pipes.CSV where +module Pipes.CBOR where import Prelude -import Control.Monad.Error.Class (liftEither) +import Control.Monad.Error.Class (class MonadThrow, liftEither) import Control.Monad.Except (runExcept) -import Control.Monad.Rec.Class (forever) -import Control.Monad.ST.Global as ST -import Control.Monad.ST.Ref as STRef -import Data.Array as Array +import Control.Monad.Rec.Class (class MonadRec, forever) import Data.Bifunctor (lmap) -import Data.CSV.Record (class ReadCSVRecord, class WriteCSVRecord, readCSVRecord, writeCSVRecord) -import Data.FunctorWithIndex (mapWithIndex) -import Data.Map as Map -import Data.Maybe (Maybe(..)) -import Data.Tuple.Nested ((/\)) -import Effect.Aff (Aff) +import Data.CBOR (class ReadCBOR, class WriteCBOR, readCBOR, writeCBOR) +import Data.Maybe (Maybe) +import Effect.Aff.Class (class MonadAff) import Effect.Class (liftEffect) -import Effect.Exception (error) +import Effect.Exception (Error, error) import Node.Buffer (Buffer) -import Node.Stream.CSV.Parse as CSV.Parse -import Node.Stream.CSV.Stringify as CSV.Stringify +import Node.Stream.CBOR.Decode as CBOR.Decode +import Node.Stream.CBOR.Encode as CBOR.Encode import Pipes (await, yield, (>->)) import Pipes.Core (Pipe) import Pipes.Node.Stream as Pipes.Stream -import Prim.RowList (class RowToList) -import Record.Extra (class Keys, keys) -import Type.Prelude (Proxy(..)) - --- | Transforms buffer chunks of a CSV file to parsed --- | records of `r`. 
--- | --- | ``` --- | -- == my-data.csv.gz == --- | -- id,foo,is_deleted --- | -- 1,hi,f --- | -- 2,bye,t --- | --- | rows --- | :: Array {id :: Int, foo :: String, is_deleted :: Boolean} --- | <- map Array.fromFoldable --- | $ Pipes.toListM --- | $ Pipes.Node.Stream.unEOS --- | $ Pipes.Node.FS.read "my-data.csv.gz" --- | >-> Pipes.Node.Zlib.gunzip --- | >-> Pipes.CSV.parse --- | rows `shouldEqual` [{id: 1, foo: "hi", is_deleted: false}, {id: 2, foo: "bye", is_deleted: true}] --- | ``` -parse - :: forall @r rl - . RowToList r rl - => ReadCSVRecord r rl - => Pipe (Maybe Buffer) (Maybe { | r }) Aff Unit -parse = do - raw <- liftEffect $ CSV.Parse.make {} - colsST <- liftEffect $ ST.toEffect $ STRef.new Nothing +-- | Transforms buffer chunks of a CBOR file to parsed values +-- | of type `a`. +decode + :: forall m @a + . MonadRec m + => MonadAff m + => MonadThrow Error m + => ReadCBOR a + => Pipe (Maybe Buffer) (Maybe a) m Unit +decode = do + raw <- liftEffect $ CBOR.Decode.make {} let - readCols = liftEffect $ ST.toEffect $ STRef.read colsST - putCols a = void $ liftEffect $ ST.toEffect $ STRef.write (Just a) colsST - - parse' a cols' = liftEither $ lmap (error <<< show) $ runExcept $ readCSVRecord @r @rl cols' a - firstRow a = putCols $ Map.fromFoldable $ mapWithIndex (flip (/\)) a - row a cols' = yield =<< parse' a cols' unmarshal = forever do r <- await - cols <- readCols - case cols of - Just cols' -> row r cols' - Nothing -> firstRow r - - parser = Pipes.Stream.fromTransform $ CSV.Parse.toObjectStream raw + yield =<< liftEither (lmap (error <<< show) $ runExcept $ readCBOR @a r) + parser = Pipes.Stream.fromTransform $ CBOR.Decode.toObjectStream raw parser >-> Pipes.Stream.inEOS unmarshal --- | Transforms buffer chunks of a CSV file to parsed --- | arrays of CSV values. -parseRaw :: Pipe (Maybe Buffer) (Maybe (Array String)) Aff Unit -parseRaw = do - s <- liftEffect $ CSV.Parse.toObjectStream <$> CSV.Parse.make {} - Pipes.Stream.fromTransform s - --- | Transforms CSV rows into stringified CSV records --- | using the given ordered array of column names. -stringifyRaw :: Array String -> Pipe (Maybe (Array String)) (Maybe String) Aff Unit -stringifyRaw columns = do - s <- liftEffect $ CSV.Stringify.toObjectStream <$> CSV.Stringify.make columns {} - Pipes.Stream.fromTransform s - --- | Transforms purescript records into stringified CSV records. --- | --- | Columns are inferred from the record's keys, ordered alphabetically. -stringify :: forall r rl. WriteCSVRecord r rl => RowToList r rl => Keys rl => Pipe (Maybe { | r }) (Maybe String) Aff Unit -stringify = do - raw <- liftEffect $ CSV.Stringify.make (Array.fromFoldable $ keys $ Proxy @r) {} +-- | Encode purescript values as CBOR buffers +encode + :: forall m a + . 
MonadAff m + => MonadThrow Error m + => MonadRec m + => WriteCBOR a + => Pipe (Maybe a) (Maybe Buffer) m Unit +encode = do + raw <- liftEffect $ CBOR.Encode.make {} let - printer = Pipes.Stream.fromTransform $ CSV.Stringify.toObjectStream raw - marshal = forever $ yield =<< (writeCSVRecord @r @rl <$> await) + printer = Pipes.Stream.fromTransform $ CBOR.Encode.toObjectStream raw + marshal = forever $ yield =<< (writeCBOR <$> await) Pipes.Stream.inEOS marshal >-> printer diff --git a/test/Test/Main.purs b/test/Test/Main.purs index 01eb821..c8cd6d4 100644 --- a/test/Test/Main.purs +++ b/test/Test/Main.purs @@ -5,10 +5,10 @@ import Prelude import Data.Maybe (Maybe(..)) import Effect (Effect) import Effect.Aff (launchAff_) -import Test.Pipes.CSV as Test.Pipes.CSV +import Test.Pipes.CBOR as Test.Pipes.CBOR import Test.Spec.Reporter (specReporter) import Test.Spec.Runner (defaultConfig, runSpec') main :: Effect Unit main = launchAff_ $ runSpec' (defaultConfig { failFast = true, timeout = Nothing }) [ specReporter ] do - Test.Pipes.CSV.spec + Test.Pipes.CBOR.spec diff --git a/test/Test/Pipes.CSV.purs b/test/Test/Pipes.CSV.purs index d17353a..add3685 100644 --- a/test/Test/Pipes.CSV.purs +++ b/test/Test/Pipes.CSV.purs @@ -1,88 +1,87 @@ -module Test.Pipes.CSV where +module Test.Pipes.CBOR where import Prelude +import Control.Monad.Cont (lift) import Control.Monad.Gen (chooseInt) -import Control.Monad.Rec.Class (Step(..), tailRecM) import Data.Array as Array import Data.DateTime (DateTime) -import Data.Foldable (fold, sum) +import Data.List ((:)) +import Data.List as List import Data.Maybe (Maybe(..), fromJust) import Data.Newtype (wrap) import Data.PreciseDateTime (fromRFC3339String, toDateTimeLossy) -import Data.String.CodePoints as String.CodePoints import Data.Tuple.Nested ((/\)) +import Effect (Effect) +import Effect.Aff (delay) +import Effect.CBOR as CBOR import Effect.Class (liftEffect) -import Effect.Console (log) +import Node.Buffer (Buffer) +import Node.Buffer as Buffer import Node.Encoding (Encoding(..)) import Partial.Unsafe (unsafePartial) import Pipes (yield, (>->)) -import Pipes.CSV as Pipes.CSV +import Pipes.CBOR as Pipes.CBOR import Pipes.Collect as Pipes.Collect -import Pipes.Construct as Pipes.Construct -import Pipes.Node.Buffer as Pipes.Buffer import Pipes.Node.Stream as Pipes.Stream -import Pipes.Prelude (chain, map, toListM) as Pipes -import Pipes.Util as Pipes.Util +import Pipes.Prelude (toListM) as Pipes import Test.QuickCheck.Gen (randomSample') import Test.Spec (Spec, before, describe, it) import Test.Spec.Assertions (shouldEqual) -csv :: String -csv = - """created,flag,foo,id -2020-01-01T00:00:00.0Z,true,a,1 -2024-02-02T08:00:00.0Z,false,apple,2 -1970-01-01T00:00:00.0Z,true,hello,3 -""" +cborHex :: String +cborHex = "82b90002646e616d656568656e72796174c1fb41d990ee6d671aa0b90002646e616d65656a756c696f6174c1fbc1d756dad0bbb646" + +cborBuf :: Effect Buffer +cborBuf = Buffer.fromString cborHex Hex + +exp :: Array {name :: String, t :: DateTime} +exp = + [{name: "henry", t: toDateTimeLossy $ unsafePartial fromJust $ fromRFC3339String $ wrap "2024-05-14T19:21:25.611Z"} + ,{name: "julio", t: toDateTimeLossy $ unsafePartial fromJust $ fromRFC3339String $ wrap "1920-05-14T20:21:17.067Z"} + ] + dt :: String -> DateTime dt = toDateTimeLossy <<< unsafePartial fromJust <<< fromRFC3339String <<< wrap spec :: Spec Unit spec = - describe "Pipes.CSV" do - it "stringify" do - let - objs = - [ { id: 1, foo: "a", flag: true, created: dt "2020-01-01T00:00:00Z" } - , { id: 2, foo: "apple", flag: 
false, created: dt "2024-02-02T08:00:00Z" } - , { id: 3, foo: "hello", flag: true, created: dt "1970-01-01T00:00:00Z" } - ] + describe "Pipes.CBOR" do + it "encode" do + bytes + <- Pipes.Collect.toBuffer + $ Pipes.Stream.withEOS (yield exp) + >-> Pipes.CBOR.encode + >-> Pipes.Stream.unEOS + act <- liftEffect $ CBOR.decode bytes + act `shouldEqual` exp - csv' <- map fold $ Pipes.Collect.toArray $ Pipes.Stream.withEOS (Pipes.Construct.eachArray objs) >-> Pipes.CSV.stringify >-> Pipes.Stream.unEOS - csv' `shouldEqual` csv describe "parse" do it "parses csv" do - rows <- map Array.fromFoldable - $ Pipes.toListM - $ Pipes.Stream.withEOS (yield csv) - >-> Pipes.Stream.inEOS (Pipes.Buffer.fromString UTF8) - >-> Pipes.CSV.parse - >-> Pipes.Stream.unEOS + buf <- liftEffect $ cborBuf + rows <- Pipes.toListM + $ Pipes.Stream.withEOS (yield buf *> lift (delay $ wrap 10.0)) + >-> Pipes.CBOR.decode - rows `shouldEqual` - [ { id: 1, foo: "a", flag: true, created: dt "2020-01-01T00:00:00Z" } - , { id: 2, foo: "apple", flag: false, created: dt "2024-02-02T08:00:00Z" } - , { id: 3, foo: "hello", flag: true, created: dt "1970-01-01T00:00:00Z" } - ] + rows `shouldEqual` ((Just exp) : Nothing : List.Nil) before (do nums <- liftEffect $ randomSample' 100000 (chooseInt 0 9) let - chars = [ "i","d","\n" ] <> join ((\n -> [show n, "\n"]) <$> nums) - bufs <- Pipes.Collect.toArray - $ Pipes.Stream.withEOS (Pipes.Construct.eachArray chars) - >-> Pipes.Util.chunked 1000 - >-> Pipes.Stream.inEOS (Pipes.map fold >-> Pipes.Buffer.fromString UTF8) - >-> Pipes.Stream.unEOS - pure $ nums /\ bufs + objs = (\n -> {id: n}) <$> nums + bytes <- + Pipes.Collect.toBuffer + $ Pipes.Stream.withEOS (yield objs) + >-> Pipes.CBOR.encode + >-> Pipes.Stream.unEOS + pure $ nums /\ bytes ) - $ it "parses large csv" \(nums /\ bufs) -> do + $ it "parses large csv" \(nums /\ bytes) -> do rows <- Pipes.Collect.toArray - $ Pipes.Stream.withEOS (Pipes.Construct.eachArray bufs) - >-> Pipes.CSV.parse @(id :: Int) + $ Pipes.Stream.withEOS (yield bytes) + >-> Pipes.CBOR.decode @(Array {id :: Int}) >-> Pipes.Stream.unEOS - rows `shouldEqual` ((\id -> { id }) <$> nums) + rows `shouldEqual` [(\id -> { id }) <$> nums]
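
---

Round-trip usage sketch for the new `Pipes.CBOR` / `Effect.CBOR` API, mirroring `Test.Pipes.CBOR` above. The `Example` module, the `Person` record, and the sample values are illustrative only and not part of the patch; `Pipes`, `Pipes.Collect`, and `Pipes.Node.Stream` come from `pipes` and `node-stream-pipes`, as in the tests.

```purescript
module Example where

import Prelude

import Effect (Effect)
import Effect.Aff (launchAff_)
import Effect.CBOR as CBOR
import Effect.Class (liftEffect)
import Effect.Console (log)
import Node.Buffer (Buffer)
import Pipes (yield, (>->))
import Pipes.CBOR as Pipes.CBOR
import Pipes.Collect as Pipes.Collect
import Pipes.Node.Stream as Pipes.Stream

-- An illustrative record type; any row whose fields have
-- ReadCBOR/WriteCBOR (or ReadForeign/WriteForeign) instances works.
type Person = { name :: String, age :: Int }

people :: Array Person
people = [ { name: "henry", age: 35 }, { name: "julio", age: 29 } ]

main :: Effect Unit
main = launchAff_ do
  -- Encode an array of records into a single CBOR `Buffer`
  -- using the streaming encoder.
  bytes :: Buffer <-
    Pipes.Collect.toBuffer
      $ Pipes.Stream.withEOS (yield people)
      >-> Pipes.CBOR.encode
      >-> Pipes.Stream.unEOS

  -- Decode it back synchronously with Effect.CBOR...
  people' :: Array Person <- liftEffect $ CBOR.decode bytes

  -- ...or through the streaming decoder.
  rows <-
    Pipes.Collect.toArray
      $ Pipes.Stream.withEOS (yield bytes)
      >-> Pipes.CBOR.decode @(Array Person)
      >-> Pipes.Stream.unEOS

  liftEffect $ log $ show (people' == people && rows == [ people ])
```

Both pipes are polymorphic in the base monad (any `MonadAff` with `MonadRec` and `MonadThrow Error`), so the same code runs directly in `Aff` as above.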