feat: working

orion 2024-05-14 15:01:46 -05:00
parent 55b372494f
commit cbe7d39061
Signed by: orion
GPG Key ID: 6D4165AE4C928719
18 changed files with 444 additions and 467 deletions


@ -1,81 +1,9 @@
-# purescript-csv-stream
+# purescript-cbor-stream
-Type-safe bindings for the streaming API of `csv-parse` and `csv-stringify`.
+Type-safe bindings for the streaming API of `cbor-x`.
## Installing
```bash
-spago install csv-stream
+spago install cbor-stream
-{bun|yarn|npm|pnpm} install csv-parse csv-stringify
+{bun|yarn|npm|pnpm} install cbor-x
```
## Examples
### Stream
```purescript
module Main where
import Prelude
import Effect (Effect)
import Effect.Class (liftEffect)
import Effect.Aff (launchAff_)
import Node.Stream (pipe)
import Node.Stream as Stream
import Node.Stream.CSV.Stringify as CSV.Stringify
import Node.Stream.CSV.Parse as CSV.Parse
type MyCSVType1 = {a :: Int, b :: Int, bar :: String, baz :: Boolean}
type MyCSVType2 = {ab :: Int, bar :: String, baz :: Boolean}
atob :: MyCSVType1 -> MyCSVType2
atob {a, b, bar, baz} = {ab: a + b, bar, baz}
myCSV :: String
myCSV = "a,b,bar,baz\n1,2,\"hello, world!\",true\n3,3,,f"
main :: Effect Unit
main = launchAff_ do
parser <- liftEffect $ CSV.Parse.make {}
stringifier <- liftEffect $ CSV.Stringify.make {}
input <- liftEffect $ Stream.readableFromString myCSV
liftEffect $ Stream.pipe input parser
records <- CSV.Parse.readAll parser
liftEffect $ for_ records \r -> CSV.Stringify.write $ atob r
liftEffect $ Stream.end stringifier
-- "ab,bar,baz\n3,\"hello, world!\",true\n6,,false"
csvString <- CSV.Stringify.readAll stringifier
pure unit
```
### Synchronous
```purescript
module Main where
import Prelude
import Effect (Effect)
import Effect.Class (liftEffect)
import Effect.Aff (launchAff_)
import Node.Stream (pipe)
import Node.Stream as Stream
import Node.Stream.CSV.Stringify as CSV.Stringify
import Node.Stream.CSV.Parse as CSV.Parse
type MyCSVType1 = {a :: Int, b :: Int, bar :: String, baz :: Boolean}
type MyCSVType2 = {ab :: Int, bar :: String, baz :: Boolean}
atob :: MyCSVType1 -> MyCSVType2
atob {a, b, bar, baz} = {ab: a + b, bar, baz}
myCSV :: String
myCSV = "a,b,bar,baz\n1,2,\"hello, world!\",true\n3,3,,f"
main :: Effect Unit
main = launchAff_ do
records :: Array MyCSVType1 <- CSV.Parse.parse myCSV
-- "ab,bar,baz\n3,\"hello, world!\",true\n6,,false"
csvString <- CSV.Stringify.stringify (atob <$> records)
pure unit
```
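This commit drops the README usage examples without replacing them. A minimal sketch of how the new API fits together, using only modules introduced later in this diff (`Pipes.CBOR`, `Effect.CBOR`, and the `node-stream-pipes` helpers); the `Row` type and values are illustrative:

```purescript
module Main where

import Prelude

import Effect (Effect)
import Effect.Aff (launchAff_)
import Effect.CBOR as CBOR
import Effect.Class (liftEffect)
import Effect.Console (log)
import Pipes (yield, (>->))
import Pipes.CBOR as Pipes.CBOR
import Pipes.Collect as Pipes.Collect
import Pipes.Node.Stream as Pipes.Stream

type Row = { id :: Int, name :: String }

main :: Effect Unit
main = launchAff_ do
  let rows = [ { id: 1, name: "henry" }, { id: 2, name: "julio" } ] :: Array Row
  -- stream-encode the array into a single CBOR Buffer
  bytes <-
    Pipes.Collect.toBuffer
      $ Pipes.Stream.withEOS (yield rows)
      >-> Pipes.CBOR.encode
      >-> Pipes.Stream.unEOS
  -- decode it back with the synchronous API
  decoded :: Array Row <- liftEffect $ CBOR.decode bytes
  liftEffect $ log $ show decoded
```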


@ -1,6 +1,6 @@
{
"name": "purescript-cbor-stream",
-"version": "v2.0.1",
+"version": "v1.4.0",
"type": "module",
"dependencies": {
"cbor-x": "^1.5.9",


@ -1,38 +1,34 @@
workspace: workspace:
packages: packages:
csv-stream: cbor-stream:
path: ./ path: ./
dependencies: dependencies:
- aff: ">=7.1.0 <8.0.0" - aff: ">=7.1.0 <8.0.0"
- arrays: ">=7.3.0 <8.0.0" - arrays
- bifunctors: ">=6.0.0 <7.0.0" - bifunctors: ">=6.0.0 <7.0.0"
- datetime: ">=6.1.0 <7.0.0" - datetime: ">=6.1.0 <7.0.0"
- effect: ">=4.0.0 <5.0.0" - effect: ">=4.0.0 <5.0.0"
- either
- exceptions: ">=6.0.0 <7.0.0" - exceptions: ">=6.0.0 <7.0.0"
- foldable-traversable: ">=6.0.0 <7.0.0" - foldable-traversable: ">=6.0.0 <7.0.0"
- foreign: ">=7.0.0 <8.0.0" - foreign: ">=7.0.0 <8.0.0"
- foreign-object: ">=4.1.0 <5.0.0" - foreign-object: ">=4.1.0 <5.0.0"
- integers: ">=6.0.0 <7.0.0" - js-bigints
- lists: ">=7.0.0 <8.0.0" - js-date
- js-maps
- maybe: ">=6.0.0 <7.0.0" - maybe: ">=6.0.0 <7.0.0"
- newtype: ">=5.0.0 <6.0.0"
- node-buffer: ">=9.0.0 <10.0.0" - node-buffer: ">=9.0.0 <10.0.0"
- node-event-emitter: ">=3.0.0 <4.0.0" - node-event-emitter: ">=3.0.0 <4.0.0"
- node-stream-pipes: ">=1.3.0 <2.0.0" - node-stream-pipes
- node-streams: ">=9.0.0 <10.0.0" - node-streams: ">=9.0.0 <10.0.0"
- nullable: ">=6.0.0 <7.0.0" - nullable: ">=6.0.0 <7.0.0"
- numbers: ">=9.0.1 <10.0.0"
- ordered-collections: ">=3.2.0 <4.0.0" - ordered-collections: ">=3.2.0 <4.0.0"
- pipes: ">=8.0.0 <9.0.0" - pipes: ">=8.0.0 <9.0.0"
- precise-datetime: ">=7.0.0 <8.0.0"
- prelude: ">=6.0.1 <7.0.0" - prelude: ">=6.0.1 <7.0.0"
- record: ">=4.0.0 <5.0.0" - record: ">=4.0.0 <5.0.0"
- record-extra: ">=5.0.1 <6.0.0" - simple-json
- st: ">=6.2.0 <7.0.0"
- strings: ">=6.0.1 <7.0.0"
- tailrec: ">=6.1.0 <7.0.0" - tailrec: ">=6.1.0 <7.0.0"
- transformers: ">=6.0.0 <7.0.0" - transformers: ">=6.0.0 <7.0.0"
- tuples: ">=7.0.0 <8.0.0"
- typelevel-prelude: ">=7.0.0 <8.0.0" - typelevel-prelude: ">=7.0.0 <8.0.0"
- unsafe-coerce: ">=6.0.0 <7.0.0" - unsafe-coerce: ">=6.0.0 <7.0.0"
test_dependencies: test_dependencies:
@ -40,6 +36,7 @@ workspace:
- gen - gen
- node-fs - node-fs
- node-zlib - node-zlib
- precise-datetime
- quickcheck - quickcheck
- simple-json - simple-json
- spec - spec
@ -76,7 +73,9 @@ workspace:
- identity - identity
- integers - integers
- invariant - invariant
- js-bigints
- js-date - js-date
- js-maps
- lazy - lazy
- lcg - lcg
- lists - lists
@ -103,10 +102,10 @@ workspace:
- precise-datetime - precise-datetime
- prelude - prelude
- profunctor - profunctor
- profunctor-lenses
- quickcheck - quickcheck
- random - random
- record - record
- record-extra
- refs - refs
- safe-coerce - safe-coerce
- simple-json - simple-json
@ -123,7 +122,10 @@ workspace:
- unordered-collections - unordered-collections
- unsafe-coerce - unsafe-coerce
- variant - variant
extra_packages: {} extra_packages:
node-stream-pipes:
git: https://git.orionkindel.com/orion/purescript-node-stream-pipes
ref: 657af14
packages: packages:
aff: aff:
type: registry type: registry
@ -482,6 +484,14 @@ packages:
dependencies: dependencies:
- control - control
- prelude - prelude
js-bigints:
type: registry
version: 2.2.1
integrity: sha256-hKWZo9NxtsAaHmNXr6B8GY4c0olQbYLXPVGWm4TF2Ss=
dependencies:
- integers
- maybe
- prelude
js-date: js-date:
type: registry type: registry
version: 8.0.0 version: 8.0.0
@ -493,6 +503,24 @@ packages:
- foreign - foreign
- integers - integers
- now - now
js-maps:
type: registry
version: 0.1.2
integrity: sha256-xQDZf88nQEiZNmkCVEi3YQGB19hu6Oju6laEi8Os/oM=
dependencies:
- arrays
- either
- foldable-traversable
- functions
- js-bigints
- maybe
- prelude
- profunctor-lenses
- st
- strings
- tuples
- unfoldable
- unsafe-coerce
lazy: lazy:
type: registry type: registry
version: 6.0.0 version: 6.0.0
@ -606,9 +634,9 @@ packages:
dependencies: dependencies:
- effect - effect
node-stream-pipes: node-stream-pipes:
type: registry type: git
version: 1.3.0 url: https://git.orionkindel.com/orion/purescript-node-stream-pipes
integrity: sha256-5Jpf0BLn0ExQWYxbTTewai4M8quEmEVHxihc9CM1Juo= rev: 657af14bb6f461e5def017d8d1ca6142b00891cb
dependencies: dependencies:
- aff - aff
- arrays - arrays
@ -823,6 +851,31 @@ packages:
- newtype - newtype
- prelude - prelude
- tuples - tuples
profunctor-lenses:
type: registry
version: 8.0.0
integrity: sha256-K7f29rHRHgVSb2Y/PaSKtfYPriP6n87BJNO7EhsZHas=
dependencies:
- arrays
- bifunctors
- const
- control
- distributive
- either
- foldable-traversable
- foreign-object
- functors
- identity
- lists
- maybe
- newtype
- ordered-collections
- partial
- prelude
- profunctor
- record
- transformers
- tuples
quickcheck: quickcheck:
type: registry type: registry
version: 8.0.1 version: 8.0.1
@ -870,18 +923,6 @@ packages:
- functions - functions
- prelude - prelude
- unsafe-coerce - unsafe-coerce
record-extra:
type: registry
version: 5.0.1
integrity: sha256-7vnREK2fpGJ7exswSeA9UpZFuU+UXRt3SA7AFUldT/Y=
dependencies:
- arrays
- functions
- lists
- prelude
- record
- tuples
- typelevel-prelude
refs: refs:
type: registry type: registry
version: 6.0.0 version: 6.0.0


@ -1,7 +1,7 @@
package:
name: cbor-stream
publish:
-version: '2.0.1'
+version: '1.4.0'
license: 'GPL-3.0-or-later'
location:
githubOwner: 'cakekindel'
@ -10,36 +10,32 @@ package:
strict: true
pedanticPackages: true
dependencies:
-- node-stream-pipes: ">=1.3.0 <2.0.0"
- aff: ">=7.1.0 <8.0.0"
- arrays: ">=7.3.0 <8.0.0"
- bifunctors: ">=6.0.0 <7.0.0"
- datetime: ">=6.1.0 <7.0.0"
- effect: ">=4.0.0 <5.0.0"
+- either: ">=6.1.0 <7.0.0"
- exceptions: ">=6.0.0 <7.0.0"
- foldable-traversable: ">=6.0.0 <7.0.0"
- foreign: ">=7.0.0 <8.0.0"
- foreign-object: ">=4.1.0 <5.0.0"
-- integers: ">=6.0.0 <7.0.0"
+- js-bigints: ">=2.2.1 <3.0.0"
-- lists: ">=7.0.0 <8.0.0"
+- js-date: ">=8.0.0 <9.0.0"
+- js-maps: ">=0.1.2 <0.2.0"
- maybe: ">=6.0.0 <7.0.0"
-- newtype: ">=5.0.0 <6.0.0"
- node-buffer: ">=9.0.0 <10.0.0"
- node-event-emitter: ">=3.0.0 <4.0.0"
+- node-stream-pipes: "*"
- node-streams: ">=9.0.0 <10.0.0"
- nullable: ">=6.0.0 <7.0.0"
-- numbers: ">=9.0.1 <10.0.0"
- ordered-collections: ">=3.2.0 <4.0.0"
- pipes: ">=8.0.0 <9.0.0"
-- precise-datetime: ">=7.0.0 <8.0.0"
- prelude: ">=6.0.1 <7.0.0"
- record: ">=4.0.0 <5.0.0"
-- record-extra: ">=5.0.1 <6.0.0"
+- simple-json: ">=9.0.0 <10.0.0"
-- st: ">=6.2.0 <7.0.0"
-- strings: ">=6.0.1 <7.0.0"
- tailrec: ">=6.1.0 <7.0.0"
- transformers: ">=6.0.0 <7.0.0"
-- tuples: ">=7.0.0 <8.0.0"
- typelevel-prelude: ">=7.0.0 <8.0.0"
- unsafe-coerce: ">=6.0.0 <7.0.0"
test:
@ -49,8 +45,12 @@ package:
- gen
- node-fs
- node-zlib
+- precise-datetime
- quickcheck
- simple-json
- spec
workspace:
-extraPackages: {}
+extraPackages:
node-stream-pipes:
git: 'https://git.orionkindel.com/orion/purescript-node-stream-pipes'
ref: '657af14'


@ -1,50 +0,0 @@
module Data.CSV.Record where
import Prelude
import Control.Monad.Error.Class (liftMaybe)
import Control.Monad.Except (Except)
import Data.Array as Array
import Data.CSV (class ReadCSV, class WriteCSV, readCSV, writeCSV)
import Data.List.NonEmpty (NonEmptyList)
import Data.Map (Map)
import Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Symbol (class IsSymbol, reflectSymbol)
import Foreign (ForeignError(..))
import Prim.Row (class Cons, class Lacks)
import Prim.RowList (class RowToList, Cons, Nil, RowList)
import Record as Record
import Type.Prelude (Proxy(..))
class WriteCSVRecord :: Row Type -> RowList Type -> Constraint
class RowToList r rl <= WriteCSVRecord r rl | rl -> r where
writeCSVRecord :: { | r } -> Array String
instance (RowToList r (Cons k v tailrl), IsSymbol k, WriteCSV v, Lacks k tail, Cons k v tail r, WriteCSVRecord tail tailrl) => WriteCSVRecord r (Cons k v tailrl) where
writeCSVRecord r =
let
val = writeCSV $ Record.get (Proxy @k) r
tail = writeCSVRecord @tail @tailrl $ Record.delete (Proxy @k) r
in
[ val ] <> tail
instance WriteCSVRecord () Nil where
writeCSVRecord _ = []
class ReadCSVRecord :: Row Type -> RowList Type -> Constraint
class RowToList r rl <= ReadCSVRecord r rl | rl -> r where
readCSVRecord :: Map String Int -> Array String -> Except (NonEmptyList ForeignError) { | r }
instance (RowToList r (Cons k v tailrl), IsSymbol k, ReadCSV v, Lacks k tail, Cons k v tail r, ReadCSVRecord tail tailrl) => ReadCSVRecord r (Cons k v tailrl) where
readCSVRecord cols vals = do
let
k = reflectSymbol (Proxy @k)
pos <- liftMaybe (pure $ ForeignError $ "reached end of row looking for column " <> k) $ Map.lookup k cols
let valraw = fromMaybe "" $ Array.index vals pos
val <- readCSV @v valraw
tail <- readCSVRecord @tail @tailrl cols vals
pure $ Record.insert (Proxy @k) val tail
instance ReadCSVRecord () Nil where
readCSVRecord _ _ = pure {}


@ -1,74 +1,150 @@
module Data.CSV where module Data.CBOR where
import Prelude import Prelude
import Control.Monad.Error.Class (liftMaybe, throwError) import Control.Monad.Error.Class (liftMaybe)
import Control.Monad.Except (Except) import Control.Monad.Except (ExceptT(..), withExcept)
import Control.Monad.Except.Trans (runExceptT)
import Data.Array as Array
import Data.DateTime (DateTime) import Data.DateTime (DateTime)
import Data.Int as Int import Data.Either (Either(..))
import Data.List.NonEmpty (NonEmptyList) import Data.Foldable (class Foldable)
import Data.Maybe (Maybe(..), maybe) import Data.FoldableWithIndex (foldlWithIndex)
import Data.Newtype (unwrap) import Data.JSDate (JSDate)
import Data.Number (fromString) as Number import Data.JSDate as JSDate
import Data.Number.Format (toString) as Number import Data.Map (Map)
import Data.PreciseDateTime (fromDateTime, fromRFC3339String, toDateTimeLossy, toRFC3339String) import Data.Symbol (class IsSymbol, reflectSymbol)
import Data.RFC3339String (RFC3339String(..)) import Data.Traversable (traverse)
import Data.String as String import Foreign (F, Foreign, ForeignError(..), readArray, unsafeReadTagged, unsafeToForeign)
import Foreign (ForeignError(..)) import Foreign.Index (readProp)
import JS.BigInt (BigInt)
import JS.Map (Map) as JS
import JS.Map as JS.Map
import Prim.Row as Row
import Prim.RowList (class RowToList, Cons, Nil, RowList)
import Record (get)
import Record.Builder (Builder)
import Record.Builder as Builder
import Simple.JSON (class ReadForeign, class WriteForeign, readImpl, writeImpl)
import Type.Prelude (Proxy(..))
class ReadCSV a where class ReadCBOR :: Type -> Constraint
readCSV :: String -> Except (NonEmptyList ForeignError) a class ReadCBOR a where
readCBOR :: Foreign -> F a
class WriteCSV a where class WriteCBOR :: Type -> Constraint
writeCSV :: a -> String class WriteCBOR a where
writeCBOR :: a -> Foreign
instance ReadCSV Int where instance ReadCBOR Foreign where
readCSV s = liftMaybe (pure $ ForeignError $ "invalid integer: " <> s) $ Int.fromString s readCBOR = pure
else instance (RowToList r rl, ReadCBORFields rl () r) => ReadCBOR (Record r) where
readCBOR o = do
flip Builder.build {} <$> getFields (Proxy @rl) o
else instance ReadCBOR BigInt where
readCBOR = unsafeReadTagged "BigInt"
else instance ReadCBOR JSDate where
readCBOR = unsafeReadTagged "Date"
else instance ReadCBOR DateTime where
readCBOR a = do
date :: JSDate <- readCBOR a
liftMaybe (pure $ ForeignError $ "Invalid DateTime: " <> show date) $ JSDate.toDateTime date
else instance ReadCBOR a => ReadCBOR (Array a) where
readCBOR a = do
raws :: Array Foreign <- readArray a
traverse readCBOR raws
else instance (ReadCBOR v) => ReadCBOR (JS.Map String v) where
readCBOR map = do
map' :: JS.Map String Foreign <- unsafeReadTagged "Map" map
foldlWithIndex (\k b v -> do
map'' <- b
v' <- readCBOR v
pure $ JS.Map.insert k v' map''
) (pure JS.Map.empty) map'
else instance (ReadForeign a) => ReadCBOR a where
readCBOR = readImpl
instance ReadCSV Number where instance WriteCBOR Foreign where
readCSV s = liftMaybe (pure $ ForeignError $ "invalid number: " <> s) $ Number.fromString s writeCBOR = identity
else instance (RowToList r rl, WriteCBORFields rl r () to) => WriteCBOR (Record r) where
writeCBOR rec = unsafeToForeign $ Builder.build (writeImplFields (Proxy @rl) rec) {}
else instance WriteCBOR BigInt where
writeCBOR = unsafeToForeign
else instance WriteCBOR JSDate where
writeCBOR = unsafeToForeign
else instance WriteCBOR DateTime where
writeCBOR = unsafeToForeign <<< JSDate.fromDateTime
else instance (WriteCBOR k, WriteCBOR v) => WriteCBOR (JS.Map k v) where
writeCBOR = unsafeToForeign
else instance (WriteCBOR a) => WriteCBOR (Array a) where
writeCBOR as = unsafeToForeign $ writeCBOR <$> as
else instance (Foldable f, WriteCBOR a) => WriteCBOR (f a) where
writeCBOR as = unsafeToForeign $ writeCBOR $ Array.fromFoldable as
else instance (JS.Map.EncodeKey k, WriteCBOR k, WriteCBOR v) => WriteCBOR (Map k v) where
writeCBOR map = writeCBOR $ foldlWithIndex (\k m v -> JS.Map.insert k v m) JS.Map.empty map
else instance (WriteForeign a) => WriteCBOR a where
writeCBOR = writeImpl
instance ReadCSV String where applyEither :: forall e a b. Semigroup e => Either e (a -> b) -> Either e a -> Either e b
readCSV = pure applyEither (Left e) (Right _) = Left e
applyEither (Left e1) (Left e2) = Left (e1 <> e2)
applyEither (Right _) (Left e) = Left e
applyEither (Right fun) (Right a) = Right (fun a)
instance ReadCSV DateTime where exceptTApply :: forall a b e m. Semigroup e => Applicative m => ExceptT e m (a -> b) -> ExceptT e m a -> ExceptT e m b
readCSV s = map toDateTimeLossy $ liftMaybe (pure $ ForeignError $ "invalid ISO date string: " <> s) $ fromRFC3339String $ RFC3339String s exceptTApply fun a = ExceptT $ applyEither
<$> runExceptT fun
<*> runExceptT a
instance ReadCSV Boolean where class ReadCBORFields (xs :: RowList Type) (from :: Row Type) (to :: Row Type)
readCSV s = | xs -> from to where
let getFields :: Proxy xs
inner "t" = pure true -> Foreign
inner "true" = pure true -> F (Builder (Record from) (Record to))
inner "yes" = pure true
inner "y" = pure true
inner "1" = pure true
inner "f" = pure false
inner "false" = pure false
inner "no" = pure false
inner "n" = pure false
inner "0" = pure false
inner _ = throwError $ pure $ ForeignError $ "invalid boolean value: " <> s
in
inner $ String.toLower s
instance ReadCSV a => ReadCSV (Maybe a) where instance readFieldsCons ::
readCSV "" = pure Nothing ( IsSymbol name
readCSV s = Just <$> readCSV s , ReadCBOR ty
, ReadCBORFields tail from from'
, Row.Lacks name from'
, Row.Cons name ty from' to
) => ReadCBORFields (Cons name ty tail) from to where
getFields _ obj = (compose <$> first) `exceptTApply` rest
where
first = do
value <- withExcept' (readCBOR =<< readProp name obj)
pure $ Builder.insert nameP value
rest = getFields tailP obj
nameP = Proxy :: Proxy name
tailP = Proxy :: Proxy tail
name = reflectSymbol nameP
withExcept' = withExcept <<< map $ ErrorAtProperty name
instance WriteCSV Int where instance readFieldsNil ::
writeCSV = Int.toStringAs Int.decimal ReadCBORFields Nil () () where
getFields _ _ =
pure identity
instance WriteCSV Number where class WriteCBORFields (rl :: RowList Type) row (from :: Row Type) (to :: Row Type)
writeCSV = Number.toString | rl -> row from to where
writeImplFields :: forall g. g rl -> Record row -> Builder (Record from) (Record to)
instance WriteCSV String where instance consWriteCBORFields ::
writeCSV = identity ( IsSymbol name
, WriteCBOR ty
, WriteCBORFields tail row from from'
, Row.Cons name ty whatever row
, Row.Lacks name from'
, Row.Cons name Foreign from' to
) => WriteCBORFields (Cons name ty tail) row from to where
writeImplFields _ rec = result
where
namep = Proxy :: Proxy name
value = writeCBOR $ get namep rec
tailp = Proxy :: Proxy tail
rest = writeImplFields tailp rec
result = Builder.insert namep value <<< rest
instance WriteCSV DateTime where instance nilWriteCBORFields ::
writeCSV = unwrap <<< toRFC3339String <<< fromDateTime WriteCBORFields Nil row () () where
writeImplFields _ _ = identity
instance WriteCSV Boolean where
writeCSV = show
instance WriteCSV a => WriteCSV (Maybe a) where
writeCSV = maybe "" writeCSV
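The new `Data.CBOR` module above replaces the CSV codec classes with `ReadCBOR`/`WriteCBOR`. For orientation, a minimal sketch of using them directly; the `Person` type is illustrative, and `runExcept` comes from `transformers`:

```purescript
module Data.CBOR.Example where

import Prelude

import Control.Monad.Except (runExcept)
import Data.CBOR (readCBOR, writeCBOR)
import Data.Either (Either)
import Foreign (Foreign, MultipleErrors)

-- Illustrative record type; fields resolve through the record instance
-- and the simple-json fallback instances defined above.
type Person = { name :: String, age :: Int }

encoded :: Foreign
encoded = writeCBOR { name: "henry", age: 30 }

decoded :: Either MultipleErrors Person
decoded = runExcept $ readCBOR encoded
```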

src/Effect.CBOR.js (new file, 7 lines)

@ -0,0 +1,7 @@
import {decode, encode} from 'cbor-x'
/** @type {(a: Buffer) => () => unknown} */
export const decodeImpl = buf => () => decode(buf)
/** @type {(a: unknown) => () => Buffer} */
export const encodeImpl = a => () => encode(a)

src/Effect.CBOR.purs (new file, 21 lines)

@ -0,0 +1,21 @@
module Effect.CBOR where
import Prelude
import Control.Monad.Error.Class (liftEither)
import Control.Monad.Except (runExcept)
import Data.Bifunctor (lmap)
import Data.CBOR (class ReadCBOR, class WriteCBOR, readCBOR, writeCBOR)
import Effect (Effect)
import Effect.Exception (error)
import Foreign (Foreign)
import Node.Buffer (Buffer)
foreign import decodeImpl :: Buffer -> Effect Foreign
foreign import encodeImpl :: Foreign -> Effect Buffer
decode :: forall a. ReadCBOR a => Buffer -> Effect a
decode = (liftEither <<< lmap (error <<< show) <<< runExcept <<< readCBOR) <=< decodeImpl
encode :: forall a. WriteCBOR a => a -> Effect Buffer
encode = encodeImpl <<< writeCBOR
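A round-trip sketch through this synchronous API; the record type is illustrative:

```purescript
module Effect.CBOR.Example where

import Prelude

import Effect (Effect)
import Effect.CBOR as CBOR

-- Encode a record to a CBOR Buffer, then decode it back.
-- `decode` throws an Error if the buffer doesn't match the expected type.
roundTrip :: Effect { name :: String, age :: Int }
roundTrip = CBOR.decode =<< CBOR.encode { name: "julio", age: 41 }
```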


@ -1,7 +1,7 @@
-import { Parser } from "csv-parse";
+import { DecoderStream } from "cbor-x";
-/** @type {(s: import('csv-parse').Options) => () => Parser} */
+/** @type {(s: import('cbor-x').Options) => () => DecoderStream} */
-export const makeImpl = (c) => () => new Parser(c);
+export const makeImpl = (c) => () => new DecoderStream({useRecords: false, ...c});
-/** @type {(s: Parser) => () => Array<string> | null} */
+/** @type {(s: DecoderStream) => () => unknown | null} */
export const readImpl = (p) => () => p.read();


@ -1,76 +1,51 @@
module Node.Stream.CSV.Parse where module Node.Stream.CBOR.Decode where
import Prelude hiding (join) import Prelude hiding (join)
import Data.Nullable (Nullable) import Data.Nullable (Nullable)
import Effect (Effect) import Effect (Effect)
import Effect.Uncurried (mkEffectFn1) import Effect.Uncurried (mkEffectFn1)
import Foreign (Foreign, unsafeToForeign) import Foreign (Foreign)
import Foreign.Object (Object) import Foreign.Object (Object)
import Foreign.Object (union) as Object
import Node.Buffer (Buffer) import Node.Buffer (Buffer)
import Node.EventEmitter (EventHandle(..)) import Node.EventEmitter (EventHandle(..))
import Node.EventEmitter.UtilTypes (EventHandle1) import Node.EventEmitter.UtilTypes (EventHandle1)
import Node.Stream (Read, Stream, Write) import Node.Stream (Read, Stream, Write)
import Node.Stream.CBOR.Options (F32, Options, prepareOptions)
import Node.Stream.Object (Transform) as Object import Node.Stream.Object (Transform) as Object
import Prim.Row (class Union) import Prim.Row (class Nub, class Union)
import Unsafe.Coerce (unsafeCoerce) import Unsafe.Coerce (unsafeCoerce)
data CSVRead data CBORDecode
-- | Stream transforming chunks of a CSV file -- | CBOR decoding transform stream
-- | into parsed purescript objects.
-- | -- |
-- | The CSV contents may be piped into this stream -- | Accepts unencoded `Buffer` chunks, and transforms them
-- | as Buffer or String chunks. -- | to JS values.
type CSVParser :: Row Type -> Type type CBORDecoder :: Row Type -> Type
type CSVParser r = Stream (read :: Read, write :: Write, csv :: CSVRead | r) type CBORDecoder r = Stream (read :: Read, write :: Write, cbor :: CBORDecode | r)
-- | https://csv.js.org/parse/options/ make
type Config r = :: forall r missing extra minimal minimalExtra
( bom :: Boolean . Union r missing (Options extra)
, group_columns_by_name :: Boolean => Union r (useFloat32 :: F32) minimal
, comment :: String => Nub minimal (useFloat32 :: F32 | minimalExtra)
, comment_no_infix :: Boolean => { | r }
, delimiter :: String -> Effect (CBORDecoder ())
, encoding :: String make = makeImpl <<< prepareOptions @r @missing
, escape :: String
, from :: Int
, from_line :: Int
, ignore_last_delimiters :: Boolean
, info :: Boolean
, max_record_size :: Int
, quote :: String
, raw :: Boolean
, record_delimiter :: String
, relax_column_count :: Boolean
, skip_empty_lines :: Boolean
, skip_records_with_empty_values :: Boolean
, skip_records_with_error :: Boolean
, to :: Int
, to_line :: Int
, trim :: Boolean
, ltrim :: Boolean
, rtrim :: Boolean
| r
)
-- | Create a CSVParser toObjectStream :: forall r. CBORDecoder r -> Object.Transform Buffer Foreign
make :: forall @config @missing @extra. Union config missing (Config extra) => { | config } -> Effect (CSVParser ())
make = makeImpl <<< unsafeToForeign <<< Object.union (recordToForeign { columns: false, cast: false, cast_date: false }) <<< recordToForeign
toObjectStream :: CSVParser () -> Object.Transform Buffer (Array String)
toObjectStream = unsafeCoerce toObjectStream = unsafeCoerce
--- | `data` event. Emitted when a CSV record has been parsed.
+-- | `data` event. Emitted when a CBOR value has been decoded.
-dataH :: forall a. EventHandle1 (CSVParser a) (Array String)
+dataH :: forall a. EventHandle1 (CBORDecoder a) Foreign
dataH = EventHandle "data" mkEffectFn1
-- | FFI
foreign import makeImpl :: forall r. Foreign -> Effect (Stream r)
-- | FFI
-foreign import readImpl :: forall r. Stream r -> Effect (Nullable (Array String))
+foreign import readImpl :: forall r. Stream r -> Effect (Nullable Foreign)
-- | FFI
recordToForeign :: forall r. Record r -> Object Foreign
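As a sketch, the decoder is typically wrapped as an object-mode pipe, mirroring what `Pipes.CBOR.decode` below does internally (the module name of the example is illustrative):

```purescript
module Node.Stream.CBOR.Decode.Example where

import Prelude

import Data.Maybe (Maybe)
import Effect.Aff (Aff)
import Effect.Class (liftEffect)
import Foreign (Foreign)
import Node.Buffer (Buffer)
import Node.Stream.CBOR.Decode as CBOR.Decode
import Pipes.Core (Pipe)
import Pipes.Node.Stream as Pipes.Stream

-- Turn raw CBOR Buffer chunks into decoded (still untyped) values.
decodeForeign :: Pipe (Maybe Buffer) (Maybe Foreign) Aff Unit
decodeForeign = do
  dec <- liftEffect $ CBOR.Decode.make {}
  Pipes.Stream.fromTransform $ CBOR.Decode.toObjectStream dec
```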


@ -1,7 +1,7 @@
-import { stringify } from "csv-stringify";
+import { EncoderStream } from "cbor-x";
-/** @type {(c: import('csv-stringify').Options) => () => import('csv-stringify').Stringifier} */
+/** @type {(s: import('cbor-x').Options) => () => EncoderStream} */
-export const makeImpl = (c) => () => stringify(c);
+export const makeImpl = (c) => () => new EncoderStream({useRecords: false, ...c});
-/** @type {(s: import('csv-stringify').Stringifier) => (vals: Array<string>) => () => void} */
+/** @type {(s: EncoderStream) => (a: unknown) => () => void} */
-export const writeImpl = (s) => (vals) => () => s.write(vals);
+export const writeImpl = (s) => (a) => () => s.write(a);


@ -1,49 +1,25 @@
module Node.Stream.CSV.Stringify where module Node.Stream.CBOR.Encode where
import Prelude import Prelude
import Data.CSV.Record (class WriteCSVRecord, writeCSVRecord) import Data.CBOR (class WriteCBOR, writeCBOR)
import Data.String.Regex (Regex)
import Effect (Effect) import Effect (Effect)
import Foreign (Foreign, unsafeToForeign) import Foreign (Foreign)
import Foreign.Object (Object) import Foreign.Object (Object)
import Foreign.Object (union) as Object import Node.Buffer (Buffer)
import Node.Stream (Read, Stream, Write) import Node.Stream (Read, Stream, Write)
import Node.Stream.CBOR.Options (F32, Options, prepareOptions)
import Node.Stream.Object (Transform) as Object import Node.Stream.Object (Transform) as Object
import Prim.Row (class Union) import Prim.Row (class Nub, class Union)
import Prim.RowList (class RowToList)
import Unsafe.Coerce (unsafeCoerce) import Unsafe.Coerce (unsafeCoerce)
data CSVWrite data CBOREncode
-- | Stream transforming rows of stringified CSV values type CBOREncoder :: Row Type -> Type
-- | to CSV-formatted rows. type CBOREncoder r = Stream (read :: Read, write :: Write, csv :: CBOREncode | r)
-- |
-- | Write rows to the stream using `write`.
-- |
-- | Stringified rows are emitted on the `Readable` end as string
-- | chunks, meaning it can be treated as a `Node.Stream.Readable`
-- | that has had `setEncoding UTF8` invoked on it.
type CSVStringifier :: Row Type -> Type
type CSVStringifier r = Stream (read :: Read, write :: Write, csv :: CSVWrite | r)
-- | https://csv.js.org/stringify/options/
type Config r =
( bom :: Boolean
, delimiter :: String
, record_delimiter :: String
, escape :: String
, escape_formulas :: Boolean
, quote :: String
, quoted :: Boolean
, quoted_empty :: Boolean
, quoted_match :: Regex
, quoted_string :: Boolean
| r
)
foreign import makeImpl :: forall r. Foreign -> Effect (Stream r) foreign import makeImpl :: forall r. Foreign -> Effect (Stream r)
foreign import writeImpl :: forall r. Stream r -> Array String -> Effect Unit foreign import writeImpl :: forall r. Stream r -> Foreign -> Effect Unit
recordToForeign :: forall r. Record r -> Object Foreign recordToForeign :: forall r. Record r -> Object Foreign
recordToForeign = unsafeCoerce recordToForeign = unsafeCoerce
@ -53,31 +29,21 @@ recordToForeign = unsafeCoerce
-- |
--- | Requires an ordered array of column names.
+-- | Options: see `Node.Stream.CBOR.Options`.
make
-:: forall @config @missing @extra
-. Union config missing (Config extra)
-=> Array String
--> { | config }
--> Effect (CSVStringifier ())
-make columns =
-makeImpl
-<<< unsafeToForeign
-<<< Object.union (recordToForeign { columns, header: true })
-<<< recordToForeign
+:: forall r missing extra minimal minimalExtra
+. Union r missing (Options extra)
+=> Union r (useFloat32 :: F32) minimal
+=> Nub minimal (useFloat32 :: F32 | minimalExtra)
+=> { | r }
+-> Effect (CBOREncoder ())
+make = makeImpl <<< prepareOptions @r @missing
-- | Convert the raw stream to a typed ObjectStream
-toObjectStream :: CSVStringifier () -> Object.Transform (Array String) String
+toObjectStream :: CBOREncoder () -> Object.Transform Foreign Buffer
toObjectStream = unsafeCoerce
--- | Write a record to a CSVStringifier.
+-- | Write a value to a CBOREncoder.
-- |
--- | The record will be emitted on the `Readable` end
--- | of the stream as a string chunk.
+-- | The encoded value will be emitted on the `Readable` end
+-- | of the stream as a Buffer chunk.
-write :: forall @r rl a. RowToList r rl => WriteCSVRecord r rl => CSVStringifier a -> { | r } -> Effect Unit
-write s = writeImpl s <<< writeCSVRecord @r @rl
+write :: forall a r. WriteCBOR a => CBOREncoder r -> a -> Effect Unit
+write s a = writeImpl s $ writeCBOR a
-- | Write a record to a CSVStringifier.
-- |
-- | The record will be emitted on the `Readable` end
-- | of the stream as a string chunk.
writeRaw :: forall a. CSVStringifier a -> Array String -> Effect Unit
writeRaw = writeImpl
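A usage sketch for the encoder half; the records written here are illustrative:

```purescript
module Node.Stream.CBOR.Encode.Example where

import Prelude

import Effect (Effect)
import Node.Stream.CBOR.Encode as CBOR.Encode

-- Create an encoder and write two records to it; encoded CBOR is
-- emitted on the Readable side as Buffer chunks.
main :: Effect Unit
main = do
  enc <- CBOR.Encode.make {}
  CBOR.Encode.write enc { id: 1, name: "henry" }
  CBOR.Encode.write enc { id: 2, name: "julio" }
```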


@ -0,0 +1,11 @@
import {FLOAT32_OPTIONS} from 'cbor-x'
/** @type {<F32>(o: {round: (_a: F32) => boolean, fit: (_a: F32) => boolean, always: (_a: F32) => boolean}) => (f: F32) => FLOAT32_OPTIONS} */
export const f32ToConst = ({round, fit, always}) => a =>
always(a)
? FLOAT32_OPTIONS.ALWAYS
: fit(a)
? FLOAT32_OPTIONS.DECIMAL_FIT
: round(a)
? FLOAT32_OPTIONS.DECIMAL_ROUND
: FLOAT32_OPTIONS.NEVER


@ -0,0 +1,50 @@
module Node.Stream.CBOR.Options where
import Prelude
import Foreign (Foreign, unsafeToForeign)
import Prim.Row (class Nub, class Union)
import Record (merge, modify)
import Type.Prelude (Proxy(..))
data F32
= F32Always
| F32DecimalRound
| F32DecimalFit
| F32Never
derive instance Eq F32
foreign import data CBORStruct :: Type
foreign import f32ToConst :: {always :: F32 -> Boolean, round :: F32 -> Boolean, fit :: F32 -> Boolean} -> F32 -> Foreign
type Options r =
( useRecords :: Boolean
, structures :: Array CBORStruct
, structuredClone :: Boolean
, mapsAsObject :: Boolean
, useFloat32 :: F32
, alwaysUseFloat :: Boolean
, pack :: Boolean
, variableMapSize :: Boolean
, copyBuffers :: Boolean
, bundleStrings :: Boolean
, useTimestamp32 :: Boolean
, largeBigIntToFloat :: Boolean
, useTag259ForMaps :: Boolean
, tagUint8Array :: Boolean
, int64AsNumber :: Boolean
| r
)
prepareOptions
:: forall @r @missing extra minimal minimalExtra
. Union r missing (Options extra)
=> Union r (useFloat32 :: F32) minimal
=> Nub minimal (useFloat32 :: F32 | minimalExtra)
=> { | r }
-> Foreign
prepareOptions a =
unsafeToForeign
$ modify (Proxy @"useFloat32") (f32ToConst {fit: eq F32DecimalFit, round: eq F32DecimalRound, always: eq F32Always})
$ merge a {useFloat32: F32Never}
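Options are supplied as any subset of the `Options` row; a sketch of passing them to the decoder constructor (the example module name is illustrative):

```purescript
module Node.Stream.CBOR.Options.Example where

import Prelude

import Effect (Effect)
import Node.Stream.CBOR.Decode (CBORDecoder)
import Node.Stream.CBOR.Decode as CBOR.Decode
import Node.Stream.CBOR.Options (F32(..))

-- Any subset of the `Options` row may be passed; `useFloat32`
-- defaults to `F32Never` when omitted (see `prepareOptions`).
makeDecoder :: Effect (CBORDecoder ())
makeDecoder = CBOR.Decode.make { useFloat32: F32DecimalFit, mapsAsObject: true }
```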


@ -1 +0,0 @@
module Node.Stream.CSV where


@ -1,98 +1,52 @@
module Pipes.CSV where module Pipes.CBOR where
import Prelude import Prelude
import Control.Monad.Error.Class (liftEither) import Control.Monad.Error.Class (class MonadThrow, liftEither)
import Control.Monad.Except (runExcept) import Control.Monad.Except (runExcept)
import Control.Monad.Rec.Class (forever) import Control.Monad.Rec.Class (class MonadRec, forever)
import Control.Monad.ST.Global as ST
import Control.Monad.ST.Ref as STRef
import Data.Array as Array
import Data.Bifunctor (lmap) import Data.Bifunctor (lmap)
import Data.CSV.Record (class ReadCSVRecord, class WriteCSVRecord, readCSVRecord, writeCSVRecord) import Data.CBOR (class ReadCBOR, class WriteCBOR, readCBOR, writeCBOR)
import Data.FunctorWithIndex (mapWithIndex) import Data.Maybe (Maybe)
import Data.Map as Map import Effect.Aff.Class (class MonadAff)
import Data.Maybe (Maybe(..))
import Data.Tuple.Nested ((/\))
import Effect.Aff (Aff)
import Effect.Class (liftEffect) import Effect.Class (liftEffect)
import Effect.Exception (error) import Effect.Exception (Error, error)
import Node.Buffer (Buffer) import Node.Buffer (Buffer)
import Node.Stream.CSV.Parse as CSV.Parse import Node.Stream.CBOR.Decode as CBOR.Decode
import Node.Stream.CSV.Stringify as CSV.Stringify import Node.Stream.CBOR.Encode as CBOR.Encode
import Pipes (await, yield, (>->)) import Pipes (await, yield, (>->))
import Pipes.Core (Pipe) import Pipes.Core (Pipe)
import Pipes.Node.Stream as Pipes.Stream import Pipes.Node.Stream as Pipes.Stream
import Prim.RowList (class RowToList)
import Record.Extra (class Keys, keys)
import Type.Prelude (Proxy(..))
-- | Transforms buffer chunks of a CSV file to parsed
-- | records of `r`.
-- |
-- | ```
-- | -- == my-data.csv.gz ==
-- | -- id,foo,is_deleted
-- | -- 1,hi,f
-- | -- 2,bye,t
-- |
-- | rows
-- | :: Array {id :: Int, foo :: String, is_deleted :: Boolean}
-- | <- map Array.fromFoldable
-- | $ Pipes.toListM
-- | $ Pipes.Node.Stream.unEOS
-- | $ Pipes.Node.FS.read "my-data.csv.gz"
-- | >-> Pipes.Node.Zlib.gunzip
-- | >-> Pipes.CSV.parse
-- | rows `shouldEqual` [{id: 1, foo: "hi", is_deleted: false}, {id: 2, foo: "bye", is_deleted: true}]
-- | ```
parse
:: forall @r rl
. RowToList r rl
=> ReadCSVRecord r rl
=> Pipe (Maybe Buffer) (Maybe { | r }) Aff Unit
parse = do
raw <- liftEffect $ CSV.Parse.make {}
colsST <- liftEffect $ ST.toEffect $ STRef.new Nothing
-- | Transforms buffer chunks of a CBOR file to parsed values
-- | of type `a`.
decode
:: forall m @a
. MonadRec m
=> MonadAff m
=> MonadThrow Error m
=> ReadCBOR a
=> Pipe (Maybe Buffer) (Maybe a) m Unit
decode = do
raw <- liftEffect $ CBOR.Decode.make {}
let let
readCols = liftEffect $ ST.toEffect $ STRef.read colsST
putCols a = void $ liftEffect $ ST.toEffect $ STRef.write (Just a) colsST
parse' a cols' = liftEither $ lmap (error <<< show) $ runExcept $ readCSVRecord @r @rl cols' a
firstRow a = putCols $ Map.fromFoldable $ mapWithIndex (flip (/\)) a
row a cols' = yield =<< parse' a cols'
unmarshal = forever do unmarshal = forever do
r <- await r <- await
cols <- readCols yield =<< liftEither (lmap (error <<< show) $ runExcept $ readCBOR @a r)
case cols of parser = Pipes.Stream.fromTransform $ CBOR.Decode.toObjectStream raw
Just cols' -> row r cols'
Nothing -> firstRow r
parser = Pipes.Stream.fromTransform $ CSV.Parse.toObjectStream raw
parser >-> Pipes.Stream.inEOS unmarshal parser >-> Pipes.Stream.inEOS unmarshal
-- | Transforms buffer chunks of a CSV file to parsed -- | Encode purescript values as CBOR buffers
-- | arrays of CSV values. encode
parseRaw :: Pipe (Maybe Buffer) (Maybe (Array String)) Aff Unit :: forall m a
parseRaw = do . MonadAff m
s <- liftEffect $ CSV.Parse.toObjectStream <$> CSV.Parse.make {} => MonadThrow Error m
Pipes.Stream.fromTransform s => MonadRec m
=> WriteCBOR a
-- | Transforms CSV rows into stringified CSV records => Pipe (Maybe a) (Maybe Buffer) m Unit
-- | using the given ordered array of column names. encode = do
stringifyRaw :: Array String -> Pipe (Maybe (Array String)) (Maybe String) Aff Unit raw <- liftEffect $ CBOR.Encode.make {}
stringifyRaw columns = do
s <- liftEffect $ CSV.Stringify.toObjectStream <$> CSV.Stringify.make columns {}
Pipes.Stream.fromTransform s
-- | Transforms purescript records into stringified CSV records.
-- |
-- | Columns are inferred from the record's keys, ordered alphabetically.
stringify :: forall r rl. WriteCSVRecord r rl => RowToList r rl => Keys rl => Pipe (Maybe { | r }) (Maybe String) Aff Unit
stringify = do
raw <- liftEffect $ CSV.Stringify.make (Array.fromFoldable $ keys $ Proxy @r) {}
let let
printer = Pipes.Stream.fromTransform $ CSV.Stringify.toObjectStream raw printer = Pipes.Stream.fromTransform $ CBOR.Encode.toObjectStream raw
marshal = forever $ yield =<< (writeCSVRecord @r @rl <$> await) marshal = forever $ yield =<< (writeCBOR <$> await)
Pipes.Stream.inEOS marshal >-> printer Pipes.Stream.inEOS marshal >-> printer
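The pipeline example from the removed `Pipes.CSV.parse` doc comment translates roughly as follows. A sketch only: the file path and record type are illustrative, and the `Pipes.Node.FS` / `Pipes.Node.Zlib` module names are taken from that old doc comment (they live in `node-stream-pipes`):

```purescript
module Pipes.CBOR.Example where

import Prelude

import Data.Array as Array
import Effect.Aff (Aff)
import Pipes ((>->))
import Pipes.CBOR as Pipes.CBOR
import Pipes.Node.FS as Pipes.Node.FS
import Pipes.Node.Stream as Pipes.Stream
import Pipes.Node.Zlib as Pipes.Node.Zlib
import Pipes.Prelude (toListM) as Pipes

-- Read a gzipped CBOR file and decode its top-level values,
-- each expected to be an array of rows; concatenate the results.
rows :: Aff (Array { id :: Int, foo :: String })
rows =
  map (join <<< Array.fromFoldable)
    $ Pipes.toListM
    $ Pipes.Node.FS.read "my-data.cbor.gz"
    >-> Pipes.Node.Zlib.gunzip
    >-> Pipes.CBOR.decode @(Array { id :: Int, foo :: String })
    >-> Pipes.Stream.unEOS
```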


@ -5,10 +5,10 @@ import Prelude
import Data.Maybe (Maybe(..))
import Effect (Effect)
import Effect.Aff (launchAff_)
-import Test.Pipes.CSV as Test.Pipes.CSV
+import Test.Pipes.CBOR as Test.Pipes.CBOR
import Test.Spec.Reporter (specReporter)
import Test.Spec.Runner (defaultConfig, runSpec')
main :: Effect Unit
main = launchAff_ $ runSpec' (defaultConfig { failFast = true, timeout = Nothing }) [ specReporter ] do
-  Test.Pipes.CSV.spec
+  Test.Pipes.CBOR.spec


@ -1,88 +1,87 @@
module Test.Pipes.CSV where module Test.Pipes.CBOR where
import Prelude import Prelude
import Control.Monad.Cont (lift)
import Control.Monad.Gen (chooseInt) import Control.Monad.Gen (chooseInt)
import Control.Monad.Rec.Class (Step(..), tailRecM)
import Data.Array as Array import Data.Array as Array
import Data.DateTime (DateTime) import Data.DateTime (DateTime)
import Data.Foldable (fold, sum) import Data.List ((:))
import Data.List as List
import Data.Maybe (Maybe(..), fromJust) import Data.Maybe (Maybe(..), fromJust)
import Data.Newtype (wrap) import Data.Newtype (wrap)
import Data.PreciseDateTime (fromRFC3339String, toDateTimeLossy) import Data.PreciseDateTime (fromRFC3339String, toDateTimeLossy)
import Data.String.CodePoints as String.CodePoints
import Data.Tuple.Nested ((/\)) import Data.Tuple.Nested ((/\))
import Effect (Effect)
import Effect.Aff (delay)
import Effect.CBOR as CBOR
import Effect.Class (liftEffect) import Effect.Class (liftEffect)
import Effect.Console (log) import Node.Buffer (Buffer)
import Node.Buffer as Buffer
import Node.Encoding (Encoding(..)) import Node.Encoding (Encoding(..))
import Partial.Unsafe (unsafePartial) import Partial.Unsafe (unsafePartial)
import Pipes (yield, (>->)) import Pipes (yield, (>->))
import Pipes.CSV as Pipes.CSV import Pipes.CBOR as Pipes.CBOR
import Pipes.Collect as Pipes.Collect import Pipes.Collect as Pipes.Collect
import Pipes.Construct as Pipes.Construct
import Pipes.Node.Buffer as Pipes.Buffer
import Pipes.Node.Stream as Pipes.Stream import Pipes.Node.Stream as Pipes.Stream
import Pipes.Prelude (chain, map, toListM) as Pipes import Pipes.Prelude (toListM) as Pipes
import Pipes.Util as Pipes.Util
import Test.QuickCheck.Gen (randomSample') import Test.QuickCheck.Gen (randomSample')
import Test.Spec (Spec, before, describe, it) import Test.Spec (Spec, before, describe, it)
import Test.Spec.Assertions (shouldEqual) import Test.Spec.Assertions (shouldEqual)
csv :: String cborHex :: String
csv = cborHex = "82b90002646e616d656568656e72796174c1fb41d990ee6d671aa0b90002646e616d65656a756c696f6174c1fbc1d756dad0bbb646"
"""created,flag,foo,id
2020-01-01T00:00:00.0Z,true,a,1 cborBuf :: Effect Buffer
2024-02-02T08:00:00.0Z,false,apple,2 cborBuf = Buffer.fromString cborHex Hex
1970-01-01T00:00:00.0Z,true,hello,3
""" exp :: Array {name :: String, t :: DateTime}
exp =
[{name: "henry", t: toDateTimeLossy $ unsafePartial fromJust $ fromRFC3339String $ wrap "2024-05-14T19:21:25.611Z"}
,{name: "julio", t: toDateTimeLossy $ unsafePartial fromJust $ fromRFC3339String $ wrap "1920-05-14T20:21:17.067Z"}
]
dt :: String -> DateTime dt :: String -> DateTime
dt = toDateTimeLossy <<< unsafePartial fromJust <<< fromRFC3339String <<< wrap dt = toDateTimeLossy <<< unsafePartial fromJust <<< fromRFC3339String <<< wrap
spec :: Spec Unit spec :: Spec Unit
spec = spec =
describe "Pipes.CSV" do describe "Pipes.CBOR" do
it "stringify" do it "encode" do
let bytes
objs = <- Pipes.Collect.toBuffer
[ { id: 1, foo: "a", flag: true, created: dt "2020-01-01T00:00:00Z" } $ Pipes.Stream.withEOS (yield exp)
, { id: 2, foo: "apple", flag: false, created: dt "2024-02-02T08:00:00Z" } >-> Pipes.CBOR.encode
, { id: 3, foo: "hello", flag: true, created: dt "1970-01-01T00:00:00Z" } >-> Pipes.Stream.unEOS
] act <- liftEffect $ CBOR.decode bytes
act `shouldEqual` exp
csv' <- map fold $ Pipes.Collect.toArray $ Pipes.Stream.withEOS (Pipes.Construct.eachArray objs) >-> Pipes.CSV.stringify >-> Pipes.Stream.unEOS
csv' `shouldEqual` csv
describe "parse" do describe "parse" do
it "parses csv" do it "parses csv" do
rows <- map Array.fromFoldable buf <- liftEffect $ cborBuf
$ Pipes.toListM rows <- Pipes.toListM
$ Pipes.Stream.withEOS (yield csv) $ Pipes.Stream.withEOS (yield buf *> lift (delay $ wrap 10.0))
>-> Pipes.Stream.inEOS (Pipes.Buffer.fromString UTF8) >-> Pipes.CBOR.decode
>-> Pipes.CSV.parse
>-> Pipes.Stream.unEOS
rows `shouldEqual` rows `shouldEqual` ((Just exp) : Nothing : List.Nil)
[ { id: 1, foo: "a", flag: true, created: dt "2020-01-01T00:00:00Z" }
, { id: 2, foo: "apple", flag: false, created: dt "2024-02-02T08:00:00Z" }
, { id: 3, foo: "hello", flag: true, created: dt "1970-01-01T00:00:00Z" }
]
before before
(do (do
nums <- liftEffect $ randomSample' 100000 (chooseInt 0 9) nums <- liftEffect $ randomSample' 100000 (chooseInt 0 9)
let let
chars = [ "i","d","\n" ] <> join ((\n -> [show n, "\n"]) <$> nums) objs = (\n -> {id: n}) <$> nums
bufs <- Pipes.Collect.toArray bytes <-
$ Pipes.Stream.withEOS (Pipes.Construct.eachArray chars) Pipes.Collect.toBuffer
>-> Pipes.Util.chunked 1000 $ Pipes.Stream.withEOS (yield objs)
>-> Pipes.Stream.inEOS (Pipes.map fold >-> Pipes.Buffer.fromString UTF8) >-> Pipes.CBOR.encode
>-> Pipes.Stream.unEOS >-> Pipes.Stream.unEOS
pure $ nums /\ bufs pure $ nums /\ bytes
) )
$ it "parses large csv" \(nums /\ bufs) -> do $ it "parses large csv" \(nums /\ bytes) -> do
rows <- rows <-
Pipes.Collect.toArray Pipes.Collect.toArray
$ Pipes.Stream.withEOS (Pipes.Construct.eachArray bufs) $ Pipes.Stream.withEOS (yield bytes)
>-> Pipes.CSV.parse @(id :: Int) >-> Pipes.CBOR.decode @(Array {id :: Int})
>-> Pipes.Stream.unEOS >-> Pipes.Stream.unEOS
rows `shouldEqual` ((\id -> { id }) <$> nums) rows `shouldEqual` [(\id -> { id }) <$> nums]