SlideShare a Scribd company logo
1 of 195
Download to read offline
K Y I V   2 0 1 9
Luciano Mammino (@loige)
IT’S ABOUT TIME TOIT’S ABOUT TIME TO
EMBRACE STREAMSEMBRACE STREAMS
  
loige.link/streams-kyiv
May 18th
1
// buffer-copy.js
// Copies a file by loading its ENTIRE content into memory first
// (the "buffer way" — fine for small files, fails on huge ones).
const {
  readFileSync,
  writeFileSync
} = require('fs')

// CLI usage: node buffer-copy.js <src> <dest>
const [,, src, dest] = process.argv

// slurp the whole source file, then flush it to the destination
writeFileSync(dest, readFileSync(src))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
@loige2
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
// read entire file content
const content = readFileSync(src)
// write that content somewhere else
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
const {
readFileSync,
writeFileSync
} = require('fs')
// buffer-copy.js1
2
3
4
5
6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
// write that content somewhere else13
writeFileSync(dest, content)14
@loige2
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
// read entire file content
const content = readFileSync(src)
// write that content somewhere else
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
const {
readFileSync,
writeFileSync
} = require('fs')
// buffer-copy.js1
2
3
4
5
6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
// write that content somewhere else13
writeFileSync(dest, content)14
// read entire file content
const content = readFileSync(src)
// buffer-copy.js1
2
const {3
readFileSync,4
writeFileSync5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
11
12
// write that content somewhere else13
writeFileSync(dest, content)14
@loige2
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
// read entire file content
const content = readFileSync(src)
// write that content somewhere else
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
const {
readFileSync,
writeFileSync
} = require('fs')
// buffer-copy.js1
2
3
4
5
6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
// write that content somewhere else13
writeFileSync(dest, content)14
// read entire file content
const content = readFileSync(src)
// buffer-copy.js1
2
const {3
readFileSync,4
writeFileSync5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
11
12
// write that content somewhere else13
writeFileSync(dest, content)14
// write that content somewhere else
writeFileSync(dest, content)
// buffer-copy.js1
2
const {3
readFileSync,4
writeFileSync5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
// read entire file content10
const content = readFileSync(src)11
12
13
14
@loige2
@loige3
WE DO THIS ALL THE TIMEWE DO THIS ALL THE TIME
@loige3
WE DO THIS ALL THE TIMEWE DO THIS ALL THE TIME
AND IT'S OKAND IT'S OK
@loige3
WE DO THIS ALL THE TIMEWE DO THIS ALL THE TIME
AND IT'S OKAND IT'S OK
BUT SOMETIMES ...BUT SOMETIMES ...
@loige3
@loige
 ERR_FS_FILE_TOO_LARGE!  ERR_FS_FILE_TOO_LARGE! 
File size is greater than possible Buffer
4
BUT WHY?BUT WHY?
@loige5
IF BYTES IF BYTES WEREWERE BLOCKS... BLOCKS...@loige
6
MARIO CAN LIFTMARIO CAN LIFT
FEW BLOCKSFEW BLOCKS
@loige
7
BUT NOT TOO MANY...BUT NOT TOO MANY...@loige
?!
8
WHAT CAN WE DO IF WE HAVE TOWHAT CAN WE DO IF WE HAVE TO
MOVE MANY BLOCKS?MOVE MANY BLOCKS?
@loige9
WE CAN MOVE THEM ONE BY ONE!WE CAN MOVE THEM ONE BY ONE!
@loige
we stream them...
10
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
Cloud Architect
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
Cloud Architect
Blog: 
Twitter: 
GitHub:   
loige.co
@loige
@lmammino
11
 HELLO, I AM LUCIANO! HELLO, I AM LUCIANO!
Cloud Architect
Blog: 
Twitter: 
GitHub:   
loige.co
@loige
@lmammino
11
code: loige.link/streams-examples
loige.link/streams-kyiv
12
01. BUFFERS VS01. BUFFERS VS  
        STREAMS        STREAMS
@loige13
BUFFERBUFFER: DATA STRUCTURE TO STORE AND: DATA STRUCTURE TO STORE AND
TRANSFER ARBITRARY BINARY DATATRANSFER ARBITRARY BINARY DATA
@loige
*Note that this is loading all the content of the file in memory
*
14
STREAMSTREAM: ABSTRACT INTERFACE FOR: ABSTRACT INTERFACE FOR
WORKING WITH STREAMING DATAWORKING WITH STREAMING DATA
@loige
*It does not load all the data straight away
*
15
FILE COPY: FILE COPY: THE BUFFER WAYTHE BUFFER WAY
@loige
// buffer-copy.js
const {
readFileSync,
writeFileSync
} = require('fs')
const [,, src, dest] = process.argv
const content = readFileSync(src)
writeFileSync(dest, content)
1
2
3
4
5
6
7
8
9
10
16
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
// Copies a file chunk-by-chunk so memory stays flat regardless of file size.
const {
  createReadStream,
  createWriteStream
} = require('fs')

// CLI usage: node stream-copy.js <src> <dest>
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)

// pipe() instead of a raw 'data' handler: the original
//   srcStream.on('data', (data) => destStream.write(data))
// ignored write()'s return value, so a slow destination lets the writable's
// internal buffer grow without bound. pipe() implements backpressure for us:
// it pauses the source when the destination is saturated and resumes on 'drain'.
srcStream.pipe(destStream)
1
2
3
4
5
6
7
8
9
10
11
@loige
* Careful: this implementation is not optimal
*
17
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
const {
createReadStream,
createWriteStream
} = require('fs')
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))
1
2
3
4
5
6
7
8
9
10
11
createReadStream,
createWriteStream
// stream-copy.js1
2
const {3
4
5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
srcStream.on('data', (data) => destStream.write(data))11
@loige
* Careful: this implementation is not optimal
*
17
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
const {
createReadStream,
createWriteStream
} = require('fs')
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))
1
2
3
4
5
6
7
8
9
10
11
createReadStream,
createWriteStream
// stream-copy.js1
2
const {3
4
5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
srcStream.on('data', (data) => destStream.write(data))11
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
// stream-copy.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
srcStream.on('data', (data) => destStream.write(data))11
@loige
* Careful: this implementation is not optimal
*
17
FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY
// stream-copy.js
const {
createReadStream,
createWriteStream
} = require('fs')
const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))
1
2
3
4
5
6
7
8
9
10
11
createReadStream,
createWriteStream
// stream-copy.js1
2
const {3
4
5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
srcStream.on('data', (data) => destStream.write(data))11
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
// stream-copy.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
7
const [,, src, dest] = process.argv8
9
10
srcStream.on('data', (data) => destStream.write(data))11 srcStream.on('data', (data) => destStream.write(data))
// stream-copy.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
7
const [,, src, dest] = process.argv8
const srcStream = createReadStream(src)9
const destStream = createWriteStream(dest)10
11
@loige
* Careful: this implementation is not optimal
*
17
MEMORY COMPARISON (~600MB FILE)MEMORY COMPARISON (~600MB FILE)
node --inspect-brk buffer-copy.js assets/poster.psd ~/Downloads/poster.psd
@loige18
MEMORY COMPARISON (~600MB FILE)MEMORY COMPARISON (~600MB FILE)
node --inspect-brk stream-copy.js assets/poster.psd ~/Downloads/poster.psd
@loige19
LET'S TRY WITH A BIG FILE (~10GB)LET'S TRY WITH A BIG FILE (~10GB)
@loige20
LET'S TRY WITH A BIG FILE (~10GB)LET'S TRY WITH A BIG FILE (~10GB)
node --inspect-brk stream-copy.js assets/the-matrix-hd.mkv ~/Downloads/the-matrix-hd.mkv
@loige21
 STREAMS VS BUFFERS  STREAMS VS BUFFERS 
Streams keep a low memory footprint
even with large amounts of data 
 
Streams allow you to process data as
soon as it arrives
@loige22
03. STREAM TYPES03. STREAM TYPES  
       & APIS       & APIS
@loige23
ALL STREAMS ARE ALL STREAMS ARE EVENT EMITTERSEVENT EMITTERS
A stream instance is an object that emits events when its internal
state changes, for instance:
s.on('readable', () => {}) // ready to be consumed
s.on('data', (chunk) => {}) // new data is available
s.on('error', (err) => {}) // some error happened
s.on('end', () => {}) // no more data available
The events available depend on the type of stream
@loige24
READABLEREADABLE STREAMS STREAMS
A readable stream represents a source from which data is consumed.
Examples:
fs readStream
process.stdin
HTTP response (client-side)
HTTP request (server-side)
AWS S3 GetObject (data field)
It supports two modes for data consumption: flowing and paused (or non-
flowing) mode.
@loige25
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
@loige26
@loige
1
2
3
Source data
Readable stream in
flowing mode
data listener
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
27
@loige
12
3
Source data
Readable stream in
flowing mode
Read
data listener
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
28
@loige
12
3
Source data
Readable stream in
flowing mode
data listener
data
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
29
@loige
2
3
Source data
Readable stream in
flowing mode
data listener
Read
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
30
@loige
2
3
Source data
Readable stream in
flowing mode
data listener
data
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
31
@loige
3
Source data
Readable stream in
flowing mode
data listener
Read
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
32
@loige
3
Source data
Readable stream in
flowing mode
data listener
data
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
33
@loige
Source data
Readable stream in
flowing mode
Read
data listener
(end)
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
34
@loige
Source data
Readable stream in
flowing mode
data listener
end
(end)
When no more data is available, end is emitted.
READABLE STREAMSREADABLE STREAMS
Data is read from source automatically and chunks are emitted as soon
as they are available.
35
// count-emojis-flowing.js
// Counts emoji characters in a file using a readable stream in flowing mode.
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

// Set gives O(1) membership checks; Array#includes in the per-character
// loop below would be accidental O(chars * emojis)
const emojis = new Set(Object.keys(EMOJI_MAP))

// Passing an encoding makes the stream emit strings and (via StringDecoder)
// keeps multi-byte UTF-8 sequences intact across chunk boundaries —
// chunk.toString('utf8') on raw Buffers could split an emoji in half.
const file = createReadStream(process.argv[2], { encoding: 'utf8' })
let counter = 0

file.on('data', chunk => {
  for (let char of chunk) {
    if (emojis.has(char)) {
      counter++
    }
  }
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
file.on('data', chunk => {
})
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
file.on('data', chunk => {
})
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
for (let char of chunk.toString('utf8')) {
}
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
12
if (emojis.includes(char)) {13
counter++14
}15
16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
file.on('data', chunk => {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const { EMOJI_MAP } = require('emoji') // from npm
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
const file = createReadStream(process.argv[2])
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
let counter = 0
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
9
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
file.on('data', chunk => {
})
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
11
for (let char of chunk.toString('utf8')) {12
if (emojis.includes(char)) {13
counter++14
}15
}16
17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
for (let char of chunk.toString('utf8')) {
}
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
12
if (emojis.includes(char)) {13
counter++14
}15
16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
if (emojis.includes(char)) {
counter++
}
// count-emojis-flowing.js1
2
const { createReadStream } = require('fs')3
const { EMOJI_MAP } = require('emoji') // from npm4
5
const emojis = Object.keys(EMOJI_MAP)6
7
const file = createReadStream(process.argv[2])8
let counter = 09
10
file.on('data', chunk => {11
for (let char of chunk.toString('utf8')) {12
13
14
15
}16
})17
file.on('end', () => console.log(`Found ${counter} emojis`))18
file.on('error', err => console.error(`Error reading file: ${err}`))19
@loige36
loige.link/up-emojiart
@loige37
READABLE STREAMS AREREADABLE STREAMS ARE
ALSO ALSO ASYNC ITERATORSASYNC ITERATORS  
((NODE.JS 10+)NODE.JS 10+)
@loige38
// count-emojis-async-iterator.js
// Same emoji counter, consuming the readable stream as an async iterator
// (Node.js 10+).
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

async function main () {
  // Set gives O(1) membership checks instead of O(n) Array#includes
  const emojis = new Set(Object.keys(EMOJI_MAP))
  // utf8 encoding makes the stream yield strings and keeps multi-byte
  // characters intact across chunk boundaries (StringDecoder internally)
  const file = createReadStream(process.argv[2], { encoding: 'utf8' })
  let counter = 0

  for await (let chunk of file) {
    for (let char of chunk) {
      if (emojis.has(char)) {
        counter++
      }
    }
  }

  console.log(`Found ${counter} emojis`)
}

// don't leave the promise floating: surface stream/file errors and
// set a non-zero exit code instead of an unhandled rejection
main().catch(err => {
  console.error(`Error reading file: ${err}`)
  process.exitCode = 1
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21 @loige39
// count-emojis-async-iterator.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
async function main () {
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
for await (let chunk of file) {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
}
console.log(`Found ${counter} emojis`)
}
main()
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
for await (let chunk of file) {
}
// count-emojis-async-iterator.js1
const { createReadStream } = require('fs')2
const { EMOJI_MAP } = require('emoji') // from npm3
4
async function main () {5
const emojis = Object.keys(EMOJI_MAP)6
const file = createReadStream(process.argv[2])7
let counter = 08
9
10
for (let char of chunk.toString('utf8')) {11
if (emojis.includes(char)) {12
counter++13
}14
}15
16
17
console.log(`Found ${counter} emojis`)18
}19
20
main()21 @loige39
// count-emojis-async-iterator.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm
async function main () {
const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0
for await (let chunk of file) {
for (let char of chunk.toString('utf8')) {
if (emojis.includes(char)) {
counter++
}
}
}
console.log(`Found ${counter} emojis`)
}
main()
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
for await (let chunk of file) {
}
// count-emojis-async-iterator.js1
const { createReadStream } = require('fs')2
const { EMOJI_MAP } = require('emoji') // from npm3
4
async function main () {5
const emojis = Object.keys(EMOJI_MAP)6
const file = createReadStream(process.argv[2])7
let counter = 08
9
10
for (let char of chunk.toString('utf8')) {11
if (emojis.includes(char)) {12
counter++13
}14
}15
16
17
console.log(`Found ${counter} emojis`)18
}19
20
main()21
async function main () {
}
// count-emojis-async-iterator.js1
const { createReadStream } = require('fs')2
const { EMOJI_MAP } = require('emoji') // from npm3
4
5
const emojis = Object.keys(EMOJI_MAP)6
const file = createReadStream(process.argv[2])7
let counter = 08
9
for await (let chunk of file) {10
for (let char of chunk.toString('utf8')) {11
if (emojis.includes(char)) {12
counter++13
}14
}15
}16
17
console.log(`Found ${counter} emojis`)18
19
20
main()21 @loige39
WRITABLEWRITABLE STREAMS STREAMS
A writable stream is an abstraction that allows you to write data to a destination
 
Examples:
fs writeStream
process.stdout, process.stderr
HTTP request (client-side)
HTTP response (server-side)
AWS S3 PutObject (body parameter)
@loige40
// writable-http-request.js
// Demonstrates a writable stream: a client-side HTTP request body.
const http = require('http')

const req = http.request(
  {
    hostname: 'enx6b07hdu6cs.x.pipedream.net',
    method: 'POST'
  },
  resp => {
    console.log(`Server responded with "${resp.statusCode}"`)
  }
)

req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))

// fixed: the newline escape had lost its backslash ('...n' -> '...\n')
req.write('writing some content...\n')
// end() performs a final write and closes the writable stream
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
@loige41
// writable-http-request.js
const http = require('http')
const req = http.request(
{
hostname: 'enx6b07hdu6cs.x.pipedream.net',
method: 'POST'
},
resp => {
console.log(`Server responded with "${resp.statusCode}"`)
}
)
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
req.write('writing some content...n')
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const req = http.request(
)
// writable-http-request.js1
const http = require('http')2
3
4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
@loige41
// writable-http-request.js
const http = require('http')
const req = http.request(
{
hostname: 'enx6b07hdu6cs.x.pipedream.net',
method: 'POST'
},
resp => {
console.log(`Server responded with "${resp.statusCode}"`)
}
)
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
req.write('writing some content...n')
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const req = http.request(
)
// writable-http-request.js1
const http = require('http')2
3
4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
// writable-http-request.js1
const http = require('http')2
3
const req = http.request(4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
)12
13
14
15
16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
@loige41
// writable-http-request.js
const http = require('http')
const req = http.request(
{
hostname: 'enx6b07hdu6cs.x.pipedream.net',
method: 'POST'
},
resp => {
console.log(`Server responded with "${resp.statusCode}"`)
}
)
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
req.write('writing some content...n')
req.end('last write & close the stream')
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
const req = http.request(
)
// writable-http-request.js1
const http = require('http')2
3
4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))
// writable-http-request.js1
const http = require('http')2
3
const req = http.request(4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
)12
13
14
15
16
17
req.write('writing some content...n')18
req.end('last write & close the stream')19
req.write('writing some content...n')
req.end('last write & close the stream')
// writable-http-request.js1
const http = require('http')2
3
const req = http.request(4
{5
hostname: 'enx6b07hdu6cs.x.pipedream.net',6
method: 'POST'7
},8
resp => {9
console.log(`Server responded with "${resp.statusCode}"`)10
}11
)12
13
req.on('finish', () => console.log('request sent'))14
req.on('close', () => console.log('Connection closed'))15
req.on('error', err => console.error(`Request failed: ${err}`))16
17
18
19
@loige41
@loige42
loige.link/writable-http-req
@loige43
BACKPRESSUREBACKPRESSURE
When writing large amounts of data you
should make sure you handle the stop write
signal and the drain event
 
loige.link/backpressure
@loige44
// stream-copy-safe.js
// Copies the file at argv[2] to argv[3] while respecting backpressure:
// when the destination's internal buffer fills up (write() returns false),
// the source is paused until the destination emits 'drain'.
// NOTE(review): deliberately minimal demo — 'end' and 'error' events are
// not handled here (the talk covers that separately).
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
// write() returns false when the destination buffer is over highWaterMark
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
srcStream.pause()
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
srcStream.pause()
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
destStream.once('drain', () => srcStream.resume())
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
15
}16
})17
@loige45
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
// we are overflowing the destination, we should pause
srcStream.pause()
// we will resume when the destination stream is drained
destStream.once('drain', () => srcStream.resume())
}
})
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
const canContinue = destStream.write(data)
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
if (!canContinue) {
}
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
16
})17
srcStream.pause()
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
13
// we will resume when the destination stream is drained14
destStream.once('drain', () => srcStream.resume())15
}16
})17
destStream.once('drain', () => srcStream.resume())
// stream-copy-safe.js1
2
const { createReadStream, createWriteStream } = require('fs')3
4
const [, , src, dest] = process.argv5
const srcStream = createReadStream(src)6
const destStream = createWriteStream(dest)7
8
srcStream.on('data', data => {9
const canContinue = destStream.write(data)10
if (!canContinue) {11
// we are overflowing the destination, we should pause12
srcStream.pause()13
// we will resume when the destination stream is drained14
15
}16
})17
@loige45
OTHER TYPES OF STREAMOTHER TYPES OF STREAM
Duplex Stream 
streams that are both Readable and Writable.  
(net.Socket) 
 
Transform Stream 
Duplex streams that can modify or transform the data as it is written
and read. 
(zlib.createGzip(), crypto.createCipheriv())
@loige46
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
transform stream
@loige47
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
1. write data
transform stream
(readable stream)
@loige47
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
1. write data
transform stream
2. transform the data
(readable stream)
@loige47
ANATOMY OF A TRANSFORM STREAMANATOMY OF A TRANSFORM STREAM
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige47
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige48
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige
Uncompressed data
48
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige
Uncompressed data
compress
zlib.createGzip()
48
GZIP EXAMPLEGZIP EXAMPLE
1. write data
transform stream
3. read transformed data2. transform the data
(readable stream) (writable stream)
@loige
Uncompressed data Compressed data
compress
zlib.createGzip()
48
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write()
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
49@loige
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drain
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
49@loige
(Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
⚡ drain
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
⚡ drainresume()
49@loige
(Backpressure) (Backpressure)
HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS?
Readable Transform Writable
⚡   data write() ⚡   data write()
pause()
⚡ drainresume()
pause()
⚡ drainresume()
49@loige
(Backpressure) (Backpressure)
You also have to handle end events and errors!
gzipStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
gzipStream.pause()
destStream.once('drain', () => {
gzipStream.resume()
})
}
})
gzipStream.on('end', () => {
destStream.end()
})
// ⚠ TODO: handle errors!
// stream-copy-gzip.js
// Source side of the manual gzip-copy pipeline: reads argv[2], pushes the
// chunks through a gzip Transform stream with manual backpressure handling.
// (The gzipStream -> destStream forwarding lives in the companion handlers.)
const {
  createReadStream,
  createWriteStream
} = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)

srcStream.on('data', data => {
  const canContinue = gzipStream.write(data)
  if (!canContinue) {
    // gzip's buffer is over its highWaterMark: pause the source
    srcStream.pause()
    gzipStream.once('drain', () => {
      srcStream.resume()
    })
  }
})

srcStream.on('end', () => {
  // check if there's buffered data left
  const remainingData = gzipStream.read()
  if (remainingData !== null) {
    // fix: the original called destStream.write() with no argument,
    // silently dropping the last buffered chunk of compressed data
    destStream.write(remainingData)
  }
  gzipStream.end()
})
@loige50
gzipStream.on('data', data => {
const canContinue = destStream.write(data)
if (!canContinue) {
gzipStream.pause()
destStream.once('drain', () => {
gzipStream.resume()
})
}
})
gzipStream.on('end', () => {
destStream.end()
})
// ⚠ TODO: handle errors!
// stream-copy-gzip.js
const {
createReadStream,
createWriteStream
} = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)
srcStream.on('data', data => {
const canContinue = gzipStream.write(data)
if (!canContinue) {
srcStream.pause()
gzipStream.once('drain', () => {
srcStream.resume()
})
}
})
srcStream.on('end', () => {
// check if there's buffered data left
const remainingData = gzipStream.read()
if (remainingData !== null) {
destStream.write()
}
gzipStream.end()
})
@loige50
03. PIPE()03. PIPE()
@loige51
readable
.pipe(transform1)
.pipe(transform2)
.pipe(transform3)
.pipe(writable)
readable.pipe(writableDest)
@loige
Connects a readable stream to a writable stream
A transform stream can be used as a destination as well
It returns the destination stream allowing for a chain of pipes
52
// stream-copy-gzip-pipe.js
const {
createReadStream,
createWriteStream
} = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)
srcStream
.pipe(gzipStream)
.pipe(destStream)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
@loige53
// stream-copy-gzip-pipe.js
const {
createReadStream,
createWriteStream
} = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)
srcStream
.pipe(gzipStream)
.pipe(destStream)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
srcStream
.pipe(gzipStream)
.pipe(destStream)
// stream-copy-gzip-pipe.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
const { createGzip } = require('zlib')7
8
const [, , src, dest] = process.argv9
const srcStream = createReadStream(src)10
const gzipStream = createGzip()11
const destStream = createWriteStream(dest)12
13
14
15
16
@loige53
// stream-copy-gzip-pipe.js
const {
createReadStream,
createWriteStream
} = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)
srcStream
.pipe(gzipStream)
.pipe(destStream)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
srcStream
.pipe(gzipStream)
.pipe(destStream)
// stream-copy-gzip-pipe.js1
2
const {3
createReadStream,4
createWriteStream5
} = require('fs')6
const { createGzip } = require('zlib')7
8
const [, , src, dest] = process.argv9
const srcStream = createReadStream(src)10
const gzipStream = createGzip()11
const destStream = createWriteStream(dest)12
13
14
15
16
@loige53
readable
.pipe(decompress)
.pipe(decrypt)
.pipe(convert)
.pipe(encrypt)
.pipe(compress)
.pipe(writeToDisk)
Setup complex pipelines with pipe
@loige
This is the most common way to use streams
54
readable
.on('error', handleErr)
.pipe(decompress)
.on('error', handleErr)
.pipe(decrypt)
.on('error', handleErr)
.pipe(convert)
.on('error', handleErr)
.pipe(encrypt)
.on('error', handleErr)
.pipe(compress)
.on('error', handleErr)
.pipe(writeToDisk)
.on('error', handleErr)
Handling errors (correctly)
@loige55
readable
.on('error', handleErr)
.pipe(decompress)
.on('error', handleErr)
.pipe(decrypt)
.on('error', handleErr)
.pipe(convert)
.on('error', handleErr)
.pipe(encrypt)
.on('error', handleErr)
.pipe(compress)
.on('error', handleErr)
.pipe(writeToDisk)
.on('error', handleErr)
Handling errors (correctly)
@loige
 
handleErr should end and destroy the streams
(it doesn't happen automatically)
 
55
readable
.on('error', handleErr)
.pipe(decompress)
.on('error', handleErr)
.pipe(decrypt)
.on('error', handleErr)
.pipe(convert)
.on('error', handleErr)
.pipe(encrypt)
.on('error', handleErr)
.pipe(compress)
.on('error', handleErr)
.pipe(writeToDisk)
.on('error', handleErr)
Handling errors (correctly)
@loige
 
handleErr should end and destroy the streams
(it doesn't happen automatically)
 
55
04. STREAM UTILITIES04. STREAM UTILITIES
@loige56
// stream-copy-gzip-pipeline.js
// Same gzip-copy as before, but using stream.pipeline (Node.js 10+):
// pipeline wires the streams together, propagates errors from any stage,
// and destroys all streams on failure — no manual backpressure code needed.
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
// final callback: invoked once, with the first error or on completion
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
function onEnd (err) {
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
function onEnd (err) {
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
20
)21
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
14
15
16
17
18
console.log('Done!')19
}20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
function onEnd (err) {
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
20
)21
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
14
15
16
17
18
console.log('Done!')19
}20
)21
console.log('Done!')
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
19
}20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige57
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
function onEnd (err) {
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
20
)21
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
14
15
16
17
18
console.log('Done!')19
}20
)21
console.log('Done!')
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
19
}20
)21
// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
stream.pipeline(...streams, callback) - Node.js 10+
@loige
You can pass multiple streams (they will be piped)
The last argument is a callback. If invoked with an
error, it means the pipeline failed at some point.
All the streams are ended and destroyed correctly.
57
// stream-copy-gzip-pump.js
const pump = require('pump') // from npm
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pump( // just swap pipeline with pump!
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
For Node.js < 10: pump - npm.im/pump
@loige58
// stream-copy-gzip-pump.js
const pump = require('pump') // from npm
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pump( // just swap pipeline with pump!
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const pump = require('pump') // from npm
// stream-copy-gzip-pump.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pump( // just swap pipeline with pump!9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
For Node.js < 10: pump - npm.im/pump
@loige58
// stream-copy-gzip-pump.js
const pump = require('pump') // from npm
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pump( // just swap pipeline with pump!
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const pump = require('pump') // from npm
// stream-copy-gzip-pump.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pump( // just swap pipeline with pump!9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
pump( // just swap pipeline with pump!
)
// stream-copy-gzip-pump.js1
2
const pump = require('pump') // from npm3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
For Node.js < 10: pump - npm.im/pump
@loige58
pumpify(...streams) - 
Create reusable pieces of pipeline
npm.im/pumpify
@loige
Let's create EncGz, an application that
helps us to read and write encrypted-
gzipped files
59
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getCipherKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getCipherKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getChiperKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getChiperKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getCipherKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getCipherKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
18
19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
}25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getCipherKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getCipherKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
18
19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
}25
const stream = pumpify(encryptStream, gzipStream)
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
21
stream.initVect = initVect22
23
return stream24
}25
@loige60
// encgz-stream.js - utility library
const {
createCipheriv,
createDecipheriv,
randomBytes,
createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm
// calculates md5 of the secret (trimmed)
function getCipherKey (secret) {}
function createEncgz (secret) {
const initVect = randomBytes(16)
const cipherKey = getCipherKey(secret)
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
const stream = pumpify(encryptStream, gzipStream)
stream.initVect = initVect
return stream
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
function createEncgz (secret) {
}
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
25
const encryptStream = createCipheriv('aes256', cipherKey, initVect)
const gzipStream = createGzip()
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
18
19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
return stream24
}25
const stream = pumpify(encryptStream, gzipStream)
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
21
stream.initVect = initVect22
23
return stream24
}25
return stream
// encgz-stream.js - utility library1
2
const {3
createCipheriv,4
createDecipheriv,5
randomBytes,6
createHash7
} = require('crypto')8
const { createGzip, createGunzip } = require('zlib')9
const pumpify = require('pumpify') // from npm10
11
// calculates md5 of the secret (trimmed)12
function getChiperKey (secret) {}13
14
function createEncgz (secret) {15
const initVect = randomBytes(16)16
const cipherKey = getChiperKey(secret)17
const encryptStream = createCipheriv('aes256', cipherKey, initVect)18
const gzipStream = createGzip()19
20
const stream = pumpify(encryptStream, gzipStream)21
stream.initVect = initVect22
23
24
}25
@loige60
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
5
6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
5
6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const stream = pumpify(gunzipStream, decryptStream)
return stream
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
8
9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
@loige61
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
const cipherKey = getChiperKey(secret)
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
const stream = pumpify(gunzipStream, decryptStream)
return stream
}
module.exports = {
createEncgz,
createDecgz
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
function createDecgz (secret, initVect) {
}
// encgz-stream.js (...continue from previous slide)1
2
3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
const gunzipStream = createGunzip()
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
5
6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
const stream = pumpify(gunzipStream, decryptStream)
return stream
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
8
9
}10
11
module.exports = {12
createEncgz,13
createDecgz14
}15
module.exports = {
createEncgz,
createDecgz
}
// encgz-stream.js (...continue from previous slide)1
2
function createDecgz (secret, initVect) {3
const cipherKey = getChiperKey(secret)4
const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5
const gunzipStream = createGunzip()6
7
const stream = pumpify(gunzipStream, decryptStream)8
return stream9
}10
11
12
13
14
15
@loige61
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const encgz = createEncgz(secret)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const encgz = createEncgz(secret)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
pipeline(
)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
21 @loige62
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')
const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)
pipeline(
process.stdin,
encgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const [, , secret] = process.argv
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
const encgz = createEncgz(secret)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21
pipeline(
)
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
11
process.stdin,12
encgz,13
process.stdout,14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
21
process.stdin,
encgz,
process.stdout,
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createEncgz } = require('./encgz-stream')4
5
const [, , secret] = process.argv6
7
const encgz = createEncgz(secret)8
console.error(`init vector: ${encgz.initVect.toString('hex')}`)9
10
pipeline(11
12
13
14
function onEnd (err) {15
if (err) {16
console.error(`Error: ${err}`)17
process.exit(1)18
}19
}20
)21 @loige62
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
const [, , secret, initVect] = process.argv
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
@loige63
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
const [, , secret, initVect] = process.argv
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
const [, , secret, initVect] = process.argv
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
pipeline(10
process.stdin,11
decgz,12
process.stdout,13
function onEnd (err) {14
if (err) {15
console.error(`Error: ${err}`)16
process.exit(1)17
}18
}19
)20
@loige63
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')
const [, , secret, initVect] = process.argv
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
pipeline(
process.stdin,
decgz,
process.stdout,
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
const [, , secret, initVect] = process.argv
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
6
7
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8
9
pipeline(10
process.stdin,11
decgz,12
process.stdout,13
function onEnd (err) {14
if (err) {15
console.error(`Error: ${err}`)16
process.exit(1)17
}18
}19
)20
const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1
2
const { pipeline } = require('stream')3
const { createDecgz } = require('./encgz-stream')4
5
const [, , secret, initVect] = process.argv6
7
8
9
pipeline(10
process.stdin,11
decgz,12
process.stdout,13
function onEnd (err) {14
if (err) {15
console.error(`Error: ${err}`)16
process.exit(1)17
}18
}19
)20
@loige63
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino

More Related Content

What's hot

PSR-7 and PSR-15, why can't you ignore them
PSR-7 and PSR-15, why can't you ignore themPSR-7 and PSR-15, why can't you ignore them
PSR-7 and PSR-15, why can't you ignore themSérgio Rafael Siqueira
 
Bash in theory and in practice - part two
Bash in theory and in practice - part twoBash in theory and in practice - part two
Bash in theory and in practice - part twoValerio Balbi
 
Bash in theory and in practice - part one
Bash in theory and in practice - part oneBash in theory and in practice - part one
Bash in theory and in practice - part oneValerio Balbi
 
Basic command for linux
Basic command for linuxBasic command for linux
Basic command for linuxgt0ne
 
Intro to pl/PHP Oscon2007
Intro to pl/PHP Oscon2007Intro to pl/PHP Oscon2007
Intro to pl/PHP Oscon2007Robert Treat
 
REST in peace @ IPC 2012 in Mainz
REST in peace @ IPC 2012 in MainzREST in peace @ IPC 2012 in Mainz
REST in peace @ IPC 2012 in MainzAlessandro Nadalin
 
Getting groovy (ODP)
Getting groovy (ODP)Getting groovy (ODP)
Getting groovy (ODP)Nick Dixon
 
Piratte installation
Piratte installationPiratte installation
Piratte installationKampa Lavanya
 
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)PROIDEA
 
mapserver_install_linux
mapserver_install_linuxmapserver_install_linux
mapserver_install_linuxtutorialsruby
 
Drupal and Open shift (and php)
Drupal and Open shift (and php)Drupal and Open shift (and php)
Drupal and Open shift (and php)Phase2
 
Devinsampa nginx-scripting
Devinsampa nginx-scriptingDevinsampa nginx-scripting
Devinsampa nginx-scriptingTony Fabeen
 
Adventures in infrastructure as code
Adventures in infrastructure as codeAdventures in infrastructure as code
Adventures in infrastructure as codeJulian Simpson
 

What's hot (17)

PSR-7 and PSR-15, why can't you ignore them
PSR-7 and PSR-15, why can't you ignore themPSR-7 and PSR-15, why can't you ignore them
PSR-7 and PSR-15, why can't you ignore them
 
Bash in theory and in practice - part two
Bash in theory and in practice - part twoBash in theory and in practice - part two
Bash in theory and in practice - part two
 
Containers for sysadmins
Containers for sysadminsContainers for sysadmins
Containers for sysadmins
 
Bash in theory and in practice - part one
Bash in theory and in practice - part oneBash in theory and in practice - part one
Bash in theory and in practice - part one
 
Basic command for linux
Basic command for linuxBasic command for linux
Basic command for linux
 
Intro to pl/PHP Oscon2007
Intro to pl/PHP Oscon2007Intro to pl/PHP Oscon2007
Intro to pl/PHP Oscon2007
 
REST in peace @ IPC 2012 in Mainz
REST in peace @ IPC 2012 in MainzREST in peace @ IPC 2012 in Mainz
REST in peace @ IPC 2012 in Mainz
 
Getting groovy (ODP)
Getting groovy (ODP)Getting groovy (ODP)
Getting groovy (ODP)
 
Piratte installation
Piratte installationPiratte installation
Piratte installation
 
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
JDD 2017: Nginx + Lua = OpenResty (Marcin Stożek)
 
rtwerewr
rtwerewrrtwerewr
rtwerewr
 
mapserver_install_linux
mapserver_install_linuxmapserver_install_linux
mapserver_install_linux
 
Drupal and Open shift (and php)
Drupal and Open shift (and php)Drupal and Open shift (and php)
Drupal and Open shift (and php)
 
Devinsampa nginx-scripting
Devinsampa nginx-scriptingDevinsampa nginx-scripting
Devinsampa nginx-scripting
 
Adventures in infrastructure as code
Adventures in infrastructure as codeAdventures in infrastructure as code
Adventures in infrastructure as code
 
Os Treat
Os TreatOs Treat
Os Treat
 
Linux Commands
Linux CommandsLinux Commands
Linux Commands
 

Similar to "It’s about time to embrace Streams" Luciano Mammino

It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJSIt’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJSLuciano Mammino
 
It’s about time to embrace Node.js Streams - Austin Node.js meetup
It’s about time to embrace Node.js Streams - Austin Node.js meetupIt’s about time to embrace Node.js Streams - Austin Node.js meetup
It’s about time to embrace Node.js Streams - Austin Node.js meetupLuciano Mammino
 
It's about time to embrace Node.js streams
It's about time to embrace Node.js streamsIt's about time to embrace Node.js streams
It's about time to embrace Node.js streamsLuciano Mammino
 
It’s about time to embrace Node.js Streams
It’s about time to embrace Node.js StreamsIt’s about time to embrace Node.js Streams
It’s about time to embrace Node.js StreamsLuciano Mammino
 
Downloading a Billion Files in Python
Downloading a Billion Files in PythonDownloading a Billion Files in Python
Downloading a Billion Files in PythonJames Saryerwinnie
 
Topic - File operation.pptx
Topic - File operation.pptxTopic - File operation.pptx
Topic - File operation.pptxAdnan al-emran
 
Will iPython replace Bash?
Will iPython replace Bash?Will iPython replace Bash?
Will iPython replace Bash?Babel
 
Will iPython replace bash?
Will iPython replace bash?Will iPython replace bash?
Will iPython replace bash?Roberto Polli
 
Chap 5 php files part 1
Chap 5 php files part 1Chap 5 php files part 1
Chap 5 php files part 1monikadeshmane
 
File handling-dutt
File handling-duttFile handling-dutt
File handling-duttAnil Dutt
 
SEQFILE1.PPT
SEQFILE1.PPTSEQFILE1.PPT
SEQFILE1.PPTloverkodi
 
File Handling in c.ppt
File Handling in c.pptFile Handling in c.ppt
File Handling in c.pptBhumaNagaPavan
 
C 檔案輸入與輸出
C 檔案輸入與輸出C 檔案輸入與輸出
C 檔案輸入與輸出PingLun Liao
 
PHP File Handling
PHP File Handling PHP File Handling
PHP File Handling Degu8
 

Similar to "It’s about time to embrace Streams" Luciano Mammino (20)

It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJSIt’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
 
It’s about time to embrace Node.js Streams - Austin Node.js meetup
It’s about time to embrace Node.js Streams - Austin Node.js meetupIt’s about time to embrace Node.js Streams - Austin Node.js meetup
It’s about time to embrace Node.js Streams - Austin Node.js meetup
 
It's about time to embrace Node.js streams
It's about time to embrace Node.js streamsIt's about time to embrace Node.js streams
It's about time to embrace Node.js streams
 
It’s about time to embrace Node.js Streams
It’s about time to embrace Node.js StreamsIt’s about time to embrace Node.js Streams
It’s about time to embrace Node.js Streams
 
Downloading a Billion Files in Python
Downloading a Billion Files in PythonDownloading a Billion Files in Python
Downloading a Billion Files in Python
 
DIWE - File handling with PHP
DIWE - File handling with PHPDIWE - File handling with PHP
DIWE - File handling with PHP
 
Php files
Php filesPhp files
Php files
 
Topic - File operation.pptx
Topic - File operation.pptxTopic - File operation.pptx
Topic - File operation.pptx
 
Will iPython replace Bash?
Will iPython replace Bash?Will iPython replace Bash?
Will iPython replace Bash?
 
Will iPython replace bash?
Will iPython replace bash?Will iPython replace bash?
Will iPython replace bash?
 
File handling in cpp
File handling in cppFile handling in cpp
File handling in cpp
 
Chap 5 php files part 1
Chap 5 php files part 1Chap 5 php files part 1
Chap 5 php files part 1
 
File handling-dutt
File handling-duttFile handling-dutt
File handling-dutt
 
SEQFILE1.PPT
SEQFILE1.PPTSEQFILE1.PPT
SEQFILE1.PPT
 
File Handling in c.ppt
File Handling in c.pptFile Handling in c.ppt
File Handling in c.ppt
 
file_handling_in_c.ppt
file_handling_in_c.pptfile_handling_in_c.ppt
file_handling_in_c.ppt
 
FILES IN C
FILES IN CFILES IN C
FILES IN C
 
C 檔案輸入與輸出
C 檔案輸入與輸出C 檔案輸入與輸出
C 檔案輸入與輸出
 
PHP File Handling
PHP File Handling PHP File Handling
PHP File Handling
 
File Handling
File HandlingFile Handling
File Handling
 

More from Julia Cherniak

"Definition of Done: Deadline driven development" Vitaliy Ratushnyi
"Definition of Done: Deadline driven development" Vitaliy Ratushnyi"Definition of Done: Deadline driven development" Vitaliy Ratushnyi
"Definition of Done: Deadline driven development" Vitaliy RatushnyiJulia Cherniak
 
"Muses Code JS or How Communities Change People" Tanya Butenko
"Muses Code JS or How Communities Change People" Tanya Butenko"Muses Code JS or How Communities Change People" Tanya Butenko
"Muses Code JS or How Communities Change People" Tanya ButenkoJulia Cherniak
 
"Принимают за hard skills, увольняют из-за soft skills" Павел Галушко
"Принимают за hard skills, увольняют из-за soft skills" Павел Галушко"Принимают за hard skills, увольняют из-за soft skills" Павел Галушко
"Принимают за hard skills, увольняют из-за soft skills" Павел ГалушкоJulia Cherniak
 
"Node.js and Serverless" Viacheslav Panevskyi
"Node.js and Serverless" Viacheslav Panevskyi"Node.js and Serverless" Viacheslav Panevskyi
"Node.js and Serverless" Viacheslav PanevskyiJulia Cherniak
 
"Lift me up. Mentorship 101" Tanya Butenko
"Lift me up. Mentorship 101" Tanya Butenko"Lift me up. Mentorship 101" Tanya Butenko
"Lift me up. Mentorship 101" Tanya ButenkoJulia Cherniak
 
"Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D...
"Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D..."Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D...
"Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D...Julia Cherniak
 
"Black Clouds and Silver Linings in Node.js Security" Liran Tal
"Black Clouds and Silver Linings in Node.js Security" Liran Tal"Black Clouds and Silver Linings in Node.js Security" Liran Tal
"Black Clouds and Silver Linings in Node.js Security" Liran TalJulia Cherniak
 
"The working architecture of NodeJs applications" Viktor Turskyi
"The working architecture of NodeJs applications" Viktor Turskyi"The working architecture of NodeJs applications" Viktor Turskyi
"The working architecture of NodeJs applications" Viktor TurskyiJulia Cherniak
 
"The search for App-iness : Progressive Web Apps" Jamie Maria Schouren
"The search for App-iness : Progressive Web Apps" Jamie Maria Schouren"The search for App-iness : Progressive Web Apps" Jamie Maria Schouren
"The search for App-iness : Progressive Web Apps" Jamie Maria SchourenJulia Cherniak
 
"The Future of Machine Learning & JavaScript"Asim Hussain
"The Future of Machine Learning & JavaScript"Asim Hussain"The Future of Machine Learning & JavaScript"Asim Hussain
"The Future of Machine Learning & JavaScript"Asim HussainJulia Cherniak
 
"ClojureScript journey: from little script, to CLI program, to AWS Lambda fun...
"ClojureScript journey: from little script, to CLI program, to AWS Lambda fun..."ClojureScript journey: from little script, to CLI program, to AWS Lambda fun...
"ClojureScript journey: from little script, to CLI program, to AWS Lambda fun...Julia Cherniak
 

More from Julia Cherniak (11)

"Definition of Done: Deadline driven development" Vitaliy Ratushnyi
"Definition of Done: Deadline driven development" Vitaliy Ratushnyi"Definition of Done: Deadline driven development" Vitaliy Ratushnyi
"Definition of Done: Deadline driven development" Vitaliy Ratushnyi
 
"Muses Code JS or How Communities Change People" Tanya Butenko
"Muses Code JS or How Communities Change People" Tanya Butenko"Muses Code JS or How Communities Change People" Tanya Butenko
"Muses Code JS or How Communities Change People" Tanya Butenko
 
"Принимают за hard skills, увольняют из-за soft skills" Павел Галушко
"Принимают за hard skills, увольняют из-за soft skills" Павел Галушко"Принимают за hard skills, увольняют из-за soft skills" Павел Галушко
"Принимают за hard skills, увольняют из-за soft skills" Павел Галушко
 
"Node.js and Serverless" Viacheslav Panevskyi
"Node.js and Serverless" Viacheslav Panevskyi"Node.js and Serverless" Viacheslav Panevskyi
"Node.js and Serverless" Viacheslav Panevskyi
 
"Lift me up. Mentorship 101" Tanya Butenko
"Lift me up. Mentorship 101" Tanya Butenko"Lift me up. Mentorship 101" Tanya Butenko
"Lift me up. Mentorship 101" Tanya Butenko
 
"Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D...
"Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D..."Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D...
"Using Blockchain in Node.js project: JavaScript Ninja’s experience" Maksym D...
 
"Black Clouds and Silver Linings in Node.js Security" Liran Tal
"Black Clouds and Silver Linings in Node.js Security" Liran Tal"Black Clouds and Silver Linings in Node.js Security" Liran Tal
"Black Clouds and Silver Linings in Node.js Security" Liran Tal
 
"The working architecture of NodeJs applications" Viktor Turskyi
"The working architecture of NodeJs applications" Viktor Turskyi"The working architecture of NodeJs applications" Viktor Turskyi
"The working architecture of NodeJs applications" Viktor Turskyi
 
"The search for App-iness : Progressive Web Apps" Jamie Maria Schouren
"The search for App-iness : Progressive Web Apps" Jamie Maria Schouren"The search for App-iness : Progressive Web Apps" Jamie Maria Schouren
"The search for App-iness : Progressive Web Apps" Jamie Maria Schouren
 
"The Future of Machine Learning & JavaScript"Asim Hussain
"The Future of Machine Learning & JavaScript"Asim Hussain"The Future of Machine Learning & JavaScript"Asim Hussain
"The Future of Machine Learning & JavaScript"Asim Hussain
 
"ClojureScript journey: from little script, to CLI program, to AWS Lambda fun...
"ClojureScript journey: from little script, to CLI program, to AWS Lambda fun..."ClojureScript journey: from little script, to CLI program, to AWS Lambda fun...
"ClojureScript journey: from little script, to CLI program, to AWS Lambda fun...
 

Recently uploaded

WhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure service
WhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure serviceWhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure service
WhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure servicePooja Nehwal
 
08448380779 Call Girls In Friends Colony Women Seeking Men
08448380779 Call Girls In Friends Colony Women Seeking Men08448380779 Call Girls In Friends Colony Women Seeking Men
08448380779 Call Girls In Friends Colony Women Seeking MenDelhi Call girls
 
IAC 2024 - IA Fast Track to Search Focused AI Solutions
IAC 2024 - IA Fast Track to Search Focused AI SolutionsIAC 2024 - IA Fast Track to Search Focused AI Solutions
IAC 2024 - IA Fast Track to Search Focused AI SolutionsEnterprise Knowledge
 
From Event to Action: Accelerate Your Decision Making with Real-Time Automation
From Event to Action: Accelerate Your Decision Making with Real-Time AutomationFrom Event to Action: Accelerate Your Decision Making with Real-Time Automation
From Event to Action: Accelerate Your Decision Making with Real-Time AutomationSafe Software
 
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
04-2024-HHUG-Sales-and-Marketing-Alignment.pptxHampshireHUG
 
How to Remove Document Management Hurdles with X-Docs?
How to Remove Document Management Hurdles with X-Docs?How to Remove Document Management Hurdles with X-Docs?
How to Remove Document Management Hurdles with X-Docs?XfilesPro
 
GenCyber Cyber Security Day Presentation
GenCyber Cyber Security Day PresentationGenCyber Cyber Security Day Presentation
GenCyber Cyber Security Day PresentationMichael W. Hawkins
 
Breaking the Kubernetes Kill Chain: Host Path Mount
Breaking the Kubernetes Kill Chain: Host Path MountBreaking the Kubernetes Kill Chain: Host Path Mount
Breaking the Kubernetes Kill Chain: Host Path MountPuma Security, LLC
 
The Codex of Business Writing Software for Real-World Solutions 2.pptx
The Codex of Business Writing Software for Real-World Solutions 2.pptxThe Codex of Business Writing Software for Real-World Solutions 2.pptx
The Codex of Business Writing Software for Real-World Solutions 2.pptxMalak Abu Hammad
 
Key Features Of Token Development (1).pptx
Key  Features Of Token  Development (1).pptxKey  Features Of Token  Development (1).pptx
Key Features Of Token Development (1).pptxLBM Solutions
 
Maximizing Board Effectiveness 2024 Webinar.pptx
Maximizing Board Effectiveness 2024 Webinar.pptxMaximizing Board Effectiveness 2024 Webinar.pptx
Maximizing Board Effectiveness 2024 Webinar.pptxOnBoard
 
Scaling API-first – The story of a global engineering organization
Scaling API-first – The story of a global engineering organizationScaling API-first – The story of a global engineering organization
Scaling API-first – The story of a global engineering organizationRadu Cotescu
 
08448380779 Call Girls In Greater Kailash - I Women Seeking Men
08448380779 Call Girls In Greater Kailash - I Women Seeking Men08448380779 Call Girls In Greater Kailash - I Women Seeking Men
08448380779 Call Girls In Greater Kailash - I Women Seeking MenDelhi Call girls
 
Pigging Solutions in Pet Food Manufacturing
Pigging Solutions in Pet Food ManufacturingPigging Solutions in Pet Food Manufacturing
Pigging Solutions in Pet Food ManufacturingPigging Solutions
 
08448380779 Call Girls In Civil Lines Women Seeking Men
08448380779 Call Girls In Civil Lines Women Seeking Men08448380779 Call Girls In Civil Lines Women Seeking Men
08448380779 Call Girls In Civil Lines Women Seeking MenDelhi Call girls
 
08448380779 Call Girls In Diplomatic Enclave Women Seeking Men
08448380779 Call Girls In Diplomatic Enclave Women Seeking Men08448380779 Call Girls In Diplomatic Enclave Women Seeking Men
08448380779 Call Girls In Diplomatic Enclave Women Seeking MenDelhi Call girls
 
#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024
#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024
#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024BookNet Canada
 
AI as an Interface for Commercial Buildings
AI as an Interface for Commercial BuildingsAI as an Interface for Commercial Buildings
AI as an Interface for Commercial BuildingsMemoori
 
Slack Application Development 101 Slides
Slack Application Development 101 SlidesSlack Application Development 101 Slides
Slack Application Development 101 Slidespraypatel2
 
[2024]Digital Global Overview Report 2024 Meltwater.pdf
[2024]Digital Global Overview Report 2024 Meltwater.pdf[2024]Digital Global Overview Report 2024 Meltwater.pdf
[2024]Digital Global Overview Report 2024 Meltwater.pdfhans926745
 

Recently uploaded (20)

WhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure service
WhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure serviceWhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure service
WhatsApp 9892124323 ✓Call Girls In Kalyan ( Mumbai ) secure service
 
08448380779 Call Girls In Friends Colony Women Seeking Men
08448380779 Call Girls In Friends Colony Women Seeking Men08448380779 Call Girls In Friends Colony Women Seeking Men
08448380779 Call Girls In Friends Colony Women Seeking Men
 
IAC 2024 - IA Fast Track to Search Focused AI Solutions
IAC 2024 - IA Fast Track to Search Focused AI SolutionsIAC 2024 - IA Fast Track to Search Focused AI Solutions
IAC 2024 - IA Fast Track to Search Focused AI Solutions
 
From Event to Action: Accelerate Your Decision Making with Real-Time Automation
From Event to Action: Accelerate Your Decision Making with Real-Time AutomationFrom Event to Action: Accelerate Your Decision Making with Real-Time Automation
From Event to Action: Accelerate Your Decision Making with Real-Time Automation
 
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
 
How to Remove Document Management Hurdles with X-Docs?
How to Remove Document Management Hurdles with X-Docs?How to Remove Document Management Hurdles with X-Docs?
How to Remove Document Management Hurdles with X-Docs?
 
GenCyber Cyber Security Day Presentation
GenCyber Cyber Security Day PresentationGenCyber Cyber Security Day Presentation
GenCyber Cyber Security Day Presentation
 
Breaking the Kubernetes Kill Chain: Host Path Mount
Breaking the Kubernetes Kill Chain: Host Path MountBreaking the Kubernetes Kill Chain: Host Path Mount
Breaking the Kubernetes Kill Chain: Host Path Mount
 
The Codex of Business Writing Software for Real-World Solutions 2.pptx
The Codex of Business Writing Software for Real-World Solutions 2.pptxThe Codex of Business Writing Software for Real-World Solutions 2.pptx
The Codex of Business Writing Software for Real-World Solutions 2.pptx
 
Key Features Of Token Development (1).pptx
Key  Features Of Token  Development (1).pptxKey  Features Of Token  Development (1).pptx
Key Features Of Token Development (1).pptx
 
Maximizing Board Effectiveness 2024 Webinar.pptx
Maximizing Board Effectiveness 2024 Webinar.pptxMaximizing Board Effectiveness 2024 Webinar.pptx
Maximizing Board Effectiveness 2024 Webinar.pptx
 
Scaling API-first – The story of a global engineering organization
Scaling API-first – The story of a global engineering organizationScaling API-first – The story of a global engineering organization
Scaling API-first – The story of a global engineering organization
 
08448380779 Call Girls In Greater Kailash - I Women Seeking Men
08448380779 Call Girls In Greater Kailash - I Women Seeking Men08448380779 Call Girls In Greater Kailash - I Women Seeking Men
08448380779 Call Girls In Greater Kailash - I Women Seeking Men
 
Pigging Solutions in Pet Food Manufacturing
Pigging Solutions in Pet Food ManufacturingPigging Solutions in Pet Food Manufacturing
Pigging Solutions in Pet Food Manufacturing
 
08448380779 Call Girls In Civil Lines Women Seeking Men
08448380779 Call Girls In Civil Lines Women Seeking Men08448380779 Call Girls In Civil Lines Women Seeking Men
08448380779 Call Girls In Civil Lines Women Seeking Men
 
08448380779 Call Girls In Diplomatic Enclave Women Seeking Men
08448380779 Call Girls In Diplomatic Enclave Women Seeking Men08448380779 Call Girls In Diplomatic Enclave Women Seeking Men
08448380779 Call Girls In Diplomatic Enclave Women Seeking Men
 
#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024
#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024
#StandardsGoals for 2024: What’s new for BISAC - Tech Forum 2024
 
AI as an Interface for Commercial Buildings
AI as an Interface for Commercial BuildingsAI as an Interface for Commercial Buildings
AI as an Interface for Commercial Buildings
 
Slack Application Development 101 Slides
Slack Application Development 101 SlidesSlack Application Development 101 Slides
Slack Application Development 101 Slides
 
[2024]Digital Global Overview Report 2024 Meltwater.pdf
[2024]Digital Global Overview Report 2024 Meltwater.pdf[2024]Digital Global Overview Report 2024 Meltwater.pdf
[2024]Digital Global Overview Report 2024 Meltwater.pdf
 

"It’s about time to embrace Streams" Luciano Mammino

  • 1. K Y I V   2 0 1 9 Luciano Mammino (@loige) IT’S ABOUT TIME TOIT’S ABOUT TIME TO EMBRACE STREAMSEMBRACE STREAMS    loige.link/streams-kyiv May 18th 1
  • 2. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 @loige2
  • 3. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 @loige2
  • 4. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // read entire file content const content = readFileSync(src) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 11 12 // write that content somewhere else13 writeFileSync(dest, content)14 @loige2
  • 5. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // read entire file content const content = readFileSync(src) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // write that content somewhere else writeFileSync(dest, content) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 13 14 @loige2
  • 17. 11
  • 29. FILE COPY: FILE COPY: THE BUFFER WAYTHE BUFFER WAY @loige // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv const content = readFileSync(src) writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 16
  • 30. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 @loige * Careful: this implementation is not optimal * 17
  • 31. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 createReadStream, createWriteStream // stream-copy.js1 2 const {3 4 5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 srcStream.on('data', (data) => destStream.write(data))11 @loige * Careful: this implementation is not optimal * 17
  • 32. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 createReadStream, createWriteStream // stream-copy.js1 2 const {3 4 5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 srcStream.on('data', (data) => destStream.write(data))11 const srcStream = createReadStream(src) const destStream = createWriteStream(dest) // stream-copy.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 srcStream.on('data', (data) => destStream.write(data))11 @loige * Careful: this implementation is not optimal * 17
  • 33. FILE COPY: FILE COPY: THE STREAM WAYTHE STREAM WAY // stream-copy.js const { createReadStream, createWriteStream } = require('fs') const [,, src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', (data) => destStream.write(data)) 1 2 3 4 5 6 7 8 9 10 11 createReadStream, createWriteStream // stream-copy.js1 2 const {3 4 5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 srcStream.on('data', (data) => destStream.write(data))11 const srcStream = createReadStream(src) const destStream = createWriteStream(dest) // stream-copy.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 srcStream.on('data', (data) => destStream.write(data))11 srcStream.on('data', (data) => destStream.write(data)) // stream-copy.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 7 const [,, src, dest] = process.argv8 const srcStream = createReadStream(src)9 const destStream = createWriteStream(dest)10 11 @loige * Careful: this implementation is not optimal * 17
  • 40. ALL STREAMS ARE ALL STREAMS ARE EVENT EMITTERSEVENT EMITTERS A stream instance is an object that emits events when its internal state changes, for instance: s.on('readable', () => {}) // ready to be consumed s.on('data', (chunk) => {}) // new data is available s.on('error', (err) => {}) // some error happened s.on('end', () => {}) // no more data available The events available depend from the type of stream @loige24
  • 52. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 @loige36
  • 53. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 54. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 55. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 @loige36
  • 56. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 file.on('data', chunk => { }) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 57. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 file.on('data', chunk => { }) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 for (let char of chunk.toString('utf8')) { } // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 12 if (emojis.includes(char)) {13 counter++14 }15 16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 58. // count-emojis-flowing.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 file.on('data', chunk => { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } }) file.on('end', () => console.log(`Found ${counter} emojis`)) file.on('error', err => console.error(`Error reading file: ${err}`)) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const { EMOJI_MAP } = require('emoji') // from npm // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 const file = createReadStream(process.argv[2]) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 let counter = 0 // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 9 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => 
console.error(`Error reading file: ${err}`))19 file.on('data', chunk => { }) // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 11 for (let char of chunk.toString('utf8')) {12 if (emojis.includes(char)) {13 counter++14 }15 }16 17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 for (let char of chunk.toString('utf8')) { } // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 12 if (emojis.includes(char)) {13 counter++14 }15 16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 if (emojis.includes(char)) { counter++ } // count-emojis-flowing.js1 2 const { createReadStream } = require('fs')3 const { EMOJI_MAP } = require('emoji') // from npm4 5 const emojis = Object.keys(EMOJI_MAP)6 7 const file = createReadStream(process.argv[2])8 let counter = 09 10 file.on('data', chunk => {11 for (let char of chunk.toString('utf8')) {12 13 14 15 }16 })17 file.on('end', () => console.log(`Found ${counter} emojis`))18 file.on('error', err => console.error(`Error reading file: ${err}`))19 @loige36
  • 61. // count-emojis-async-iterator.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm async function main () { const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 for await (let chunk of file) { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } } console.log(`Found ${counter} emojis`) } main() 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 @loige39
  • 62. // count-emojis-async-iterator.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm async function main () { const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 for await (let chunk of file) { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } } console.log(`Found ${counter} emojis`) } main() 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 for await (let chunk of file) { } // count-emojis-async-iterator.js1 const { createReadStream } = require('fs')2 const { EMOJI_MAP } = require('emoji') // from npm3 4 async function main () {5 const emojis = Object.keys(EMOJI_MAP)6 const file = createReadStream(process.argv[2])7 let counter = 08 9 10 for (let char of chunk.toString('utf8')) {11 if (emojis.includes(char)) {12 counter++13 }14 }15 16 17 console.log(`Found ${counter} emojis`)18 }19 20 main()21 @loige39
  • 63. // count-emojis-async-iterator.js const { createReadStream } = require('fs') const { EMOJI_MAP } = require('emoji') // from npm async function main () { const emojis = Object.keys(EMOJI_MAP) const file = createReadStream(process.argv[2]) let counter = 0 for await (let chunk of file) { for (let char of chunk.toString('utf8')) { if (emojis.includes(char)) { counter++ } } } console.log(`Found ${counter} emojis`) } main() 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 for await (let chunk of file) { } // count-emojis-async-iterator.js1 const { createReadStream } = require('fs')2 const { EMOJI_MAP } = require('emoji') // from npm3 4 async function main () {5 const emojis = Object.keys(EMOJI_MAP)6 const file = createReadStream(process.argv[2])7 let counter = 08 9 10 for (let char of chunk.toString('utf8')) {11 if (emojis.includes(char)) {12 counter++13 }14 }15 16 17 console.log(`Found ${counter} emojis`)18 }19 20 main()21 async function main () { } // count-emojis-async-iterator.js1 const { createReadStream } = require('fs')2 const { EMOJI_MAP } = require('emoji') // from npm3 4 5 const emojis = Object.keys(EMOJI_MAP)6 const file = createReadStream(process.argv[2])7 let counter = 08 9 for await (let chunk of file) {10 for (let char of chunk.toString('utf8')) {11 if (emojis.includes(char)) {12 counter++13 }14 }15 }16 17 console.log(`Found ${counter} emojis`)18 19 20 main()21 @loige39
  • 65. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 @loige41
  • 66. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const req = http.request( ) // writable-http-request.js1 const http = require('http')2 3 4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => console.error(`Request failed: ${err}`))16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 @loige41
  • 67. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const req = http.request( ) // writable-http-request.js1 const http = require('http')2 3 4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => console.error(`Request failed: ${err}`))16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) // writable-http-request.js1 const http = require('http')2 3 const req = http.request(4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 )12 13 14 15 16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 @loige41
  • 68. // writable-http-request.js const http = require('http') const req = http.request( { hostname: 'enx6b07hdu6cs.x.pipedream.net', method: 'POST' }, resp => { console.log(`Server responded with "${resp.statusCode}"`) } ) req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) req.write('writing some content...n') req.end('last write & close the stream') 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 const req = http.request( ) // writable-http-request.js1 const http = require('http')2 3 4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => console.error(`Request failed: ${err}`))16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 req.on('finish', () => console.log('request sent')) req.on('close', () => console.log('Connection closed')) req.on('error', err => console.error(`Request failed: ${err}`)) // writable-http-request.js1 const http = require('http')2 3 const req = http.request(4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 )12 13 14 15 16 17 req.write('writing some content...n')18 req.end('last write & close the stream')19 req.write('writing some content...n') req.end('last write & close the stream') // writable-http-request.js1 const http = require('http')2 3 const req = http.request(4 {5 hostname: 'enx6b07hdu6cs.x.pipedream.net',6 method: 'POST'7 },8 resp => {9 console.log(`Server responded with "${resp.statusCode}"`)10 }11 )12 13 req.on('finish', () => console.log('request sent'))14 req.on('close', () => console.log('Connection closed'))15 req.on('error', err => 
console.error(`Request failed: ${err}`))16 17 18 19 @loige41
  • 72. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 @loige45
  • 73. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 @loige45
  • 74. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 @loige45
  • 75. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 srcStream.pause() // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 13 // we will resume when the destination 
stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 @loige45
  • 76. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 srcStream.pause() // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 13 // we will resume when the destination 
stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 destStream.once('drain', () => srcStream.resume()) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 15 }16 })17 @loige45
  • 77. // stream-copy-safe.js const { createReadStream, createWriteStream } = require('fs') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { // we are overflowing the destination, we should pause srcStream.pause() // we will resume when the destination stream is drained destStream.once('drain', () => srcStream.resume()) } }) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 const canContinue = destStream.write(data) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 if (!canContinue) { } // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 destStream.once('drain', () => srcStream.resume())15 16 })17 srcStream.pause() // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 13 // we will resume when the destination 
stream is drained14 destStream.once('drain', () => srcStream.resume())15 }16 })17 destStream.once('drain', () => srcStream.resume()) // stream-copy-safe.js1 2 const { createReadStream, createWriteStream } = require('fs')3 4 const [, , src, dest] = process.argv5 const srcStream = createReadStream(src)6 const destStream = createWriteStream(dest)7 8 srcStream.on('data', data => {9 const canContinue = destStream.write(data)10 if (!canContinue) {11 // we are overflowing the destination, we should pause12 srcStream.pause()13 // we will resume when the destination stream is drained14 15 }16 })17 @loige45
  • 95. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() 49@loige (Backpressure)
  • 96. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() 49@loige (Backpressure) (Backpressure)
  • 97. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() 49@loige (Backpressure) (Backpressure)
  • 98. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() ⚡ drain 49@loige (Backpressure) (Backpressure)
  • 99. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() ⚡ drainresume() 49@loige (Backpressure) (Backpressure)
  • 100. HOW CAN WE USE TRANSFORM STREAMS?HOW CAN WE USE TRANSFORM STREAMS? Readable Transform Writable ⚡   data write() ⚡   data write() pause() ⚡ drainresume() pause() ⚡ drainresume() 49@loige (Backpressure) (Backpressure) You also have to handle end events and errors!
  • 101. gzipStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { gzipStream.pause() destStream.once('drain', () => { gzipStream.resume() }) } }) gzipStream.on('end', () => { destStream.end() }) // ⚠ TODO: handle errors! // stream-copy-gzip.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = gzipStream.write(data) if (!canContinue) { srcStream.pause() gzipStream.once('drain', () => { srcStream.resume() }) } }) srcStream.on('end', () => { // check if there's buffered data left const remainingData = gzipStream.read() if (remainingData !== null) { destStream.write() } gzipStream.end() }) @loige50
  • 102. gzipStream.on('data', data => { const canContinue = destStream.write(data) if (!canContinue) { gzipStream.pause() destStream.once('drain', () => { gzipStream.resume() }) } }) gzipStream.on('end', () => { destStream.end() }) // ⚠ TODO: handle errors! // stream-copy-gzip.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream.on('data', data => { const canContinue = gzipStream.write(data) if (!canContinue) { srcStream.pause() gzipStream.once('drain', () => { srcStream.resume() }) } }) srcStream.on('end', () => { // check if there's buffered data left const remainingData = gzipStream.read() if (remainingData !== null) { destStream.write() } gzipStream.end() }) @loige50
  • 105. // stream-copy-gzip-pipe.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream .pipe(gzipStream) .pipe(destStream) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 @loige53
  • 106. // stream-copy-gzip-pipe.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream .pipe(gzipStream) .pipe(destStream) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 srcStream .pipe(gzipStream) .pipe(destStream) // stream-copy-gzip-pipe.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 const { createGzip } = require('zlib')7 8 const [, , src, dest] = process.argv9 const srcStream = createReadStream(src)10 const gzipStream = createGzip()11 const destStream = createWriteStream(dest)12 13 14 15 16 @loige53
  • 107. // stream-copy-gzip-pipe.js const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv const srcStream = createReadStream(src) const gzipStream = createGzip() const destStream = createWriteStream(dest) srcStream .pipe(gzipStream) .pipe(destStream) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 srcStream .pipe(gzipStream) .pipe(destStream) // stream-copy-gzip-pipe.js1 2 const {3 createReadStream,4 createWriteStream5 } = require('fs')6 const { createGzip } = require('zlib')7 8 const [, , src, dest] = process.argv9 const srcStream = createReadStream(src)10 const gzipStream = createGzip()11 const destStream = createWriteStream(dest)12 13 14 15 16 @loige53
  • 109. readable .on('error', handleErr) .pipe(decompress) .on('error', handleErr) .pipe(decrypt) .on('error', handleErr) .pipe(convert) .on('error', handleErr) .pipe(encrypt) .on('error', handleErr) .pipe(compress) .on('error', handleErr) .pipe(writeToDisk) .on('error', handleErr) Handling errors (correctly) @loige55
  • 110. readable .on('error', handleErr) .pipe(decompress) .on('error', handleErr) .pipe(decrypt) .on('error', handleErr) .pipe(convert) .on('error', handleErr) .pipe(encrypt) .on('error', handleErr) .pipe(compress) .on('error', handleErr) .pipe(writeToDisk) .on('error', handleErr) Handling errors (correctly) @loige   handleErr should end and destroy the streams (it doesn't happen automatically)   55
  • 111. readable .on('error', handleErr) .pipe(decompress) .on('error', handleErr) .pipe(decrypt) .on('error', handleErr) .pipe(convert) .on('error', handleErr) .pipe(encrypt) .on('error', handleErr) .pipe(compress) .on('error', handleErr) .pipe(writeToDisk) .on('error', handleErr) Handling errors (correctly) @loige   handleErr should end and destroy the streams (it doesn't happen automatically)   55
  • 113. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 114. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 pipeline( ) // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 115. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 pipeline( ) // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 116. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 pipeline( ) // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 function onEnd (err) { } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 20 )21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 117. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 pipeline( ) // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 function onEnd (err) { } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 20 )21 if (err) { console.error(`Error: ${err}`) process.exit(1) } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] 
= process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 14 15 16 17 18 console.log('Done!')19 }20 )21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 118. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 pipeline( ) // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 function onEnd (err) { } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 20 )21 if (err) { console.error(`Error: ${err}`) process.exit(1) } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] 
= process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 14 15 16 17 18 console.log('Done!')19 }20 )21 console.log('Done!') // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 19 }20 )21 stream.pipeline(...streams, callback) - Node.js 10+ @loige57
  • 119. // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 pipeline( ) // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 createReadStream(src), createGzip(), createWriteStream(dest), // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 10 11 12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 function onEnd (err) { } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 20 )21 if (err) { console.error(`Error: ${err}`) process.exit(1) } // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] 
= process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 14 15 16 17 18 console.log('Done!')19 }20 )21 console.log('Done!') // stream-copy-gzip-pipeline.js1 2 const { pipeline } = require('stream')3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pipeline(9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 19 }20 )21 // stream-copy-gzip-pipeline.js const { pipeline } = require('stream') const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pipeline( createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 stream.pipeline(...streams, callback) - Node.js 10+ @loige You can pass multiple streams (they will be piped) The last argument is a callback. If invoked with an error, it means the pipeline failed at some point. All the streams are ended and destroyed correctly. 57
  • 120. // stream-copy-gzip-pump.js const pump = require('pump') // from npm const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pump( // just swap pipeline with pump! createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 For Node.js < 10: pump - npm.im/pump @loige58
  • 121. // stream-copy-gzip-pump.js const pump = require('pump') // from npm const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pump( // just swap pipeline with pump! createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const pump = require('pump') // from npm // stream-copy-gzip-pump.js1 2 3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pump( // just swap pipeline with pump!9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 For Node.js < 10: pump - npm.im/pump @loige58
  • 122. // stream-copy-gzip-pump.js const pump = require('pump') // from npm const { createReadStream, createWriteStream } = require('fs') const { createGzip } = require('zlib') const [, , src, dest] = process.argv pump( // just swap pipeline with pump! createReadStream(src), createGzip(), createWriteStream(dest), function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } console.log('Done!') } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const pump = require('pump') // from npm // stream-copy-gzip-pump.js1 2 3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 pump( // just swap pipeline with pump!9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 )21 pump( // just swap pipeline with pump! ) // stream-copy-gzip-pump.js1 2 const pump = require('pump') // from npm3 const { createReadStream, createWriteStream } = require('fs')4 const { createGzip } = require('zlib')5 6 const [, , src, dest] = process.argv7 8 9 createReadStream(src),10 createGzip(),11 createWriteStream(dest),12 function onEnd (err) {13 if (err) {14 console.error(`Error: ${err}`)15 process.exit(1)16 }17 18 console.log('Done!')19 }20 21 For Node.js < 10: pump - npm.im/pump @loige58
  • 124. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 @loige60
  • 125. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 @loige60
  • 126. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 18 19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 }25 @loige60
  • 127. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 18 19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 }25 const stream = 
pumpify(encryptStream, gzipStream) // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 21 stream.initVect = initVect22 23 return stream24 }25 @loige60
  • 128. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 function createEncgz (secret) { } // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 25 const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 18 19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 return stream24 }25 const stream = 
pumpify(encryptStream, gzipStream) // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 21 stream.initVect = initVect22 23 return stream24 }25 return stream // encgz-stream.js - utility library1 2 const {3 createCipheriv,4 createDecipheriv,5 randomBytes,6 createHash7 } = require('crypto')8 const { createGzip, createGunzip } = require('zlib')9 const pumpify = require('pumpify') // from npm10 11 // calculates md5 of the secret (trimmed)12 function getChiperKey (secret) {}13 14 function createEncgz (secret) {15 const initVect = randomBytes(16)16 const cipherKey = getChiperKey(secret)17 const encryptStream = createCipheriv('aes256', cipherKey, initVect)18 const gzipStream = createGzip()19 20 const stream = pumpify(encryptStream, gzipStream)21 stream.initVect = initVect22 23 24 }25 @loige60
  • 129. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 @loige61
  • 130. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 @loige61
  • 131. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 5 6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 @loige61
  • 132. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 5 6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const stream = pumpify(gunzipStream, decryptStream) return stream // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 8 9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 @loige61
  • 133. // encgz-stream.js (...continue from previous slide) function createDecgz (secret, initVect) { const cipherKey = getChiperKey(secret) const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() const stream = pumpify(gunzipStream, decryptStream) return stream } module.exports = { createEncgz, createDecgz } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 function createDecgz (secret, initVect) { } // encgz-stream.js (...continue from previous slide)1 2 3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const decryptStream = createDecipheriv('aes256', cipherKey, initVect) const gunzipStream = createGunzip() // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 5 6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 const stream = pumpify(gunzipStream, decryptStream) return stream // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 8 9 }10 11 module.exports = {12 createEncgz,13 createDecgz14 }15 module.exports = { createEncgz, createDecgz } // encgz-stream.js (...continue from previous slide)1 2 function createDecgz (secret, initVect) {3 const cipherKey = getChiperKey(secret)4 const decryptStream = createDecipheriv('aes256', cipherKey, initVect)5 const gunzipStream = createGunzip()6 7 const stream = pumpify(gunzipStream, decryptStream)8 return stream9 }10 11 12 13 14 15 @loige61
  • 134. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 @loige62
  • 135. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige62
  • 136. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const encgz = createEncgz(secret) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige62
  • 137. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const encgz = createEncgz(secret) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 pipeline( ) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 21 @loige62
  • 138. // encgz.js - CLI to encrypt and gzip (from stdin to stdout) const { pipeline } = require('stream') const { createEncgz } = require('./encgz-stream') const [, , secret] = process.argv const encgz = createEncgz(secret) console.error(`init vector: ${encgz.initVect.toString('hex')}`) pipeline( process.stdin, encgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 const [, , secret] = process.argv // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 const encgz = createEncgz(secret) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 pipeline( ) // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 11 process.stdin,12 encgz,13 process.stdout,14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 21 process.stdin, encgz, process.stdout, // encgz.js - CLI to encrypt and gzip (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { 
createEncgz } = require('./encgz-stream')4 5 const [, , secret] = process.argv6 7 const encgz = createEncgz(secret)8 console.error(`init vector: ${encgz.initVect.toString('hex')}`)9 10 pipeline(11 12 13 14 function onEnd (err) {15 if (err) {16 console.error(`Error: ${err}`)17 process.exit(1)18 }19 }20 )21 @loige62
  • 139. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 @loige63
  • 140. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 const [, , secret, initVect] = process.argv // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 pipeline(10 process.stdin,11 decgz,12 process.stdout,13 function onEnd (err) {14 if (err) {15 console.error(`Error: ${err}`)16 process.exit(1)17 }18 }19 )20 @loige63
  • 141. // decgz.js - CLI to gunzip and decrypt (from stdin to stdout) const { pipeline } = require('stream') const { createDecgz } = require('./encgz-stream') const [, , secret, initVect] = process.argv const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) pipeline( process.stdin, decgz, process.stdout, function onEnd (err) { if (err) { console.error(`Error: ${err}`) process.exit(1) } } ) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 const [, , secret, initVect] = process.argv // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 6 7 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))8 9 pipeline(10 process.stdin,11 decgz,12 process.stdout,13 function onEnd (err) {14 if (err) {15 console.error(`Error: ${err}`)16 process.exit(1)17 }18 }19 )20 const decgz = createDecgz(secret, Buffer.from(initVect, 'hex')) // decgz.js - CLI to gunzip and decrypt (from stdin to stdout)1 2 const { pipeline } = require('stream')3 const { createDecgz } = require('./encgz-stream')4 5 const [, , secret, initVect] = process.argv6 7 8 9 pipeline(10 process.stdin,11 decgz,12 process.stdout,13 function onEnd (err) {14 if (err) {15 console.error(`Error: ${err}`)16 process.exit(1)17 }18 }19 )20 @loige63