Luciano Mammino (@loige)
IT'S ABOUT TIME TO EMBRACE NODE.JS STREAMS

loige.link/streams-manc
October 2nd, 2019
// buffer-copy.js
const {
  readFileSync,
  writeFileSync
} = require('fs')

const [,, src, dest] = process.argv

// read entire file content
const content = readFileSync(src)

// write that content somewhere else
writeFileSync(dest, content)
WE DO THIS ALL THE TIME
AND IT'S OK
BUT SOMETIMES ...
ERR_FS_FILE_TOO_LARGE!
File size is greater than possible Buffer
BUT WHY?
IF BYTES WERE BLOCKS...
MARIO CAN LIFT A FEW BLOCKS
BUT NOT TOO MANY...
WHAT CAN WE DO IF WE HAVE TO MOVE MANY BLOCKS?
WE CAN MOVE THEM ONE BY ONE!
we stream them...
HELLO, I AM LUCIANO!
Cloud Architect
Blog: loige.co
Twitter: @loige
GitHub: @lmammino
code: loige.link/streams-examples
loige.link/streams-manc
01. BUFFERS VS STREAMS
BUFFER: DATA STRUCTURE TO STORE AND TRANSFER ARBITRARY BINARY DATA
* Note that this loads all the content of the file in memory
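To make that definition concrete, here is a quick Buffer sketch (my addition, not from the original deck):

// buffer-example.js - a minimal illustration of the Buffer API
const buf = Buffer.from('Hello 🌍', 'utf8') // bytes from a string

console.log(buf.length) // number of bytes, not characters
console.log(buf.toString('hex')) // inspect the raw bytes
console.log(buf.toString('utf8', 0, 5)) // decode a byte range: 'Hello'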
STREAM: ABSTRACT INTERFACE FOR WORKING WITH STREAMING DATA
* It does not load all the data straight away
FILE COPY: THE BUFFER WAY

// buffer-copy.js
const {
  readFileSync,
  writeFileSync
} = require('fs')

const [,, src, dest] = process.argv

const content = readFileSync(src)
writeFileSync(dest, content)
FILE COPY: THE STREAM WAY

// stream-copy.js
const {
  createReadStream,
  createWriteStream
} = require('fs')

const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)

srcStream.on('data', (data) => destStream.write(data))

* Careful: this implementation is not optimal
MEMORY COMPARISON (~600MB FILE)
node --inspect-brk buffer-copy.js assets/poster.psd ~/Downloads/poster.psd
MEMORY COMPARISON (~600MB FILE)
node --inspect-brk stream-copy.js assets/poster.psd ~/Downloads/poster.psd
LET'S TRY WITH A BIG FILE (~10GB)
node --inspect-brk stream-copy.js assets/the-matrix-hd.mkv ~/Downloads/the-matrix-hd.mkv
STREAMS VS BUFFERS
Streams keep a low memory footprint even with large amounts of data.
Streams allow you to process data as soon as it arrives.
02. STREAM TYPES & APIS
ALL STREAMS ARE EVENT EMITTERS
A stream instance is an object that emits events when its internal state changes, for instance:

s.on('readable', () => {}) // ready to be consumed
s.on('data', (chunk) => {}) // new data is available
s.on('error', (err) => {}) // some error happened
s.on('end', () => {}) // no more data available

The events available depend on the type of stream.
READABLE STREAMS
A readable stream represents a source from which data is consumed.
Examples:
fs readStream
process.stdin
HTTP response (client-side)
HTTP request (server-side)
AWS S3 GetObject (data field)
It supports two modes for data consumption: flowing and paused (or non-flowing) mode.
READABLE STREAMS
Data is read from the source automatically and chunks are emitted as soon as they are available.
[Diagram: chunks of source data are read one by one by a readable stream in flowing mode and emitted to the data listener; when no more data is available, end is emitted.]
// count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

const emojis = Object.keys(EMOJI_MAP)

const file = createReadStream(process.argv[2])
let counter = 0

file.on('data', chunk => {
  for (let char of chunk.toString('utf8')) {
    if (emojis.includes(char)) {
      counter++
    }
  }
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
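The example above uses flowing mode; for the paused (non-flowing) mode mentioned earlier, a minimal variant (my addition, not from the slides) pulls chunks explicitly with read() inside the 'readable' event:

// count-emojis-paused.js - hypothetical paused-mode variant of the example above
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0

file.on('readable', () => {
  let chunk
  // in paused mode we ask for data explicitly
  while ((chunk = file.read()) !== null) {
    for (let char of chunk.toString('utf8')) {
      if (emojis.includes(char)) {
        counter++
      }
    }
  }
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))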
loige.link/up-emojiart
READABLE STREAMS ARE ALSO ASYNC ITERATORS (NODE.JS 10+)
// count-emojis-async-iterator.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

async function main () {
  const emojis = Object.keys(EMOJI_MAP)
  const file = createReadStream(process.argv[2])
  let counter = 0

  for await (let chunk of file) {
    for (let char of chunk.toString('utf8')) {
      if (emojis.includes(char)) {
        counter++
      }
    }
  }

  console.log(`Found ${counter} emojis`)
}

main()
WRITABLE STREAMS
A writable stream is an abstraction that allows you to write data to a destination.
Examples:
fs writeStream
process.stdout, process.stderr
HTTP request (client-side)
HTTP response (server-side)
AWS S3 PutObject (body parameter)
// writable-http-request.js
const http = require('http')

const req = http.request(
  {
    hostname: 'enx6b07hdu6cs.x.pipedream.net',
    method: 'POST'
  },
  resp => {
    console.log(`Server responded with "${resp.statusCode}"`)
  }
)

req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))

req.write('writing some content...\n')
req.end('last write & close the stream')
loige.link/writable-http-req
BACKPRESSURE
When writing large amounts of data you should make sure you handle the stop write signal and the drain event.

loige.link/backpressure
// stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)

srcStream.on('data', data => {
  const canContinue = destStream.write(data)
  if (!canContinue) {
    // we are overflowing the destination, we should pause
    srcStream.pause()
    // we will resume when the destination stream is drained
    destStream.once('drain', () => srcStream.resume())
  }
})
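Note that the code above never closes the destination; one way to finish the copy cleanly (my addition, mirroring the end handling shown later in the deck) is:

// close the destination once the source has emitted everything
srcStream.on('end', () => destStream.end())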
OTHER TYPES OF STREAM
Duplex Stream
Streams that are both Readable and Writable.
(net.Socket)

Transform Stream
Duplex streams that can modify or transform the data as it is written and read.
(zlib.createGzip(), crypto.createCipheriv())
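To make the Transform idea concrete, here is a minimal custom transform sketch (my addition, not from the slides) that uppercases whatever is written to it:

// uppercasify.js - a minimal custom Transform stream
const { Transform } = require('stream')

const uppercasify = new Transform({
  transform (chunk, encoding, done) {
    // data written to the writable side arrives here...
    this.push(chunk.toString('utf8').toUpperCase())
    // ...and what we push() can be read from the readable side
    done()
  }
})

uppercasify.on('data', chunk => process.stdout.write(chunk))
uppercasify.write('hello streams\n')
uppercasify.end()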
ANATOMY OF A TRANSFORM STREAM
[Diagram: 1. write data into the transform stream (writable side) → 2. the stream transforms the data → 3. read transformed data out (readable side)]
GZIP EXAMPLE
[Diagram: uncompressed data is written in, compressed by zlib.createGzip(), and compressed data is read out.]
HOW CAN WE USE TRANSFORM STREAMS?
[Diagram: Readable → (data event) → write() → Transform → (data event) → write() → Writable.
Backpressure propagates backwards: when write() returns false the upstream stream is paused, then resumed on the destination's drain event — first between the Transform and the Writable, then between the Readable and the Transform.]
You also have to handle end & error events!
// stream-copy-gzip.js
const {
  createReadStream,
  createWriteStream
} = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)

srcStream.on('data', data => {
  const canContinue = gzipStream.write(data)
  if (!canContinue) {
    srcStream.pause()
    gzipStream.once('drain', () => {
      srcStream.resume()
    })
  }
})
srcStream.on('end', () => {
  // check if there's buffered data left
  const remainingData = gzipStream.read()
  if (remainingData !== null) {
    destStream.write(remainingData)
  }
  gzipStream.end()
})

gzipStream.on('data', data => {
  const canContinue = destStream.write(data)
  if (!canContinue) {
    gzipStream.pause()
    destStream.once('drain', () => {
      gzipStream.resume()
    })
  }
})
gzipStream.on('end', () => {
  destStream.end()
})

// ⚠ TODO: handle errors!
03. PIPE()
readable.pipe(writableDest)

Connects a readable stream to a writable stream
A transform stream can be used as a destination as well
It returns the destination stream, allowing for a chain of pipes

readable
  .pipe(transform1)
  .pipe(transform2)
  .pipe(transform3)
  .pipe(writable)
// stream-copy-gzip-pipe.js
const {
  createReadStream,
  createWriteStream
} = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)

srcStream
  .pipe(gzipStream)
  .pipe(destStream)
Set up complex pipelines with pipe
This is the most common way to use streams

readable
  .pipe(decompress)
  .pipe(decrypt)
  .pipe(convert)
  .pipe(encrypt)
  .pipe(compress)
  .pipe(writeToDisk)
Handling errors (correctly)

readable
  .on('error', handleErr)
  .pipe(decompress)
  .on('error', handleErr)
  .pipe(decrypt)
  .on('error', handleErr)
  .pipe(convert)
  .on('error', handleErr)
  .pipe(encrypt)
  .on('error', handleErr)
  .pipe(compress)
  .on('error', handleErr)
  .pipe(writeToDisk)
  .on('error', handleErr)
handleErr should end and destroy the streams
(it doesn't happen automatically)
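A minimal sketch of such a handler (my addition, assuming every stream in the chain is in scope):

function handleErr (err) {
  console.error(`Pipeline failed: ${err}`)
  // pipe() does not clean up on failure, so tear everything down explicitly
  const streams = [readable, decompress, decrypt, convert, encrypt, compress, writeToDisk]
  streams.forEach(stream => stream.destroy())
}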
04. STREAM UTILITIES
stream.pipeline(...streams, callback) - Node.js 10+

// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv

pipeline(
  createReadStream(src),
  createGzip(),
  createWriteStream(dest),
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }
    console.log('Done!')
  }
)
You can pass multiple streams (they will be piped)
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')
const [, , src, dest] = process.argv
pipeline(
createReadStream(src),
createGzip(),
createWriteStream(dest),
function onEnd (err) {
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
}
)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
const { pipeline } = require('stream')
pipeline(
)
// stream-copy-gzip-pipeline.js1
2
3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
21
createReadStream(src),
createGzip(),
createWriteStream(dest),
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
10
11
12
function onEnd (err) {13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
}20
)21
function onEnd (err) {
}
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
13
if (err) {14
console.error(`Error: ${err}`)15
process.exit(1)16
}17
18
console.log('Done!')19
20
)21
if (err) {
console.error(`Error: ${err}`)
process.exit(1)
}
console.log('Done!')
// stream-copy-gzip-pipeline.js1
2
const { pipeline } = require('stream')3
const { createReadStream, createWriteStream } = require('fs')4
const { createGzip } = require('zlib')5
6
const [, , src, dest] = process.argv7
8
pipeline(9
createReadStream(src),10
createGzip(),11
createWriteStream(dest),12
function onEnd (err) {13
14
15
16
17
18
19
}20
)21
stream.pipeline(...streams, callback) - Node.js 10+
@loige
You can pass multiple streams (they will be piped)
The last argument is a callback. If invoked with an
error, it means the pipeline failed at some point.
All the streams are ended and destroyed correctly.
57
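Because that last argument is a standard error-first callback, pipeline can also be promisified — a small sketch, not on the slides:

const { promisify } = require('util')
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const pipelinePromise = promisify(pipeline)

async function copyGzip (src, dest) {
  // same behaviour as the callback version above, but awaitable
  await pipelinePromise(
    createReadStream(src),
    createGzip(),
    createWriteStream(dest)
  )
  console.log('Done!')
}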
For Node.js < 10: pump - npm.im/pump

// stream-copy-gzip-pump.js
const pump = require('pump') // from npm
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv

pump( // just swap pipeline with pump!
  createReadStream(src),
  createGzip(),
  createWriteStream(dest),
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }

    console.log('Done!')
  }
)

@loige58
pumpify(...streams) - npm.im/pumpify
Create reusable pieces of pipeline

Let's create EncGz, an application that helps us
read and write encrypted, gzipped files

@loige59
// encgz-stream.js - utility library
const {
  createCipheriv,
  createDecipheriv,
  randomBytes,
  createHash
} = require('crypto')
const { createGzip, createGunzip } = require('zlib')
const pumpify = require('pumpify') // from npm

// calculates md5 of the secret (trimmed)
// (body filled in here as an assumption: an md5 hex digest is
// 32 chars, which is a valid 32-byte key for aes256)
function getCipherKey (secret) {
  return createHash('md5').update(secret.trim()).digest('hex')
}

function createEncgz (secret) {
  const initVect = randomBytes(16)
  const cipherKey = getCipherKey(secret)
  const encryptStream = createCipheriv('aes256', cipherKey, initVect)
  const gzipStream = createGzip()

  const stream = pumpify(encryptStream, gzipStream)
  stream.initVect = initVect

  return stream
}

@loige60
// encgz-stream.js (...continue from previous slide)
function createDecgz (secret, initVect) {
  const cipherKey = getCipherKey(secret)
  const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
  const gunzipStream = createGunzip()

  const stream = pumpify(gunzipStream, decryptStream)
  return stream
}

module.exports = {
  createEncgz,
  createDecgz
}

@loige61
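A quick way to sanity-check the two helpers before building the CLIs is to pipe them back to back — a sketch, assuming the module above is saved as encgz-stream.js (the secret is an arbitrary example):

// round-trip: encrypt+gzip followed by gunzip+decrypt should be an identity
const { pipeline } = require('stream')
const { createEncgz, createDecgz } = require('./encgz-stream')

const secret = 'mypassword' // example value
const encgz = createEncgz(secret)
const decgz = createDecgz(secret, encgz.initVect)

pipeline(process.stdin, encgz, decgz, process.stdout, (err) => {
  if (err) {
    console.error(`Error: ${err}`)
    process.exit(1)
  }
})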
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')

const [, , secret] = process.argv

const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)

pipeline(
  process.stdin,
  encgz,
  process.stdout,
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }
  }
)

@loige62
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')

const [, , secret, initVect] = process.argv

const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))

pipeline(
  process.stdin,
  decgz,
  process.stdout,
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }
  }
)

@loige63
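Putting the two CLIs together, a hypothetical round trip would look like: node encgz.js mysecret < plain.txt > data.encgz (the init vector is printed on stderr), then node decgz.js mysecret <initVectHex> < data.encgz > plain-copy.txt (the secret and file names are just examples).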
@loige64
readable-stream - npm.im/readable-stream
npm package that contains the latest version of the Node.js stream library.
It also makes Node.js streams compatible with the browser (can be used with
Webpack and Browserify)*

* yeah, the name is misleading. The package offers all the functionality of
the official 'stream' module, not just readable streams.

@loige65
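In practice the swap is a one-line change — a minimal sketch, assuming the package's usual exports:

// drop-in replacement for require('stream'); same API, but bundler-friendly
const { Readable, Transform, pipeline } = require('readable-stream')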
05. WRITING CUSTOM STREAMS

@loige66
@loige

EmojiStream → Uppercasify → DOMAppend

🍋 Lemon  →  🍋 LEMON
🍌 Banana  →  🍌 BANANA

class EmojiStream
  extends Readable {
  _read() {
    // ...
  }
}

class Uppercasify
  extends Transform {
  _transform(
    chunk,
    enc,
    done
  ) {
    // ...
  }
}

@loige67
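A minimal runnable sketch of how those two skeletons could be completed (the emoji data is an assumption matching the slide; DOMAppend is a browser-side Writable and is left out here):

const { Readable, Transform } = require('stream')

class EmojiStream extends Readable {
  constructor (options) {
    super(options)
    // assumed sample data matching the slide
    this._data = ['🍋 Lemon', '🍌 Banana']
  }

  _read () {
    if (this._data.length === 0) {
      this.push(null) // signal the end of the stream
      return
    }
    this.push(`${this._data.shift()}\n`)
  }
}

class Uppercasify extends Transform {
  _transform (chunk, enc, done) {
    this.push(chunk.toString().toUpperCase())
    done()
  }
}

new EmojiStream()
  .pipe(new Uppercasify())
  .pipe(process.stdout) // stand-in for DOMAppend outside the browser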
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS
It’s about time to embrace Node.js Streams - MancJS

More Related Content

What's hot

X64服务器 lnmp服务器部署标准 new
X64服务器 lnmp服务器部署标准 newX64服务器 lnmp服务器部署标准 new
X64服务器 lnmp服务器部署标准 new
Yiwei Ma
 
On secure application of PHP wrappers
On secure application  of PHP wrappersOn secure application  of PHP wrappers
On secure application of PHP wrappers
Positive Hack Days
 
Devinsampa nginx-scripting
Devinsampa nginx-scriptingDevinsampa nginx-scripting
Devinsampa nginx-scripting
Tony Fabeen
 
Nginx 0.8.x 安装手册
Nginx 0.8.x 安装手册Nginx 0.8.x 安装手册
Nginx 0.8.x 安装手册
Yiwei Ma
 

What's hot (17)

livedoor blogのsorryサーバの話 #study2study
livedoor blogのsorryサーバの話 #study2studylivedoor blogのsorryサーバの話 #study2study
livedoor blogのsorryサーバの話 #study2study
 
Best practices for ansible roles development
Best practices for ansible roles developmentBest practices for ansible roles development
Best practices for ansible roles development
 
EC2
EC2EC2
EC2
 
Debugging: Rules & Tools
Debugging: Rules & ToolsDebugging: Rules & Tools
Debugging: Rules & Tools
 
Put on Your Asynchronous Hat and Node
Put on Your Asynchronous Hat and NodePut on Your Asynchronous Hat and Node
Put on Your Asynchronous Hat and Node
 
X64服务器 lnmp服务器部署标准 new
X64服务器 lnmp服务器部署标准 newX64服务器 lnmp服务器部署标准 new
X64服务器 lnmp服务器部署标准 new
 
On secure application of PHP wrappers
On secure application  of PHP wrappersOn secure application  of PHP wrappers
On secure application of PHP wrappers
 
MongoDB: How it Works
MongoDB: How it WorksMongoDB: How it Works
MongoDB: How it Works
 
Dtalk shell
Dtalk shellDtalk shell
Dtalk shell
 
Lightning fast with Varnish
Lightning fast with VarnishLightning fast with Varnish
Lightning fast with Varnish
 
ZeroMQ Is The Answer
ZeroMQ Is The AnswerZeroMQ Is The Answer
ZeroMQ Is The Answer
 
Devinsampa nginx-scripting
Devinsampa nginx-scriptingDevinsampa nginx-scripting
Devinsampa nginx-scripting
 
Backups
BackupsBackups
Backups
 
KubeCon EU 2016: Custom Volume Plugins
KubeCon EU 2016: Custom Volume PluginsKubeCon EU 2016: Custom Volume Plugins
KubeCon EU 2016: Custom Volume Plugins
 
Puppet Camp Phoenix 2015: Managing Files via Puppet: Let Me Count The Ways (B...
Puppet Camp Phoenix 2015: Managing Files via Puppet: Let Me Count The Ways (B...Puppet Camp Phoenix 2015: Managing Files via Puppet: Let Me Count The Ways (B...
Puppet Camp Phoenix 2015: Managing Files via Puppet: Let Me Count The Ways (B...
 
Free BSD次の一歩
Free BSD次の一歩Free BSD次の一歩
Free BSD次の一歩
 
Nginx 0.8.x 安装手册
Nginx 0.8.x 安装手册Nginx 0.8.x 安装手册
Nginx 0.8.x 安装手册
 

Similar to It’s about time to embrace Node.js Streams - MancJS

Filesinc 130512002619-phpapp01
Filesinc 130512002619-phpapp01Filesinc 130512002619-phpapp01
Filesinc 130512002619-phpapp01
Rex Joe
 

Similar to It’s about time to embrace Node.js Streams - MancJS (20)

"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino"It’s about time to embrace Streams" Luciano Mammino
"It’s about time to embrace Streams" Luciano Mammino
 
It’s about time to embrace Streams (Node Ukraine 2019)
 It’s about time to embrace Streams (Node Ukraine 2019) It’s about time to embrace Streams (Node Ukraine 2019)
It’s about time to embrace Streams (Node Ukraine 2019)
 
Mastering Grunt
Mastering GruntMastering Grunt
Mastering Grunt
 
working with files
working with filesworking with files
working with files
 
17 files and streams
17 files and streams17 files and streams
17 files and streams
 
File System.pptx
File System.pptxFile System.pptx
File System.pptx
 
File Handling in C.pptx
File Handling in C.pptxFile Handling in C.pptx
File Handling in C.pptx
 
It’s about time to embrace Node.js Streams
It’s about time to embrace Node.js StreamsIt’s about time to embrace Node.js Streams
It’s about time to embrace Node.js Streams
 
Tutorial on Node File System
Tutorial on Node File SystemTutorial on Node File System
Tutorial on Node File System
 
Data File Handiling File POINTERS IN C++
Data File Handiling File POINTERS IN C++Data File Handiling File POINTERS IN C++
Data File Handiling File POINTERS IN C++
 
File handling in cpp
File handling in cppFile handling in cpp
File handling in cpp
 
General Functions
General FunctionsGeneral Functions
General Functions
 
Advance C Programming UNIT 4-FILE HANDLING IN C.pdf
Advance C Programming UNIT 4-FILE HANDLING IN C.pdfAdvance C Programming UNIT 4-FILE HANDLING IN C.pdf
Advance C Programming UNIT 4-FILE HANDLING IN C.pdf
 
Apache: Big Data - Starting with Apache Spark, Best Practices
Apache: Big Data - Starting with Apache Spark, Best PracticesApache: Big Data - Starting with Apache Spark, Best Practices
Apache: Big Data - Starting with Apache Spark, Best Practices
 
ELK: a log management framework
ELK: a log management frameworkELK: a log management framework
ELK: a log management framework
 
data file handling
data file handlingdata file handling
data file handling
 
C Programming Unit-5
C Programming Unit-5C Programming Unit-5
C Programming Unit-5
 
It's about time to embrace Node.js streams
It's about time to embrace Node.js streamsIt's about time to embrace Node.js streams
It's about time to embrace Node.js streams
 
Files in c++
Files in c++Files in c++
Files in c++
 
Filesinc 130512002619-phpapp01
Filesinc 130512002619-phpapp01Filesinc 130512002619-phpapp01
Filesinc 130512002619-phpapp01
 

More from Luciano Mammino

More from Luciano Mammino (20)

Did you know JavaScript has iterators? DublinJS
Did you know JavaScript has iterators? DublinJSDid you know JavaScript has iterators? DublinJS
Did you know JavaScript has iterators? DublinJS
 
What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...
What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...
What I learned by solving 50 Advent of Code challenges in Rust - RustNation U...
 
Building an invite-only microsite with Next.js & Airtable - ReactJS Milano
Building an invite-only microsite with Next.js & Airtable - ReactJS MilanoBuilding an invite-only microsite with Next.js & Airtable - ReactJS Milano
Building an invite-only microsite with Next.js & Airtable - ReactJS Milano
 
From Node.js to Design Patterns - BuildPiper
From Node.js to Design Patterns - BuildPiperFrom Node.js to Design Patterns - BuildPiper
From Node.js to Design Patterns - BuildPiper
 
Let's build a 0-cost invite-only website with Next.js and Airtable!
Let's build a 0-cost invite-only website with Next.js and Airtable!Let's build a 0-cost invite-only website with Next.js and Airtable!
Let's build a 0-cost invite-only website with Next.js and Airtable!
 
Everything I know about S3 pre-signed URLs
Everything I know about S3 pre-signed URLsEverything I know about S3 pre-signed URLs
Everything I know about S3 pre-signed URLs
 
Serverless for High Performance Computing
Serverless for High Performance ComputingServerless for High Performance Computing
Serverless for High Performance Computing
 
Serverless for High Performance Computing
Serverless for High Performance ComputingServerless for High Performance Computing
Serverless for High Performance Computing
 
JavaScript Iteration Protocols - Workshop NodeConf EU 2022
JavaScript Iteration Protocols - Workshop NodeConf EU 2022JavaScript Iteration Protocols - Workshop NodeConf EU 2022
JavaScript Iteration Protocols - Workshop NodeConf EU 2022
 
Building an invite-only microsite with Next.js & Airtable
Building an invite-only microsite with Next.js & AirtableBuilding an invite-only microsite with Next.js & Airtable
Building an invite-only microsite with Next.js & Airtable
 
Let's take the monolith to the cloud 🚀
Let's take the monolith to the cloud 🚀Let's take the monolith to the cloud 🚀
Let's take the monolith to the cloud 🚀
 
A look inside the European Covid Green Certificate - Rust Dublin
A look inside the European Covid Green Certificate - Rust DublinA look inside the European Covid Green Certificate - Rust Dublin
A look inside the European Covid Green Certificate - Rust Dublin
 
Monoliths to the cloud!
Monoliths to the cloud!Monoliths to the cloud!
Monoliths to the cloud!
 
The senior dev
The senior devThe senior dev
The senior dev
 
Node.js: scalability tips - Azure Dev Community Vijayawada
Node.js: scalability tips - Azure Dev Community VijayawadaNode.js: scalability tips - Azure Dev Community Vijayawada
Node.js: scalability tips - Azure Dev Community Vijayawada
 
A look inside the European Covid Green Certificate (Codemotion 2021)
A look inside the European Covid Green Certificate (Codemotion 2021)A look inside the European Covid Green Certificate (Codemotion 2021)
A look inside the European Covid Green Certificate (Codemotion 2021)
 
AWS Observability Made Simple
AWS Observability Made SimpleAWS Observability Made Simple
AWS Observability Made Simple
 
Semplificare l'observability per progetti Serverless
Semplificare l'observability per progetti ServerlessSemplificare l'observability per progetti Serverless
Semplificare l'observability per progetti Serverless
 
Finding a lost song with Node.js and async iterators - NodeConf Remote 2021
Finding a lost song with Node.js and async iterators - NodeConf Remote 2021Finding a lost song with Node.js and async iterators - NodeConf Remote 2021
Finding a lost song with Node.js and async iterators - NodeConf Remote 2021
 
Finding a lost song with Node.js and async iterators - EnterJS 2021
Finding a lost song with Node.js and async iterators - EnterJS 2021Finding a lost song with Node.js and async iterators - EnterJS 2021
Finding a lost song with Node.js and async iterators - EnterJS 2021
 

Recently uploaded

Artificial Intelligence: Facts and Myths
Artificial Intelligence: Facts and MythsArtificial Intelligence: Facts and Myths
Artificial Intelligence: Facts and Myths
Joaquim Jorge
 

Recently uploaded (20)

From Event to Action: Accelerate Your Decision Making with Real-Time Automation
From Event to Action: Accelerate Your Decision Making with Real-Time AutomationFrom Event to Action: Accelerate Your Decision Making with Real-Time Automation
From Event to Action: Accelerate Your Decision Making with Real-Time Automation
 
Connector Corner: Accelerate revenue generation using UiPath API-centric busi...
Connector Corner: Accelerate revenue generation using UiPath API-centric busi...Connector Corner: Accelerate revenue generation using UiPath API-centric busi...
Connector Corner: Accelerate revenue generation using UiPath API-centric busi...
 
How to Troubleshoot Apps for the Modern Connected Worker
How to Troubleshoot Apps for the Modern Connected WorkerHow to Troubleshoot Apps for the Modern Connected Worker
How to Troubleshoot Apps for the Modern Connected Worker
 
Apidays New York 2024 - Scaling API-first by Ian Reasor and Radu Cotescu, Adobe
Apidays New York 2024 - Scaling API-first by Ian Reasor and Radu Cotescu, AdobeApidays New York 2024 - Scaling API-first by Ian Reasor and Radu Cotescu, Adobe
Apidays New York 2024 - Scaling API-first by Ian Reasor and Radu Cotescu, Adobe
 
Bajaj Allianz Life Insurance Company - Insurer Innovation Award 2024
Bajaj Allianz Life Insurance Company - Insurer Innovation Award 2024Bajaj Allianz Life Insurance Company - Insurer Innovation Award 2024
Bajaj Allianz Life Insurance Company - Insurer Innovation Award 2024
 
Powerful Google developer tools for immediate impact! (2023-24 C)
Powerful Google developer tools for immediate impact! (2023-24 C)Powerful Google developer tools for immediate impact! (2023-24 C)
Powerful Google developer tools for immediate impact! (2023-24 C)
 
GenCyber Cyber Security Day Presentation
GenCyber Cyber Security Day PresentationGenCyber Cyber Security Day Presentation
GenCyber Cyber Security Day Presentation
 
GenAI Risks & Security Meetup 01052024.pdf
GenAI Risks & Security Meetup 01052024.pdfGenAI Risks & Security Meetup 01052024.pdf
GenAI Risks & Security Meetup 01052024.pdf
 
Tech Trends Report 2024 Future Today Institute.pdf
Tech Trends Report 2024 Future Today Institute.pdfTech Trends Report 2024 Future Today Institute.pdf
Tech Trends Report 2024 Future Today Institute.pdf
 
Axa Assurance Maroc - Insurer Innovation Award 2024
Axa Assurance Maroc - Insurer Innovation Award 2024Axa Assurance Maroc - Insurer Innovation Award 2024
Axa Assurance Maroc - Insurer Innovation Award 2024
 
Developing An App To Navigate The Roads of Brazil
Developing An App To Navigate The Roads of BrazilDeveloping An App To Navigate The Roads of Brazil
Developing An App To Navigate The Roads of Brazil
 
Automating Google Workspace (GWS) & more with Apps Script
Automating Google Workspace (GWS) & more with Apps ScriptAutomating Google Workspace (GWS) & more with Apps Script
Automating Google Workspace (GWS) & more with Apps Script
 
HTML Injection Attacks: Impact and Mitigation Strategies
HTML Injection Attacks: Impact and Mitigation StrategiesHTML Injection Attacks: Impact and Mitigation Strategies
HTML Injection Attacks: Impact and Mitigation Strategies
 
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
04-2024-HHUG-Sales-and-Marketing-Alignment.pptx
 
🐬 The future of MySQL is Postgres 🐘
🐬  The future of MySQL is Postgres   🐘🐬  The future of MySQL is Postgres   🐘
🐬 The future of MySQL is Postgres 🐘
 
TrustArc Webinar - Unlock the Power of AI-Driven Data Discovery
TrustArc Webinar - Unlock the Power of AI-Driven Data DiscoveryTrustArc Webinar - Unlock the Power of AI-Driven Data Discovery
TrustArc Webinar - Unlock the Power of AI-Driven Data Discovery
 
presentation ICT roal in 21st century education
presentation ICT roal in 21st century educationpresentation ICT roal in 21st century education
presentation ICT roal in 21st century education
 
Exploring the Future Potential of AI-Enabled Smartphone Processors
Exploring the Future Potential of AI-Enabled Smartphone ProcessorsExploring the Future Potential of AI-Enabled Smartphone Processors
Exploring the Future Potential of AI-Enabled Smartphone Processors
 
AWS Community Day CPH - Three problems of Terraform
AWS Community Day CPH - Three problems of TerraformAWS Community Day CPH - Three problems of Terraform
AWS Community Day CPH - Three problems of Terraform
 
Artificial Intelligence: Facts and Myths
Artificial Intelligence: Facts and MythsArtificial Intelligence: Facts and Myths
Artificial Intelligence: Facts and Myths
 

It’s about time to embrace Node.js Streams - MancJS

  • 2. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 @loige2
  • 3. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 @loige2
  • 4. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // read entire file content const content = readFileSync(src) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 11 12 // write that content somewhere else13 writeFileSync(dest, content)14 @loige2
  • 5. // buffer-copy.js const { readFileSync, writeFileSync } = require('fs') const [,, src, dest] = process.argv // read entire file content const content = readFileSync(src) // write that content somewhere else writeFileSync(dest, content) 1 2 3 4 5 6 7 8 9 10 11 12 13 14 const { readFileSync, writeFileSync } = require('fs') // buffer-copy.js1 2 3 4 5 6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // read entire file content const content = readFileSync(src) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 10 11 12 // write that content somewhere else13 writeFileSync(dest, content)14 // write that content somewhere else writeFileSync(dest, content) // buffer-copy.js1 2 const {3 readFileSync,4 writeFileSync5 } = require('fs')6 7 const [,, src, dest] = process.argv8 9 // read entire file content10 const content = readFileSync(src)11 12 13 14 @loige2
  • 29. FILE COPY: THE BUFFER WAY

// buffer-copy.js
const {
  readFileSync,
  writeFileSync
} = require('fs')

const [,, src, dest] = process.argv
const content = readFileSync(src)
writeFileSync(dest, content)
  • 30-33. FILE COPY: THE STREAM WAY

// stream-copy.js
const {
  createReadStream,
  createWriteStream
} = require('fs')

const [,, src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)
srcStream.on('data', (data) => destStream.write(data))

* Careful: this implementation is not optimal *
  • 40. ALL STREAMS ARE EVENT EMITTERS

A stream instance is an object that emits events when its internal state changes, for instance:

s.on('readable', () => {}) // ready to be consumed
s.on('data', (chunk) => {}) // new data is available
s.on('error', (err) => {}) // some error happened
s.on('end', () => {}) // no more data available

The events available depend on the type of stream
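As a concrete illustration (a minimal sketch, not on the original slides; 'data.txt' is a placeholder path), the same events can be observed on a file stream:

// stream-events-example.js - a sketch, not from the deck
const { createReadStream } = require('fs')

const s = createReadStream('data.txt') // placeholder path
s.on('data', (chunk) => console.log(`got ${chunk.length} bytes`))
s.on('end', () => console.log('no more data available'))
s.on('error', (err) => console.error(`something went wrong: ${err}`))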
  • 52-58. // count-emojis-flowing.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

const emojis = Object.keys(EMOJI_MAP)

const file = createReadStream(process.argv[2])
let counter = 0

file.on('data', chunk => {
  for (let char of chunk.toString('utf8')) {
    if (emojis.includes(char)) {
      counter++
    }
  }
})
file.on('end', () => console.log(`Found ${counter} emojis`))
file.on('error', err => console.error(`Error reading file: ${err}`))
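The snippet above consumes the stream in flowing mode. The same counter can also be written against paused mode (a sketch, not shown in this portion of the deck), pulling chunks explicitly with read() on every 'readable' event:

// count-emojis-paused.js - a sketch, not from the deck
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

const emojis = Object.keys(EMOJI_MAP)
const file = createReadStream(process.argv[2])
let counter = 0

file.on('readable', () => {
  let chunk
  // read() returns null once the internal buffer is empty
  while ((chunk = file.read()) !== null) {
    for (let char of chunk.toString('utf8')) {
      if (emojis.includes(char)) {
        counter++
      }
    }
  }
})
file.on('end', () => console.log(`Found ${counter} emojis`))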
  • 61-63. // count-emojis-async-iterator.js
const { createReadStream } = require('fs')
const { EMOJI_MAP } = require('emoji') // from npm

async function main () {
  const emojis = Object.keys(EMOJI_MAP)
  const file = createReadStream(process.argv[2])
  let counter = 0

  for await (let chunk of file) {
    for (let char of chunk.toString('utf8')) {
      if (emojis.includes(char)) {
        counter++
      }
    }
  }

  console.log(`Found ${counter} emojis`)
}

main()
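One nice property of the async-iterator form, not spelled out on these slides: stream errors surface as rejections of the awaited iteration, so they can be handled with an ordinary try/catch (a minimal sketch):

// a sketch: a stream 'error' rejects the for await loop
const { createReadStream } = require('fs')

async function main () {
  const file = createReadStream(process.argv[2])
  try {
    for await (let chunk of file) {
      // ... process chunk as in the slide above ...
    }
  } catch (err) {
    console.error(`Error reading file: ${err}`)
  }
}

main()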
  • 65-68. // writable-http-request.js
const http = require('http')

const req = http.request(
  {
    hostname: 'enx6b07hdu6cs.x.pipedream.net',
    method: 'POST'
  },
  resp => {
    console.log(`Server responded with "${resp.statusCode}"`)
  }
)

req.on('finish', () => console.log('request sent'))
req.on('close', () => console.log('Connection closed'))
req.on('error', err => console.error(`Request failed: ${err}`))

req.write('writing some content...\n')
req.end('last write & close the stream')
  • 72-77. // stream-copy-safe.js
const { createReadStream, createWriteStream } = require('fs')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const destStream = createWriteStream(dest)

srcStream.on('data', data => {
  const canContinue = destStream.write(data)
  if (!canContinue) {
    // we are overflowing the destination, we should pause
    srcStream.pause()
    // we will resume when the destination stream is drained
    destStream.once('drain', () => srcStream.resume())
  }
})
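Worth noting: this pause/drain/resume dance is exactly the backpressure handling that readable.pipe(writable) implements for you (as the deck shows a few slides later), so the whole 'data' handler above collapses to a single line:

srcStream.pipe(destStream)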
  • 95-100. HOW CAN WE USE TRANSFORM STREAMS?

[Diagram] Readable → Transform → Writable: the Readable's ⚡ data events feed write() calls on the Transform, and the Transform's ⚡ data events feed write() calls on the Writable. On each link, when write() returns false the upstream side is pause()d and resume()d on the next ⚡ drain event (backpressure applies on both links).

You also have to handle end & error events!
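To make the middle box concrete (a minimal sketch, not from this portion of the deck): a Transform stream that uppercases whatever flows through it, wired between two other streams with pipe():

// uppercase-transform.js - a sketch, not from the deck
const { Transform } = require('stream')

const uppercasify = new Transform({
  transform (chunk, encoding, done) {
    // push the transformed data downstream, then signal completion
    this.push(chunk.toString('utf8').toUpperCase())
    done()
  }
})

process.stdin
  .pipe(uppercasify)
  .pipe(process.stdout)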
  • 101-102. // stream-copy-gzip.js
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)

srcStream.on('data', data => {
  const canContinue = gzipStream.write(data)
  if (!canContinue) {
    srcStream.pause()
    gzipStream.once('drain', () => {
      srcStream.resume()
    })
  }
})

srcStream.on('end', () => {
  // check if there's buffered data left
  const remainingData = gzipStream.read()
  if (remainingData !== null) {
    destStream.write(remainingData)
  }
  gzipStream.end()
})

gzipStream.on('data', data => {
  const canContinue = destStream.write(data)
  if (!canContinue) {
    gzipStream.pause()
    destStream.once('drain', () => {
      gzipStream.resume()
    })
  }
})

gzipStream.on('end', () => {
  destStream.end()
})

// ⚠ TODO: handle errors!
  • 105-106. // stream-copy-gzip-pipe.js
const {
  createReadStream,
  createWriteStream
} = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv
const srcStream = createReadStream(src)
const gzipStream = createGzip()
const destStream = createWriteStream(dest)

srcStream
  .pipe(gzipStream)
  .pipe(destStream)
  • 108-111. Handling errors (correctly)

readable
  .on('error', handleErr)
  .pipe(decompress)
  .on('error', handleErr)
  .pipe(decrypt)
  .on('error', handleErr)
  .pipe(convert)
  .on('error', handleErr)
  .pipe(encrypt)
  .on('error', handleErr)
  .pipe(compress)
  .on('error', handleErr)
  .pipe(writeToDisk)
  .on('error', handleErr)

handleErr should end and destroy the streams (it doesn't happen automatically)
  • 113-117. stream.pipeline(...streams, callback) - Node.js 10+

// stream-copy-gzip-pipeline.js
const { pipeline } = require('stream')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv

pipeline(
  createReadStream(src),
  createGzip(),
  createWriteStream(dest),
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }

    console.log('Done!')
  }
)

You can pass multiple streams (they will be piped)

The last argument is a callback. If invoked with an error, it means the pipeline failed at some point. All the streams are ended and destroyed correctly.
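In more recent Node.js versions (15+), the same helper is also exposed in a promise-based flavour through the stream/promises module; a minimal sketch of the same gzip copy (not from the original deck):

// stream-copy-gzip-pipeline-promise.js - a sketch, requires Node.js 15+
const { pipeline } = require('stream/promises')
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv

async function main () {
  try {
    // resolves when the pipeline completes, rejects on any stream error
    await pipeline(
      createReadStream(src),
      createGzip(),
      createWriteStream(dest)
    )
    console.log('Done!')
  } catch (err) {
    console.error(`Error: ${err}`)
    process.exit(1)
  }
}

main()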
  • 118-119. For Node.js < 10: pump - npm.im/pump

// stream-copy-gzip-pump.js
const pump = require('pump') // from npm
const { createReadStream, createWriteStream } = require('fs')
const { createGzip } = require('zlib')

const [, , src, dest] = process.argv

pump( // just swap pipeline with pump!
  createReadStream(src),
  createGzip(),
  createWriteStream(dest),
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }

    console.log('Done!')
  }
)
  • 121. // encgz-stream.js - utility library const { createCipheriv, createDecipheriv, randomBytes, createHash } = require('crypto') const { createGzip, createGunzip } = require('zlib') const pumpify = require('pumpify') // from npm // calculates md5 of the secret (trimmed) function getChiperKey (secret) {} function createEncgz (secret) { const initVect = randomBytes(16) const cipherKey = getChiperKey(secret) const encryptStream = createCipheriv('aes256', cipherKey, initVect) const gzipStream = createGzip() const stream = pumpify(encryptStream, gzipStream) stream.initVect = initVect return stream } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 @loige60
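pumpify (from npm) glues the two streams into a single duplex: whatever you write goes into encryptStream, what you read comes out of gzipStream, and an error in either stream surfaces on the combined one. Attaching initVect to the returned stream lets the caller grab it for later decryption. A minimal sketch of driving the combined stream directly (assuming the module above, with the key-derivation body filled in as shown):

const { createEncgz } = require('./encgz-stream')

const encgz = createEncgz('mysecret')
const chunks = []
encgz.on('data', (chunk) => chunks.push(chunk))
encgz.on('end', () => {
  console.log(`got ${Buffer.concat(chunks).length} encrypted+gzipped bytes`)
  console.log(`init vector to keep: ${encgz.initVect.toString('hex')}`)
})
encgz.end('hello streams') // write one chunk, then end the writable side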
  • 126–130.
// encgz-stream.js (...continue from previous slide)

function createDecgz (secret, initVect) {
  const cipherKey = getChiperKey(secret)
  const decryptStream = createDecipheriv('aes256', cipherKey, initVect)
  const gunzipStream = createGunzip()

  const stream = pumpify(gunzipStream, decryptStream)
  return stream
}

module.exports = {
  createEncgz,
  createDecgz
}
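With both halves of the library in place, a quick way to sanity-check it is an in-memory round trip: encrypt-and-gzip, then gunzip-and-decrypt with the same secret and init vector. A sketch of a hypothetical test script (not part of the slides):

// roundtrip.js - sketch: plaintext in, plaintext out
const { pipeline } = require('stream')
const { createEncgz, createDecgz } = require('./encgz-stream')

const secret = 'mysecret'
const encgz = createEncgz(secret)
// reuse the init vector exposed by the encrypting stream
const decgz = createDecgz(secret, encgz.initVect)

pipeline(encgz, decgz, process.stdout, (err) => {
  if (err) {
    console.error(`Error: ${err}`)
    process.exit(1)
  }
})

encgz.end('hello streams') // should print "hello streams"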
  • 131–135.
// encgz.js - CLI to encrypt and gzip (from stdin to stdout)
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')

const [, , secret] = process.argv

const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)

pipeline(
  process.stdin,
  encgz,
  process.stdout,
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }
  }
)
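Note how pipeline accepts any number of streams (source, zero or more transforms, destination) plus a final error-first callback, and it tears all the streams down if any of them fails. If you prefer async/await, the same CLI body works with a promisified pipeline; a sketch assuming Node 10+ (newer Node versions also ship a promise-based pipeline, but util.promisify is enough here):

// encgz-promise.js - sketch: same CLI, promisified pipeline
const { pipeline } = require('stream')
const { promisify } = require('util')
const { createEncgz } = require('./encgz-stream')

const pipelinePromise = promisify(pipeline)

const [, , secret] = process.argv
const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)

pipelinePromise(process.stdin, encgz, process.stdout)
  .catch((err) => {
    console.error(`Error: ${err}`)
    process.exit(1)
  })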
  • 136–140.
// decgz.js - CLI to gunzip and decrypt (from stdin to stdout)
const { pipeline } = require('stream')
const { createDecgz } = require('./encgz-stream')

const [, , secret, initVect] = process.argv

const decgz = createDecgz(secret, Buffer.from(initVect, 'hex'))

pipeline(
  process.stdin,
  decgz,
  process.stdout,
  function onEnd (err) {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }
  }
)
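Because both CLIs read stdin and write stdout, they compose with shell pipes and redirection like any Unix filter. The same building blocks also compose with file streams; here is a hypothetical file-to-file variant (script name and argument order are assumptions, not from the slides):

// encgz-file.js - sketch: encrypt+gzip a file into another file
const fs = require('fs')
const { pipeline } = require('stream')
const { createEncgz } = require('./encgz-stream')

const [, , secret, src, dest] = process.argv

const encgz = createEncgz(secret)
console.error(`init vector: ${encgz.initVect.toString('hex')}`)

pipeline(
  fs.createReadStream(src),
  encgz,
  fs.createWriteStream(dest),
  (err) => {
    if (err) {
      console.error(`Error: ${err}`)
      process.exit(1)
    }
  }
)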
  • 151–152.
EmojiStream → Uppercasify → DOMAppend
(sample chunks on the slide: 🍋 LEMON, 🍌 BANANA)

class EmojiStream extends Readable {
  _read() {
    // ...
  }
}

class Uppercasify extends Transform {
  _transform(
    chunk,
    enc,
    done
  ) {
    // ...
  }
}
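The slides only show the class skeletons. One possible way to fill in the "// ..." bodies, under the assumption that EmojiStream emits one labelled emoji per read and Uppercasify upper-cases every chunk (DOMAppend is a browser-side Writable, so process.stdout stands in for it in this Node sketch):

const { Readable, Transform } = require('stream')

class EmojiStream extends Readable {
  constructor (options) {
    super(options)
    // sample data (emoji glyphs and lowercase source labels assumed)
    this._emojis = ['🍋 lemon', '🍌 banana']
  }

  _read () {
    // push one chunk per call; pushing null signals the end of the stream
    const next = this._emojis.shift()
    this.push(next ? `${next}\n` : null)
  }
}

class Uppercasify extends Transform {
  _transform (chunk, enc, done) {
    // forward every chunk, upper-cased ("🍋 lemon" becomes "🍋 LEMON")
    this.push(chunk.toString().toUpperCase())
    done()
  }
}

new EmojiStream()
  .pipe(new Uppercasify())
  .pipe(process.stdout) // DOMAppend would play this role in the browser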