Skip to content

Commit 28d1ea9

Browse files
authored
fix: fix documentation (#78)
Much of the example code was not compilable. This fixes it and adds some unit tests so that important properties not previously tested are validated
1 parent 94745e7 commit 28d1ea9

9 files changed

Lines changed: 94 additions & 49 deletions

README.md

Lines changed: 48 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ import {
110110
fluentAsync,
111111
FluentIterable,
112112
FluentAsyncIterable,
113-
} from '**fluent-iterable**';
113+
} from '@codibre/fluent-iterable';
114114

115115
const iterableOfArray: FluentIterable<number> = fluent([3, 1, 8, 6, 9, 2]);
116116

@@ -137,7 +137,7 @@ async function* emails(): AsyncIterable<string> {
137137
if (!res.ok) {
138138
break;
139139
}
140-
yield* (await res.json()).data.map((user) => user.email);
140+
yield* (await res.json()).data.map((user: { email: string }) => user.email);
141141
}
142142
}
143143

@@ -172,11 +172,11 @@ function getNumberOfUsers(iterable: FluentAsyncIterable<ChatMessage>): Promise<n
172172
return getAllUsers(iterable).count();
173173
}
174174

175-
async function getMostActiveUser(iterable: FluentAsyncIterable<ChatMessage>): Promise<string> {
176-
const maxGroup: FluentGroup<ChatMessage> = await iterable
175+
async function getMostActiveUser(iterable: FluentAsyncIterable<ChatMessage>): Promise<string | undefined> {
176+
const maxGroup = await iterable
177177
.group(chatMessage => chatMessage.from) // group the messages by their sender
178178
.max(chatMessage => chatMessage.values.count()); // find one of the groups which has the most messages
179-
return maxGroup.key;
179+
return maxGroup?.key;
180180
}
181181

182182
async function hasUserSentEmptyMessage(iterable: FluentAsyncIterable<ChatMessage>, user: string): Promise<boolean> {
@@ -187,22 +187,39 @@ async function hasUserSentEmptyMessage(iterable: FluentAsyncIterable<ChatMessage
187187
async function createBackupSequential(iterable: FluentAsyncIterable<ChatMessage>): Promise<void> {
188188
await iterable
189189
.execute(chatMessage => console.log(`Backing up message ${chatMessage.id}.`)) // log progress w/o modifying the iterable
190-
.forEachAsync(chatMessage => fetch(BACKUP_URL, { // execute the asynchronous backup operation against all elements one-by-one
190+
.forEach(chatMessage => fetch(BACKUP_URL, { // execute the asynchronous backup operation against all elements one-by-one
191191
method: 'post',
192192
body: JSON.stringify(chatMessage),
193193
headers: { 'Content-Type': 'application/json' },
194194
}));
195195
}
196196

197197
async function createBackupParallel(iterable: FluentAsyncIterable<ChatMessage>): Promise<void> {
198-
const promises = iterable
198+
await iterable
199199
.execute(chatMessage => console.log(`Backing up message ${chatMessage.id}.`)) // log progress w/o modifying the iterable
200-
.map(chatMessage => fetch(BACKUP_URL, { // translate all elements into a promise of their asynchronous backup operation
201-
method: 'post',
202-
body: JSON.stringify(chatMessage),
203-
headers: { 'Content-Type': 'application/json' },
204-
}));
205-
await Promise.all(promises);
200+
.map(chatMessage => {
201+
const result = fetch(BACKUP_URL, { // translate all elements into a promise of their asynchronous backup operation
202+
method: 'post',
203+
body: JSON.stringify(chatMessage),
204+
headers: { 'Content-Type': 'application/json' },
205+
}).then(x => [x]);
206+
return fluentAsync(result);
207+
})
208+
// Joins everything in parallel, generating an AsyncIterable with the results in the order of what yielded first
209+
.flatMerge(
210+
(error) => console.log(error) // This callback will be called whenever some of the fetch calls throws an error
211+
)
212+
.last();
213+
}
214+
215+
async function createBackupParallelV2(iterable: FluentAsyncIterable<ChatMessage>): Promise<Response[]> {
216+
return iterable
217+
.execute(chatMessage => console.log(`Backing up message ${chatMessage.id}.`)) // log progress w/o modifying the iterable
218+
.waitAll(chatMessage => fetch(BACKUP_URL, { // translate all elements into a promise of their asynchronous backup operation
219+
method: 'post',
220+
body: JSON.stringify(chatMessage),
221+
headers: { 'Content-Type': 'application/json' },
222+
}));
206223
}
207224
```
208225

@@ -213,7 +230,7 @@ You can see a list of many advanced examples for **fluent** clicking [here!](adv
213230
#### Playing with Fibonacci generator
214231

215232
``` typescript
216-
import { fluent } from '**fluent-iterable**';
233+
import { fluent } from '@codibre/fluent-iterable';
217234

218235
function* naiveFibonacci(): Iterable<number> {
219236
yield 0;
@@ -260,7 +277,7 @@ console.log(
260277
#### Playing with object arrays
261278

262279
``` typescript
263-
import { fluent } from '**fluent-iterable**';
280+
import { fluent } from '@codibre/fluent-iterable';
264281

265282
enum Gender {
266283
Male = 'Male',
@@ -343,7 +360,7 @@ console.log(
343360

344361
``` typescript
345362
import fetch from 'node-fetch';
346-
import { fluentAsync, Pager } from '**fluent-iterable**';
363+
import { fluentAsync, Pager } from '@codibre/fluent-iterable';
347364

348365
interface Data {
349366
id: number;
@@ -372,7 +389,7 @@ fluentAsync(depaginate(pager))
372389
### Doing an inner join between two iterables:
373390

374391
``` typescript
375-
import { fluent, identity } from '**fluent-iterable**';
392+
import { fluent, identity } from '@codibre/fluent-iterable';
376393

377394
const genders = [
378395
{ code: 'm', description: 'male' },
@@ -421,18 +438,16 @@ fluent(genders)
421438
#### Bonus: How to Scan DynamoDB like a pro
422439

423440
``` typescript
424-
import { DynamoDB } from 'aws-sdk';
425-
import { Key } from 'aws-sdk/clients/dynamodb';
426-
import { depaginate, fluentAsync, Pager } from '**fluent-iterable**';
441+
import { DynamoDB, ScanInput, AttributeValue } from '@aws-sdk/client-dynamodb';
442+
import { depaginate, fluentAsync, Pager } from '@codibre/fluent-iterable';
427443

428444
async function *scan<TData>(
429-
input: DynamoDB.DocumentClient.ScanInput
445+
input: ScanInput
430446
): AsyncIterable<TData> {
431-
const ddb = new DynamoDB.DocumentClient(..);
432-
const pager: Pager<TData, Key> = async (token) => {
447+
const ddb = new DynamoDB();
448+
const pager: Pager<TData, Record<string, AttributeValue>> = async (token) => {
433449
const result = await ddb
434-
.scan(input)
435-
.promise();
450+
.scan(input);
436451

437452
return {
438453
nextPageToken: result.LastEvaluatedKey,
@@ -445,10 +460,10 @@ async function *scan<TData>(
445460

446461
// and use it like this:
447462

448-
const productsParams: DynamoDB.DocumentClient.ScanInput = {
463+
const productsParams: ScanInput = {
449464
TableName : 'ProductTable',
450465
FilterExpression : '#shoeName = :shoeName', // optional
451-
ExpressionAttributeValues : {':shoeName' : 'YeeZys'}, // optional
466+
ExpressionAttributeValues : {':shoeName' : { S: 'YeeZys' } }, // optional
452467
ExpressionAttributeNames: { '#shoeName': 'name' } // optional
453468
};
454469

@@ -470,17 +485,18 @@ The solution used for this problems was 90% inspired in the [fraxken combine-asy
470485
You can add custom methods to the FluentIterable and FluentAsyncIterable using the *extend* and *extendAsync* utilities. Here is a practical example of how to:
471486

472487
``` TypeScript
473-
declare module '**fluent-iterable**' {
474-
import { extendAsync } from '../src';
488+
import { extendAsync } from '../src';
489+
490+
declare module '@codibre/fluent-iterable' {
475491

476492
interface FluentAsyncIterable<T> {
477493
myCustomIterableMethod(): FluentAsyncIterable<T>;
478494
myCustomResolvingMethod(): PromiseLike<number>;
479495
}
480-
481-
extendAsync.use('myCustomIterableMethod', (x) => someOperation(x));
482-
extendAsync.use('myCustomResolvingMethod', (x) => someResolvingOperation(x));
483496
}
497+
498+
extendAsync.use('myCustomIterableMethod', (x) => someOperation(x));
499+
extendAsync.use('myCustomResolvingMethod', (x) => someResolvingOperation(x));
484500
```
485501

486502
Notice that, once you import code like the above, every FluentAsyncIterable created afterwards will have the declared methods, so use it with caution!

advanced-examples/combining-iterables.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ const categories = [
5555
},
5656
{
5757
parentId: 1,
58-
id: 2
58+
id: 2,
5959
description: 'movies',
6060
},
6161
{

advanced-examples/combining-sync-and-async-operations.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,8 @@ fluentAsync(dataStream)
1111
})
1212
.map((x) => requestSomeData(x)) // Here, request is an async operation
1313
.map((x) => otherRequest(parse(x))) // Parse is sync, otherRequest async, but that's okay, as long as async/await is not used
14-
.group('categoryId'); // A grouping operation is performed by the categoryId value
15-
take(10); //Take just 10 items
14+
.group('categoryId') // A grouping operation is performed by the categoryId value
15+
.take(10); //Take just 10 items
1616
```
1717

1818
The good thing here is that fluent works over the iterable and async iterable mechanisms; that way, it is possible to chain operations that result in promises, and the whole iterable sequence will be respected, instead of generating a lot of promises that would flood the Node queue.
@@ -26,8 +26,8 @@ fluent(data)
2626
})
2727
.mapAsync((x) => requestSomeData(x)) // From that point, the FluentIterable is transformed in a FluentAsyncIterable, and you don't need the async suffix any longer
2828
.map((x) => otherRequest(parse(x))) // Parse is sync, otherRequest async, but that's okay, as long as async/await is not used
29-
.group('categoryId'); // A grouping operation is performed by the categoryId value
30-
take(10); //Take just 10 items
29+
.group('categoryId') // A grouping operation is performed by the categoryId value
30+
.take(10); //Take just 10 items
3131
```
3232

3333
[Go back](README.md)

advanced-examples/iterate-chunks.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ async function doTheWork(request: MyRequest) {
1616
// Also notice that the item type is an array, because we're talking about a stream of chunked data
1717
await fluent<SomeData[]>(getChunkedData(request))
1818
.flatMap() // With flat map, the stream of SomeData[] will unwind and be transformed in a stream of SomeData
19-
.forEach(doEachWork); // You could also just return the iterable without resolve it and del with the data somewhere else, for decoupling purpose
19+
.forEachAsync(doEachWork); // You could also just return the iterable without resolving it and deal with the data somewhere else, for decoupling purposes
2020
}
2121
```
2222

advanced-examples/nested-flattening-with-back-reference.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ fluent(invoices)
4747
value: x.value
4848
}))
4949
)
50-
)
50+
);
5151
```
5252

5353
Those nested fluent operation can become very messy when your business rule starts to become more complicated. Luckily, fluent offers you a powerful operation called **flatJoin**. With flatJoin, you can automatically obtain the items of each nested level with just one operation, like that:
@@ -59,7 +59,7 @@ fluent(invoices)
5959
invoiceId: invoice.id,
6060
itemId: items.id,
6161
value: taxes.value
62-
}))
62+
}));
6363
```
6464

6565
Notice that the root item of each operation is added to the result in the property named with the symbol **tail**, which you can import from fluent. You can also get the last items from the symbol **head**, but in this case it'll be the same value as the property **taxes**.

advanced-examples/operators-and-resolvers.md

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ Instead, you need to do as follow:
2626
```ts
2727
const origin = [1, 2, 3];
2828
const evens = fluent(it).filter((x) => x % 2 === 0);
29-
const odds = fluent(it().filter((x) => x % 2 !== 0)
29+
const odds = fluent(it).filter((x) => x % 2 !== 0)
3030
```
3131

3232
That's how iterables work: you can't guarantee a second iteration over the same iterable, and that's needed for some of its core behaviors. Also, an iterable is not guaranteed to support multiple iterations over it. An array supports them, but that's the exception, not the rule. Iterables created using the [generator pattern](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Generator) can only be iterated once, for example. Because of this, fluent-iterable doesn't support parallel iterations deriving from the same node and must be used as a straightforward fluent tool.
@@ -44,7 +44,8 @@ The operations are executed in the chaining order for each iterable item when a
4444
const test = fluent([1, 2, 3])
4545
.repeat(3)
4646
.withIndex()
47-
.flatMap(({ value, idx }) => fluent(value).map((x) => x * (idx + 1)));
47+
.partition(3)
48+
.flatMap((x) => fluent(x).map(({ value, idx }) => value * (idx + 1)));
4849

4950
// Here, the iteration happens
5051
for (const i of test) {

src/utils/utils.ts

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -252,9 +252,7 @@ function getAverageStepper() {
252252

253253
return wrapper;
254254
}
255-
function getItemToAssure<
256-
F extends Function | FluentIterable<any> | FluentAsyncIterable<any>,
257-
>(f: F): any {
255+
function getItemToAssure<F extends Function | AnyIterable<any>>(f: F): any {
258256
return typeof f === 'function' && !(f instanceof FluentClass)
259257
? (...args: any[]) => (f as CallableFunction)(...args)
260258
: f;
@@ -272,9 +270,7 @@ function getItemToAssure<
272270
*
273271
* @param f the function to assure order
274272
*/
275-
function assureOrder<
276-
F extends Function | FluentIterable<any> | FluentAsyncIterable<any>,
277-
>(f: F): F {
273+
function assureOrder<F extends Function | AnyIterable<any>>(f: F): F {
278274
const result = getItemToAssure(f);
279275
result[orderAssured] = 1;
280276
return result;

test/unit/fluent-async.spec.ts

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1090,6 +1090,27 @@ describe('fluent async iterable', () => {
10901090
expect(resolved).toBe(10);
10911091
expect(result).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
10921092
});
1093+
1094+
it('should resolve promises in parallel', async () => {
1095+
let resolved = 0;
1096+
let current = 0;
1097+
1098+
const promise = fluentAsync(fluent(interval(1, 10)).toAsync()).waitAll(
1099+
(x) =>
1100+
new Promise(async (resolve) => {
1101+
await delay(1);
1102+
resolved++;
1103+
resolve(x + current);
1104+
await delay(1);
1105+
current = 1;
1106+
}),
1107+
);
1108+
1109+
expect(resolved).toBe(0);
1110+
const result = await promise;
1111+
expect(resolved).toBe(10);
1112+
expect(result).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
1113+
});
10931114
});
10941115

10951116
it('should be identifiable as fluent async', () => {

test/unit/order-assuring.spec.ts

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { fluent } from '../../src';
1+
import { fluent, o } from '../../src';
22
import { orderAssured } from '../../src/types-internal';
33

44
describe('order assuring', () => {
@@ -36,6 +36,17 @@ describe('order assuring', () => {
3636

3737
expect(mp[orderAssured]).not.toBeDefined();
3838
});
39+
it('should keep an assured ascending order through filter and takeWhile operations, but not through a mapper when using assurer directly on iterable', () => {
40+
const it: any = fluent(o([1, 2, 3])).filter((x) => x > 1);
41+
42+
expect(it[orderAssured]).toBeDefined();
43+
const tw = it.takeWhile((x: any) => x < 3);
44+
45+
expect(tw[orderAssured]).toBeDefined();
46+
const mp: any = tw.map((x: any) => x * 2);
47+
48+
expect(mp[orderAssured]).not.toBeDefined();
49+
});
3950
it('should keep an assured descending order through filter and takeWhile operations, but not through a mapper', () => {
4051
const it: any = fluent([1, 2, 3])
4152
.od()

0 commit comments

Comments
 (0)