Skip to content

Commit 7b97286

Browse files
committed
Merge branch 'master' of https://github.com/opensource9ja/danfojs into browser-groupby
2 parents c12f81e + 36b0cef commit 7b97286

8 files changed

Lines changed: 60 additions & 30 deletions

File tree

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -186,7 +186,7 @@ Output in Node Console:
186186
The official documentation can be found [here](https://danfo.jsdata.org)
187187

188188
## Discussion and Development
189-
Development discussions take place on our [issues](https://github.com/opensource9ja/danfojs/issues) tab.
189+
Development discussions take place [here](https://github.com/opensource9ja/danfojs/discussions).
190190

191191
## Contributing to Danfo
192192
All contributions, bug reports, bug fixes, documentation improvements, enhancements, and ideas are welcome. A detailed overview on how to contribute can be found in the [contributing guide](https://danfo.jsdata.org/contributing-guide).

danfojs-browser/lib/bundle.js.LICENSE.txt

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -146,6 +146,23 @@
146146
* =============================================================================
147147
*/
148148

149+
/**
150+
* @license
151+
* Copyright 2021 Google LLC. All Rights Reserved.
152+
* Licensed under the Apache License, Version 2.0 (the "License");
153+
* you may not use this file except in compliance with the License.
154+
* You may obtain a copy of the License at
155+
*
156+
* http://www.apache.org/licenses/LICENSE-2.0
157+
*
158+
* Unless required by applicable law or agreed to in writing, software
159+
* distributed under the License is distributed on an "AS IS" BASIS,
160+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
161+
* See the License for the specific language governing permissions and
162+
* limitations under the License.
163+
* =============================================================================
164+
*/
165+
149166
/**
150167
* @license Complex.js v2.0.11 11/02/2016
151168
*

danfojs-browser/lib/bundle.js.map

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

danfojs-browser/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
"types"
2222
],
2323
"dependencies": {
24-
"@tensorflow/tfjs": "3.2.0",
24+
"@tensorflow/tfjs": "3.3.0",
2525
"mathjs": "7.5.1",
2626
"table": "^5.4.6",
2727
"xlsx": "^0.16.7"
@@ -85,4 +85,4 @@
8585
]
8686
},
8787
"sideEffects": false
88-
}
88+
}

danfojs-browser/src/io/reader.js

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,15 @@ import { DataFrame } from "../core/frame";
55

66
/**
77
* Reads a CSV file from local or remote storage
8-
*
9-
* @param {source} URL to CSV file
10-
* @param {config} (Optional). A CSV Config object that contains configurations
8+
* @param {string} source URL to CSV file
9+
* @param {object} config (Optional). A CSV Config object that contains configurations
1110
* for reading and decoding from CSV file(s).
11+
* { start: The index position to start from when reading the CSV file.
12+
*
13+
* end: The end position to stop at when reading the CSV file.
14+
*
15+
* ...csvConfigs: other supported Tensorflow csvConfig parameters. See https://js.tensorflow.org/api/latest/#data.csv
16+
* }
1217
*
1318
* @returns {Promise} DataFrame structure of parsed CSV data
1419
*/
@@ -30,8 +35,7 @@ export const read_csv = async (source, configs = {}) => {
3035

3136
/**
3237
* Reads a JSON file from local or remote address
33-
*
34-
* @param {source} URL or local file path to retreive JSON file.
38+
* @param {string} source URL or local file path to retrieve JSON file.
3539
* @returns {Promise} DataFrame structure of parsed CSV data
3640
*/
3741
export const read_json = async (source) => {
@@ -44,12 +48,15 @@ export const read_json = async (source) => {
4448

4549
/**
4650
* Reads an Excel file from local or remote address
51+
* @param {string} source URL to Excel file
52+
* @param {object} configs {
53+
*
54+
* sheet : string, (Optional) Name of the sheet which you want to parse. Default will be the first sheet.
4755
*
48-
* * @param {kwargs} kwargs --> {
49-
* source : string, URL or local file path to retreive Excel file.
50-
* sheet : string, (Optional) Name of the sheet which u want to parse. Default will be the first sheet.
5156
* header_index : int, (Optional) Index of the row which represents the header(columns) of the data. Default will be the first non empty row.
52-
* data_index : int, (Optional)Index of the row from which actual data(content) starts. Default will be the next row of `header_index`
57+
*
58+
* data_index : int, (Optional) Index of the row from which actual data (content) starts. Default will be the next row of `header_index`.
59+
*
5360
* }
5461
* @returns {Promise} DataFrame structure of parsed Excel data
5562
*/

danfojs-node/dist/io/reader.js

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ const read_excel = async (source, configs) => {
6868

6969
exports.read_excel = read_excel;
7070

71-
const read = async (path_or_descriptor, configs = {}) => {
71+
const read = async (source, configs = {}) => {
7272
let {
7373
data_num,
7474
header,
@@ -78,14 +78,14 @@ const read = async (path_or_descriptor, configs = {}) => {
7878
header = header === undefined ? true : header;
7979
let rows, file;
8080

81-
if ((0, _frictionless.isDataset)(path_or_descriptor)) {
81+
if ((0, _frictionless.isDataset)(source)) {
8282
console.log("datapackage.json found. Loading Dataset package from Datahub.io");
83-
const dataset = await _frictionless.Dataset.load(path_or_descriptor);
83+
const dataset = await _frictionless.Dataset.load(source);
8484
file = dataset.resources[data_num];
8585
rows = await (0, _streamToArray.default)(await file.rows());
8686
} else {
8787
try {
88-
file = (0, _frictionless.open)(path_or_descriptor);
88+
file = (0, _frictionless.open)(source);
8989

9090
if (sheet) {
9191
rows = await (0, _streamToArray.default)(await file.rows({

danfojs-node/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
"types"
2222
],
2323
"dependencies": {
24-
"@tensorflow/tfjs-node": "3.2.0",
24+
"@tensorflow/tfjs-node": "3.3.0",
2525
"frictionless.js": "0.13.4",
2626
"mathjs": "7.5.1",
2727
"node-fetch": "^2.6.1",
@@ -79,4 +79,4 @@
7979
"text"
8080
]
8181
}
82-
}
82+
}

danfojs-node/src/io/reader.js

Lines changed: 17 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -65,9 +65,10 @@ export const read_json = async (source) => {
6565

6666
/**
6767
* Reads an Excel file from local or remote address
68-
* * @param {kwargs} kwargs --> {
69-
* source : string, URL or local file path to retreive Excel file
70-
* configs: object, (Optional) {
68+
* @param {string} source URL or local file path to retrieve Excel file.
69+
* @param {object} configs (Optional) Configuration options when reading excel files
70+
*
71+
* {
7172
* sheet : string, (Optional) number of the sheet to parse. Default will be the first sheet.
7273
* }
7374
* @returns {Promise} DataFrame structure of parsed Excel data
@@ -79,33 +80,38 @@ export const read_excel = async (source, configs) => {
7980

8081
/**
8182
* Opens a file using frictionless.js specification.
82-
* @param {string} pathOrDescriptor A path to the file/resources. It can be a local file,
83+
* @param {string} source A path to the file/resources. It can be a local file,
8384
* a URL to a tabular data (CSV, EXCEL) or Datahub.io Data Resource.
8485
* Data comes with extra properties and specification conforming to the Frictionless Data standards.
85-
* @param {object} configs { data_num (Defaults => 0): The specific dataset to load, when reading data from a datapackage.json,
86-
* header (Defaults => true): Whether the dataset contains header or not.
87-
* }
86+
* @param {object} configs {
87+
*
88+
* data_num (Defaults => 0): The specific dataset to load, when reading data from a datapackage.json
89+
*
90+
* header (Defaults => true): Whether the dataset contains header or not.
91+
*
92+
* sheet (Defaults => 0): Number of the Excel sheet which you want to load.
93+
* }
8894
* @returns {DataFrame} Danfo DataFrame/Series
8995
*/
9096
export const read = async (
91-
path_or_descriptor,
97+
source,
9298
configs = {}
9399
) => {
94100
let { data_num, header, sheet } = configs;
95101
data_num = data_num === undefined ? 0 : data_num;
96102
header = header === undefined ? true : header;
97103
let rows, file;
98104

99-
if (isDataset(path_or_descriptor)) {
105+
if (isDataset(source)) {
100106
console.log(
101107
"datapackage.json found. Loading Dataset package from Datahub.io"
102108
);
103-
const dataset = await Dataset.load(path_or_descriptor);
109+
const dataset = await Dataset.load(source);
104110
file = dataset.resources[data_num];
105111
rows = await toArray(await file.rows());
106112
} else {
107113
try {
108-
file = open(path_or_descriptor);
114+
file = open(source);
109115
if (sheet) {
110116
rows = await toArray(await file.rows({ sheet }));
111117
} else {

0 commit comments

Comments
 (0)