Commit e783e29a by Waruenada kenanan

add backend saltstack_beacon

parent 6f8ab3ba
-POSTGRES_ENDPOINT = 175.41.163.94
+POSTGRES_ENDPOINT = 54.151.166.113
 POSTGRES_USER = user
 POSTGRES_PASSWORD = password
 POSTGRES_DB = postgres
......
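Both `dotenv` and `pg` are already present in the lock file below, so these settings could be read from the environment rather than hard-coded. A minimal sketch, assuming a `.env` file with the variable names from the config above (the port is an assumed default):

```javascript
// Sketch only: load the Postgres settings above from the environment instead of
// hard-coding them. Variable names come from the config file; port is assumed.
require("dotenv").config();
const { Client } = require("pg");

const client = new Client({
  host: process.env.POSTGRES_ENDPOINT,
  user: process.env.POSTGRES_USER,
  password: process.env.POSTGRES_PASSWORD,
  database: process.env.POSTGRES_DB,
  port: 5432, // assumed default
});

client
  .connect()
  .then(() => console.log("connected"))
  .catch((err) => console.error("connection failed", err));
```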
// Data-access layer for the SaltStack beacon backend: wraps a pg client and
// exposes inserts/updates for the vm_profile table and the per-metric log tables.
// Queries use parameterized placeholders instead of string interpolation.
module.exports = class salt_stack_v2 {
  constructor(client) {
    this.client = client;
  }

  // Register a new minion/VM in vm_profile.
  async insert_userinfo_db(uuid, minion_name, status, created_time, updated_time) {
    const sql = 'INSERT INTO vm_profile(uuid, vm_name, status, created_time, updated_time) VALUES($1, $2, $3, $4, $5)';
    return this.client.query(sql, [uuid, minion_name, status, created_time, updated_time]);
  }

  // Fetch every VM profile row.
  async get_user() {
    return this.client.query('SELECT * FROM vm_profile');
  }

  // Update the status (and timestamp) of a VM.
  async update_status_user(uuid, status, updated_time) {
    const sql = 'UPDATE vm_profile SET updated_time = $1, status = $2 WHERE uuid = $3';
    return this.client.query(sql, [updated_time, status, uuid]);
  }

  // Update status plus total CPU count.
  async update_cpu_user(uuid, status, cpu_total, updated_time) {
    const sql = 'UPDATE vm_profile SET updated_time = $1, status = $2, cpu = $3 WHERE uuid = $4';
    return this.client.query(sql, [updated_time, status, cpu_total, uuid]);
  }

  // Update status plus total memory.
  async update_mem_user(uuid, status, mem_total, updated_time) {
    const sql = 'UPDATE vm_profile SET updated_time = $1, status = $2, memory = $3 WHERE uuid = $4';
    return this.client.query(sql, [updated_time, status, mem_total, uuid]);
  }

  // Append a disk-usage sample to logs_disk.
  async insert_logs_disk(user_id, diskusage, create_time) {
    const sql = 'INSERT INTO logs_disk(uuid, disk_used, created_time) VALUES($1, $2, $3)';
    return this.client.query(sql, [user_id, diskusage, create_time]);
  }

  // Append a memory sample to logs_mem.
  async insert_logs_mem(user_id, memusage, cached, buffers, mem_available, create_time) {
    const sql = 'INSERT INTO logs_mem(uuid, mem_used, cached, buffers, mem_available, created_time) VALUES($1, $2, $3, $4, $5, $6)';
    return this.client.query(sql, [user_id, memusage, cached, buffers, mem_available, create_time]);
  }

  // Append a CPU-usage sample to logs_cpu.
  async insert_logs_cpu(user_id, persent_cpu_used, vcpu, create_time) {
    const sql = 'INSERT INTO logs_cpu(uuid, cpu_used, vcpu, created_time) VALUES($1, $2, $3, $4)';
    return this.client.query(sql, [user_id, persent_cpu_used, vcpu, create_time]);
  }

  // Append a load-average sample to logs_load.
  async insert_logs_load(user_id, one_min, five_min, fifteen_min, create_time) {
    const sql = 'INSERT INTO logs_load(uuid, one_min, five_min, fifteen_min, created_time) VALUES($1, $2, $3, $4, $5)';
    return this.client.query(sql, [user_id, one_min, five_min, fifteen_min, create_time]);
  }
};
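A minimal usage sketch for the class above, assuming an already reachable PostgreSQL instance; the require path, connection source, and sample values are assumptions:

```javascript
// Sketch only: module path and sample values are illustrative.
const { Client } = require("pg");
const salt_stack_v2 = require("./database/salt_stack.js"); // path assumed

(async () => {
  const client = new Client(); // connection settings taken from PG* env vars
  await client.connect();

  const db = new salt_stack_v2(client);
  const now = new Date().toISOString();

  // Register a minion, then record one CPU sample for it.
  await db.insert_userinfo_db("c0ffee-uuid", "minion-01", "running", now, now);
  await db.insert_logs_cpu("c0ffee-uuid", 12.5, 2, now);

  await client.end();
})().catch(console.error);
```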
// Entry point: connect to the Salt API and PostgreSQL, then start consuming
// Salt events through the saltdb_v2 helper.
const { Salt } = require("salt-api");
const { Client } = require("pg");
const saltdb_v2 = require("./saltdb_v2.js");

// Salt API endpoint and PAM credentials.
const salt = new Salt({
  url: "http://54.151.166.113:8000",
  username: "saltuser",
  password: "saltpassword",
  eauth: "pam"
});

// PostgreSQL connection (matches the settings in the config file above).
const client = new Client({
  user: 'user',
  password: 'password',
  host: '54.151.166.113',
  port: 5432,
  database: 'postgres',
});

const saltDB = new saltdb_v2(salt, client);

client
  .connect()
  .then(async () => {
    await saltDB.get_event();
  })
  .catch((err) => {
    console.error('Error connecting to PostgreSQL database', err);
  });
{
"name": "saltstack",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"peer": true
},
"node_modules/axios": {
"version": "1.7.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz",
"integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==",
"peer": true,
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"peer": true,
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"peer": true,
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/dotenv": {
"version": "16.4.5",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz",
"integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/eventsource": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz",
"integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==",
"peer": true,
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/follow-redirects": {
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"peer": true,
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"peer": true,
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"peer": true,
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"peer": true,
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/pg": {
"version": "8.12.0",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.12.0.tgz",
"integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==",
"dependencies": {
"pg-connection-string": "^2.6.4",
"pg-pool": "^3.6.2",
"pg-protocol": "^1.6.1",
"pg-types": "^2.1.0",
"pgpass": "1.x"
},
"engines": {
"node": ">= 8.0.0"
},
"optionalDependencies": {
"pg-cloudflare": "^1.1.1"
},
"peerDependencies": {
"pg-native": ">=3.0.1"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-cloudflare": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz",
"integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==",
"optional": true
},
"node_modules/pg-connection-string": {
"version": "2.6.4",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.4.tgz",
"integrity": "sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA=="
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.2.tgz",
"integrity": "sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.1.tgz",
"integrity": "sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg=="
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"peer": true
},
"node_modules/salt-api": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/salt-api/-/salt-api-2.1.2.tgz",
"integrity": "sha512-nRNfdwaebnsfHg8E2Des3kaVXkc1VUe1YsMwRUu07zNJxAWRriDwp4PRHJkPPm0ftCOD3dnL3kPTgu+nt3rewQ==",
"peerDependencies": {
"axios": "^1.4.0",
"eventsource": "^2.0.2"
}
},
"node_modules/split2": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
"engines": {
"node": ">= 10.x"
}
},
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
"engines": {
"node": ">=0.4"
}
}
}
}
The MIT License (MIT)
Copyright (c) 2016 Alex Indigo
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit)
Minimal async jobs utility library, with streams support.
[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit)
[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master)
[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit)
[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit)
<!-- [![Readme](https://img.shields.io/badge/readme-tested-brightgreen.svg?style=flat)](https://www.npmjs.com/package/reamde) -->
AsyncKit provides a harness for `parallel` and `serial` iterators over a list of items represented by an array or an object.
Optionally, the iterator may synchronously return an abort function for each item, which is used to terminate leftover jobs when an error occurs. For a specific iteration order, built-in (`ascending` and `descending`) and custom sort helpers are also supported via the `asynckit.serialOrdered` method.
It enforces asynchronous completion of operations, keeping behavior stable and preventing `Maximum call stack size exceeded` errors caused by synchronous iterators.
| compression | size |
| :----------------- | -------: |
| asynckit.js | 12.34 kB |
| asynckit.min.js | 4.11 kB |
| asynckit.min.js.gz | 1.47 kB |
## Install
```sh
$ npm install --save asynckit
```
## Examples
### Parallel Jobs
Runs the iterator over the provided array in parallel. Output is stored in the `result` array
at the matching positions. In the unlikely event of an error from one of the jobs,
the remaining active jobs are terminated (if an abort function is provided)
and the error is returned, along with the salvaged data, to the main callback function.
#### Input Array
```javascript
var parallel = require('asynckit').parallel
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, target = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// async job accepts one element from the array
// and a callback function
function asyncJob(item, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
#### Input Object
It also supports named jobs, listed via an object.
```javascript
var parallel = require('asynckit/parallel')
, assert = require('assert')
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
, target = []
, keys = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
assert.deepEqual(keys, expectedKeys);
});
// supports full value, key, callback (shortcut) interface
function asyncJob(item, key, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
keys.push(key);
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
### Serial Jobs
Runs the iterator over the provided array sequentially. Output is stored in the `result` array
at the matching positions. In the unlikely event of an error from one of the jobs,
the rest of the items in the list are not processed
and the error is returned, along with the salvaged data, to the main callback function.
#### Input Array
```javascript
var serial = require('asynckit/serial')
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// extended interface (item, key, callback)
// also supported for arrays
function asyncJob(item, key, cb)
{
target.push(key);
// it will be automatically made async
// even if the iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
#### Input Object
It also supports named jobs, listed via an object.
```javascript
var serial = require('asynckit').serial
, assert = require('assert')
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// shortcut interface (item, callback)
// works for object as well as for the arrays
function asyncJob(item, cb)
{
target.push(item);
// it will be automatically made async
// even if the iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
_Note: Since _object_ is an _unordered_ collection of properties,
it may produce unexpected results with sequential iterations.
Whenever the order of the jobs' execution is important, please use the `serialOrdered` method._
### Ordered Serial Iterations
TBD
For example [compare-property](compare-property) package.
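Until then, a minimal sketch of `serialOrdered` with the built-in `ascending` sort helper (the values are illustrative):

```javascript
var serialOrdered = require('asynckit').serialOrdered
  , assert        = require('assert')
  ;

var source = [ 4, 1, 3, 2 ];

// jobs run one at a time, in ascending order of the values;
// results still land on the original array positions
serialOrdered(source, asyncJob, serialOrdered.ascending, function(err, result)
{
  assert.ifError(err);
  assert.deepEqual(result, [ 8, 2, 6, 4 ]);
});

function asyncJob(item, cb)
{
  cb(null, item * 2);
}
```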
### Streaming interface
TBD
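In the meantime, a minimal sketch, assuming the streaming variant is exposed as `asynckit/stream`; each `data` chunk is a `{key, value}` pair emitted as the matching job finishes, and the final callback still receives the combined results:

```javascript
var parallel = require('asynckit/stream').parallel; // path assumed

var stream = parallel([ 1, 2, 3 ], function(item, cb)
{
  cb(null, item * 2);
},
function(err, result)
{
  console.log('done:', err, result);
});

// object-mode readable stream of per-job results
stream.on('data', function(chunk)
{
  console.log(chunk.key, chunk.value);
});
```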
## Want to Know More?
More examples can be found in [test folder](test/).
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
## License
AsyncKit is licensed under the MIT license.
/* eslint no-console: "off" */
var asynckit = require('./')
, async = require('async')
, assert = require('assert')
, expected = 0
;
var Benchmark = require('benchmark');
var suite = new Benchmark.Suite;
var source = [];
for (var z = 1; z < 100; z++)
{
source.push(z);
expected += z;
}
suite
// add tests
.add('async.map', function(deferred)
{
var total = 0;
async.map(source,
function(i, cb)
{
setImmediate(function()
{
total += i;
cb(null, total);
});
},
function(err, result)
{
assert.ifError(err);
assert.equal(result[result.length - 1], expected);
deferred.resolve();
});
}, {'defer': true})
.add('asynckit.parallel', function(deferred)
{
var total = 0;
asynckit.parallel(source,
function(i, cb)
{
setImmediate(function()
{
total += i;
cb(null, total);
});
},
function(err, result)
{
assert.ifError(err);
assert.equal(result[result.length - 1], expected);
deferred.resolve();
});
}, {'defer': true})
// add listeners
.on('cycle', function(ev)
{
console.log(String(ev.target));
})
.on('complete', function()
{
console.log('Fastest is ' + this.filter('fastest').map('name'));
})
// run async
.run({ 'async': true });
module.exports =
{
parallel : require('./parallel.js'),
serial : require('./serial.js'),
serialOrdered : require('./serialOrdered.js')
};
// API
module.exports = abort;
/**
* Aborts leftover active jobs
*
* @param {object} state - current state object
*/
function abort(state)
{
Object.keys(state.jobs).forEach(clean.bind(state));
// reset leftover jobs
state.jobs = {};
}
/**
* Cleans up leftover job by invoking abort function for the provided job id
*
* @this state
* @param {string|number} key - job id to abort
*/
function clean(key)
{
if (typeof this.jobs[key] == 'function')
{
this.jobs[key]();
}
}
var defer = require('./defer.js');
// API
module.exports = async;
/**
* Runs provided callback asynchronously
* even if callback itself is not
*
* @param {function} callback - callback to invoke
* @returns {function} - augmented callback
*/
function async(callback)
{
var isAsync = false;
// check if async happened
defer(function() { isAsync = true; });
return function async_callback(err, result)
{
if (isAsync)
{
callback(err, result);
}
else
{
defer(function nextTick_callback()
{
callback(err, result);
});
}
};
}
module.exports = defer;
/**
* Runs provided function on next iteration of the event loop
*
* @param {function} fn - function to run
*/
function defer(fn)
{
var nextTick = typeof setImmediate == 'function'
? setImmediate
: (
typeof process == 'object' && typeof process.nextTick == 'function'
? process.nextTick
: null
);
if (nextTick)
{
nextTick(fn);
}
else
{
setTimeout(fn, 0);
}
}
var async = require('./async.js')
, abort = require('./abort.js')
;
// API
module.exports = iterate;
/**
* Iterates over each job object
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {object} state - current job status
* @param {function} callback - invoked when all elements processed
*/
function iterate(list, iterator, state, callback)
{
// store current index
var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
{
// don't repeat yourself
// skip secondary callbacks
if (!(key in state.jobs))
{
return;
}
// clean up jobs
delete state.jobs[key];
if (error)
{
// don't process rest of the results
// stop still active jobs
// and reset the list
abort(state);
}
else
{
state.results[key] = output;
}
// return salvaged results
callback(error, state.results);
});
}
/**
* Runs iterator over provided job element
*
* @param {function} iterator - iterator to invoke
* @param {string|number} key - key/index of the element in the list of jobs
* @param {mixed} item - job description
* @param {function} callback - invoked after iterator is done with the job
* @returns {function|mixed} - job abort function or something else
*/
function runJob(iterator, key, item, callback)
{
var aborter;
// allow shortcut if iterator expects only two arguments
if (iterator.length == 2)
{
aborter = iterator(item, async(callback));
}
// otherwise go with full three arguments
else
{
aborter = iterator(item, key, async(callback));
}
return aborter;
}
var streamify = require('./streamify.js')
, defer = require('./defer.js')
;
// API
module.exports = ReadableAsyncKit;
/**
* Base constructor for all streams
* used to hold properties/methods
*/
function ReadableAsyncKit()
{
ReadableAsyncKit.super_.apply(this, arguments);
// list of active jobs
this.jobs = {};
// add stream methods
this.destroy = destroy;
this._start = _start;
this._read = _read;
}
/**
* Destroys readable stream,
* by aborting outstanding jobs
*
* @returns {void}
*/
function destroy()
{
if (this.destroyed)
{
return;
}
this.destroyed = true;
if (typeof this.terminator == 'function')
{
this.terminator();
}
}
/**
* Starts provided jobs in async manner
*
* @private
*/
function _start()
{
// first argument – runner function
var runner = arguments[0]
// take away first argument
, args = Array.prototype.slice.call(arguments, 1)
// second argument - input data
, input = args[0]
// last argument - result callback
, endCb = streamify.callback.call(this, args[args.length - 1])
;
args[args.length - 1] = endCb;
// third argument - iterator
args[1] = streamify.iterator.call(this, args[1]);
// allow time for proper setup
defer(function()
{
if (!this.destroyed)
{
this.terminator = runner.apply(null, args);
}
else
{
endCb(null, Array.isArray(input) ? [] : {});
}
}.bind(this));
}
/**
* Implement _read to comply with Readable streams
* Doesn't really make sense for flowing object mode
*
* @private
*/
function _read()
{
}
var parallel = require('../parallel.js');
// API
module.exports = ReadableParallel;
/**
* Streaming wrapper to `asynckit.parallel`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableParallel(list, iterator, callback)
{
if (!(this instanceof ReadableParallel))
{
return new ReadableParallel(list, iterator, callback);
}
// turn on object mode
ReadableParallel.super_.call(this, {objectMode: true});
this._start(parallel, list, iterator, callback);
}
var serial = require('../serial.js');
// API
module.exports = ReadableSerial;
/**
* Streaming wrapper to `asynckit.serial`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableSerial(list, iterator, callback)
{
if (!(this instanceof ReadableSerial))
{
return new ReadableSerial(list, iterator, callback);
}
// turn on object mode
ReadableSerial.super_.call(this, {objectMode: true});
this._start(serial, list, iterator, callback);
}
var serialOrdered = require('../serialOrdered.js');
// API
module.exports = ReadableSerialOrdered;
// expose sort helpers
module.exports.ascending = serialOrdered.ascending;
module.exports.descending = serialOrdered.descending;
/**
* Streaming wrapper to `asynckit.serialOrdered`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} sortMethod - custom sort function
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
{
if (!(this instanceof ReadableSerialOrdered))
{
return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
}
// turn on object mode
ReadableSerialOrdered.super_.call(this, {objectMode: true});
this._start(serialOrdered, list, iterator, sortMethod, callback);
}
// API
module.exports = state;
/**
* Creates initial state object
* for iteration over list
*
* @param {array|object} list - list to iterate over
* @param {function|null} sortMethod - function to use for keys sort,
* or `null` to keep them as is
* @returns {object} - initial state object
*/
function state(list, sortMethod)
{
var isNamedList = !Array.isArray(list)
, initState =
{
index : 0,
keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
jobs : {},
results : isNamedList ? {} : [],
size : isNamedList ? Object.keys(list).length : list.length
}
;
if (sortMethod)
{
// sort array keys based on its values
// sort object's keys just on own merit
initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
{
return sortMethod(list[a], list[b]);
});
}
return initState;
}
var async = require('./async.js');
// API
module.exports = {
iterator: wrapIterator,
callback: wrapCallback
};
/**
* Wraps iterators with long signature
*
* @this ReadableAsyncKit#
* @param {function} iterator - function to wrap
* @returns {function} - wrapped function
*/
function wrapIterator(iterator)
{
var stream = this;
return function(item, key, cb)
{
var aborter
, wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
;
stream.jobs[key] = wrappedCb;
// it's either shortcut (item, cb)
if (iterator.length == 2)
{
aborter = iterator(item, wrappedCb);
}
// or long format (item, key, cb)
else
{
aborter = iterator(item, key, wrappedCb);
}
return aborter;
};
}
/**
* Wraps provided callback function
* allowing to execute snitch function before
* real callback
*
* @this ReadableAsyncKit#
* @param {function} callback - function to wrap
* @returns {function} - wrapped function
*/
function wrapCallback(callback)
{
var stream = this;
var wrapped = function(error, result)
{
return finisher.call(stream, error, result, callback);
};
return wrapped;
}
/**
* Wraps provided iterator callback function
* makes sure snitch only called once,
* but passes secondary calls to the original callback
*
* @this ReadableAsyncKit#
* @param {function} callback - callback to wrap
* @param {number|string} key - iteration key
* @returns {function} wrapped callback
*/
function wrapIteratorCallback(callback, key)
{
var stream = this;
return function(error, output)
{
// don't repeat yourself
if (!(key in stream.jobs))
{
callback(error, output);
return;
}
// clean up jobs
delete stream.jobs[key];
return streamer.call(stream, error, {key: key, value: output}, callback);
};
}
/**
* Stream wrapper for iterator callback
*
* @this ReadableAsyncKit#
* @param {mixed} error - error response
* @param {mixed} output - iterator output
* @param {function} callback - callback that expects iterator results
*/
function streamer(error, output, callback)
{
if (error && !this.error)
{
this.error = error;
this.pause();
this.emit('error', error);
// send back value only, as expected
callback(error, output && output.value);
return;
}
// stream stuff
this.push(output);
// back to original track
// send back value only, as expected
callback(error, output && output.value);
}
/**
* Stream wrapper for finishing callback
*
* @this ReadableAsyncKit#
* @param {mixed} error - error response
* @param {mixed} output - iterator output
* @param {function} callback - callback that expects final results
*/
function finisher(error, output, callback)
{
// signal end of the stream
// only for successfully finished streams
if (!error)
{
this.push(null);
}
// back to original track
callback(error, output);
}
var abort = require('./abort.js')
, async = require('./async.js')
;
// API
module.exports = terminator;
/**
* Terminates jobs in the attached state context
*
* @this AsyncKitState#
* @param {function} callback - final callback to invoke after termination
*/
function terminator(callback)
{
if (!Object.keys(this.jobs).length)
{
return;
}
// fast forward iteration index
this.index = this.size;
// abort jobs
abort(this);
// send back results we have so far
async(callback)(null, this.results);
}
{
"name": "asynckit",
"version": "0.4.0",
"description": "Minimal async jobs utility library, with streams support",
"main": "index.js",
"scripts": {
"clean": "rimraf coverage",
"lint": "eslint *.js lib/*.js test/*.js",
"test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
"win-test": "tape test/test-*.js",
"browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
"report": "istanbul report",
"size": "browserify index.js | size-table asynckit",
"debug": "tape test/test-*.js"
},
"pre-commit": [
"clean",
"lint",
"test",
"browser",
"report",
"size"
],
"repository": {
"type": "git",
"url": "git+https://github.com/alexindigo/asynckit.git"
},
"keywords": [
"async",
"jobs",
"parallel",
"serial",
"iterator",
"array",
"object",
"stream",
"destroy",
"terminate",
"abort"
],
"author": "Alex Indigo <iam@alexindigo.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/alexindigo/asynckit/issues"
},
"homepage": "https://github.com/alexindigo/asynckit#readme",
"devDependencies": {
"browserify": "^13.0.0",
"browserify-istanbul": "^2.0.0",
"coveralls": "^2.11.9",
"eslint": "^2.9.0",
"istanbul": "^0.4.3",
"obake": "^0.1.2",
"phantomjs-prebuilt": "^2.1.7",
"pre-commit": "^1.1.3",
"reamde": "^1.1.0",
"rimraf": "^2.5.2",
"size-table": "^0.2.0",
"tap-spec": "^4.1.1",
"tape": "^4.5.1"
},
"dependencies": {}
}
var iterate = require('./lib/iterate.js')
, initState = require('./lib/state.js')
, terminator = require('./lib/terminator.js')
;
// Public API
module.exports = parallel;
/**
* Runs iterator over provided array elements in parallel
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function parallel(list, iterator, callback)
{
var state = initState(list);
while (state.index < (state['keyedList'] || list).length)
{
iterate(list, iterator, state, function(error, result)
{
if (error)
{
callback(error, result);
return;
}
// looks like it's the last one
if (Object.keys(state.jobs).length === 0)
{
callback(null, state.results);
return;
}
});
state.index++;
}
return terminator.bind(state, callback);
}
var serialOrdered = require('./serialOrdered.js');
// Public API
module.exports = serial;
/**
* Runs iterator over provided array elements in series
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function serial(list, iterator, callback)
{
return serialOrdered(list, iterator, null, callback);
}
var iterate = require('./lib/iterate.js')
, initState = require('./lib/state.js')
, terminator = require('./lib/terminator.js')
;
// Public API
module.exports = serialOrdered;
// sorting helpers
module.exports.ascending = ascending;
module.exports.descending = descending;
/**
* Runs iterator over provided sorted array elements in series
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} sortMethod - custom sort function
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function serialOrdered(list, iterator, sortMethod, callback)
{
var state = initState(list, sortMethod);
iterate(list, iterator, state, function iteratorHandler(error, result)
{
if (error)
{
callback(error, result);
return;
}
state.index++;
// are we there yet?
if (state.index < (state['keyedList'] || list).length)
{
iterate(list, iterator, state, iteratorHandler);
return;
}
// done here
callback(null, state.results);
});
return terminator.bind(state, callback);
}
/*
* -- Sort methods
*/
/**
* sort helper to sort array elements in ascending order
*
* @param {mixed} a - an item to compare
* @param {mixed} b - an item to compare
* @returns {number} - comparison result
*/
function ascending(a, b)
{
return a < b ? -1 : a > b ? 1 : 0;
}
/**
* sort helper to sort array elements in descending order
*
* @param {mixed} a - an item to compare
* @param {mixed} b - an item to compare
* @returns {number} - comparison result
*/
function descending(a, b)
{
return -1 * ascending(a, b);
}
var inherits = require('util').inherits
, Readable = require('stream').Readable
, ReadableAsyncKit = require('./lib/readable_asynckit.js')
, ReadableParallel = require('./lib/readable_parallel.js')
, ReadableSerial = require('./lib/readable_serial.js')
, ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
;
// API
module.exports =
{
parallel : ReadableParallel,
serial : ReadableSerial,
serialOrdered : ReadableSerialOrdered,
};
inherits(ReadableAsyncKit, Readable);
inherits(ReadableParallel, ReadableAsyncKit);
inherits(ReadableSerial, ReadableAsyncKit);
inherits(ReadableSerialOrdered, ReadableAsyncKit);
# Copyright (c) 2014-present Matt Zabriskie & Collaborators
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Migration Guide
## 0.x.x -> 1.1.0
# Reporting a Vulnerability
If you discover a security vulnerability in axios please disclose it via [our huntr page](https://huntr.dev/repos/axios/axios/). Bounty eligibility, CVE assignment, response times and past reports are all there.
Thank you for improving the security of axios.
import axios from './lib/axios.js';
// This module is intended to unwrap Axios default export as named.
// Keep top-level export same with static properties
// so that it can keep same with es module or cjs
const {
Axios,
AxiosError,
CanceledError,
isCancel,
CancelToken,
VERSION,
all,
Cancel,
isAxiosError,
spread,
toFormData,
AxiosHeaders,
HttpStatusCode,
formToJSON,
getAdapter,
mergeConfig
} = axios;
export {
axios as default,
Axios,
AxiosError,
CanceledError,
isCancel,
CancelToken,
VERSION,
all,
Cancel,
isAxiosError,
spread,
toFormData,
AxiosHeaders,
HttpStatusCode,
formToJSON,
getAdapter,
mergeConfig
}
# axios // adapters
The modules under `adapters/` are modules that handle dispatching a request and settling a returned `Promise` once a response is received.
## Example
```js
var settle = require('./../core/settle');
module.exports = function myAdapter(config) {
// At this point:
// - config has been merged with defaults
// - request transformers have already run
// - request interceptors have already run
// Make the request using config provided
// Upon response settle the Promise
return new Promise(function(resolve, reject) {
var response = {
data: responseData,
status: request.status,
statusText: request.statusText,
headers: responseHeaders,
config: config,
request: request
};
settle(resolve, reject, response);
// From here:
// - response transformers will run
// - response interceptors will run
});
}
```
import utils from '../utils.js';
import httpAdapter from './http.js';
import xhrAdapter from './xhr.js';
import fetchAdapter from './fetch.js';
import AxiosError from "../core/AxiosError.js";
const knownAdapters = {
http: httpAdapter,
xhr: xhrAdapter,
fetch: fetchAdapter
}
utils.forEach(knownAdapters, (fn, value) => {
if (fn) {
try {
Object.defineProperty(fn, 'name', {value});
} catch (e) {
// eslint-disable-next-line no-empty
}
Object.defineProperty(fn, 'adapterName', {value});
}
});
const renderReason = (reason) => `- ${reason}`;
const isResolvedHandle = (adapter) => utils.isFunction(adapter) || adapter === null || adapter === false;
export default {
getAdapter: (adapters) => {
adapters = utils.isArray(adapters) ? adapters : [adapters];
const {length} = adapters;
let nameOrAdapter;
let adapter;
const rejectedReasons = {};
for (let i = 0; i < length; i++) {
nameOrAdapter = adapters[i];
let id;
adapter = nameOrAdapter;
if (!isResolvedHandle(nameOrAdapter)) {
adapter = knownAdapters[(id = String(nameOrAdapter)).toLowerCase()];
if (adapter === undefined) {
throw new AxiosError(`Unknown adapter '${id}'`);
}
}
if (adapter) {
break;
}
rejectedReasons[id || '#' + i] = adapter;
}
if (!adapter) {
const reasons = Object.entries(rejectedReasons)
.map(([id, state]) => `adapter ${id} ` +
(state === false ? 'is not supported by the environment' : 'is not available in the build')
);
let s = length ?
(reasons.length > 1 ? 'since :\n' + reasons.map(renderReason).join('\n') : ' ' + renderReason(reasons[0])) :
'as no adapter specified';
throw new AxiosError(
`There is no suitable adapter to dispatch the request ` + s,
'ERR_NOT_SUPPORT'
);
}
return adapter;
},
adapters: knownAdapters
}
import platform from "../platform/index.js";
import utils from "../utils.js";
import AxiosError from "../core/AxiosError.js";
import composeSignals from "../helpers/composeSignals.js";
import {trackStream} from "../helpers/trackStream.js";
import AxiosHeaders from "../core/AxiosHeaders.js";
import progressEventReducer from "../helpers/progressEventReducer.js";
import resolveConfig from "../helpers/resolveConfig.js";
import settle from "../core/settle.js";
const fetchProgressDecorator = (total, fn) => {
const lengthComputable = total != null;
return (loaded) => setTimeout(() => fn({
lengthComputable,
total,
loaded
}));
}
const isFetchSupported = typeof fetch === 'function' && typeof Request === 'function' && typeof Response === 'function';
const isReadableStreamSupported = isFetchSupported && typeof ReadableStream === 'function';
// used only inside the fetch adapter
const encodeText = isFetchSupported && (typeof TextEncoder === 'function' ?
((encoder) => (str) => encoder.encode(str))(new TextEncoder()) :
async (str) => new Uint8Array(await new Response(str).arrayBuffer())
);
const supportsRequestStream = isReadableStreamSupported && (() => {
let duplexAccessed = false;
const hasContentType = new Request(platform.origin, {
body: new ReadableStream(),
method: 'POST',
get duplex() {
duplexAccessed = true;
return 'half';
},
}).headers.has('Content-Type');
return duplexAccessed && !hasContentType;
})();
const DEFAULT_CHUNK_SIZE = 64 * 1024;
const supportsResponseStream = isReadableStreamSupported && !!(()=> {
try {
return utils.isReadableStream(new Response('').body);
} catch(err) {
// return undefined
}
})();
const resolvers = {
stream: supportsResponseStream && ((res) => res.body)
};
isFetchSupported && (((res) => {
['text', 'arrayBuffer', 'blob', 'formData', 'stream'].forEach(type => {
!resolvers[type] && (resolvers[type] = utils.isFunction(res[type]) ? (res) => res[type]() :
(_, config) => {
throw new AxiosError(`Response type '${type}' is not supported`, AxiosError.ERR_NOT_SUPPORT, config);
})
});
})(new Response));
const getBodyLength = async (body) => {
if (body == null) {
return 0;
}
if(utils.isBlob(body)) {
return body.size;
}
if(utils.isSpecCompliantForm(body)) {
return (await new Request(body).arrayBuffer()).byteLength;
}
if(utils.isArrayBufferView(body)) {
return body.byteLength;
}
if(utils.isURLSearchParams(body)) {
body = body + '';
}
if(utils.isString(body)) {
return (await encodeText(body)).byteLength;
}
}
const resolveBodyLength = async (headers, body) => {
const length = utils.toFiniteNumber(headers.getContentLength());
return length == null ? getBodyLength(body) : length;
}
export default isFetchSupported && (async (config) => {
let {
url,
method,
data,
signal,
cancelToken,
timeout,
onDownloadProgress,
onUploadProgress,
responseType,
headers,
withCredentials = 'same-origin',
fetchOptions
} = resolveConfig(config);
responseType = responseType ? (responseType + '').toLowerCase() : 'text';
let [composedSignal, stopTimeout] = (signal || cancelToken || timeout) ?
composeSignals([signal, cancelToken], timeout) : [];
let finished, request;
const onFinish = () => {
!finished && setTimeout(() => {
composedSignal && composedSignal.unsubscribe();
});
finished = true;
}
let requestContentLength;
try {
if (
onUploadProgress && supportsRequestStream && method !== 'get' && method !== 'head' &&
(requestContentLength = await resolveBodyLength(headers, data)) !== 0
) {
let _request = new Request(url, {
method: 'POST',
body: data,
duplex: "half"
});
let contentTypeHeader;
if (utils.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) {
headers.setContentType(contentTypeHeader)
}
if (_request.body) {
data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, fetchProgressDecorator(
requestContentLength,
progressEventReducer(onUploadProgress)
), null, encodeText);
}
}
if (!utils.isString(withCredentials)) {
withCredentials = withCredentials ? 'cors' : 'omit';
}
request = new Request(url, {
...fetchOptions,
signal: composedSignal,
method: method.toUpperCase(),
headers: headers.normalize().toJSON(),
body: data,
duplex: "half",
withCredentials
});
let response = await fetch(request);
const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response');
if (supportsResponseStream && (onDownloadProgress || isStreamResponse)) {
const options = {};
['status', 'statusText', 'headers'].forEach(prop => {
options[prop] = response[prop];
});
const responseContentLength = utils.toFiniteNumber(response.headers.get('content-length'));
response = new Response(
trackStream(response.body, DEFAULT_CHUNK_SIZE, onDownloadProgress && fetchProgressDecorator(
responseContentLength,
progressEventReducer(onDownloadProgress, true)
), isStreamResponse && onFinish, encodeText),
options
);
}
responseType = responseType || 'text';
let responseData = await resolvers[utils.findKey(resolvers, responseType) || 'text'](response, config);
!isStreamResponse && onFinish();
stopTimeout && stopTimeout();
return await new Promise((resolve, reject) => {
settle(resolve, reject, {
data: responseData,
headers: AxiosHeaders.from(response.headers),
status: response.status,
statusText: response.statusText,
config,
request
})
})
} catch (err) {
onFinish();
if (err && err.name === 'TypeError' && /fetch/i.test(err.message)) {
throw Object.assign(
new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request),
{
cause: err.cause || err
}
)
}
throw AxiosError.from(err, err && err.code, config, request);
}
});
import utils from './../utils.js';
import settle from './../core/settle.js';
import transitionalDefaults from '../defaults/transitional.js';
import AxiosError from '../core/AxiosError.js';
import CanceledError from '../cancel/CanceledError.js';
import parseProtocol from '../helpers/parseProtocol.js';
import platform from '../platform/index.js';
import AxiosHeaders from '../core/AxiosHeaders.js';
import progressEventReducer from '../helpers/progressEventReducer.js';
import resolveConfig from "../helpers/resolveConfig.js";
const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined';
export default isXHRAdapterSupported && function (config) {
return new Promise(function dispatchXhrRequest(resolve, reject) {
const _config = resolveConfig(config);
let requestData = _config.data;
const requestHeaders = AxiosHeaders.from(_config.headers).normalize();
let {responseType} = _config;
let onCanceled;
function done() {
if (_config.cancelToken) {
_config.cancelToken.unsubscribe(onCanceled);
}
if (_config.signal) {
_config.signal.removeEventListener('abort', onCanceled);
}
}
let request = new XMLHttpRequest();
request.open(_config.method.toUpperCase(), _config.url, true);
// Set the request timeout in MS
request.timeout = _config.timeout;
function onloadend() {
if (!request) {
return;
}
// Prepare the response
const responseHeaders = AxiosHeaders.from(
'getAllResponseHeaders' in request && request.getAllResponseHeaders()
);
const responseData = !responseType || responseType === 'text' || responseType === 'json' ?
request.responseText : request.response;
const response = {
data: responseData,
status: request.status,
statusText: request.statusText,
headers: responseHeaders,
config,
request
};
settle(function _resolve(value) {
resolve(value);
done();
}, function _reject(err) {
reject(err);
done();
}, response);
// Clean up request
request = null;
}
if ('onloadend' in request) {
// Use onloadend if available
request.onloadend = onloadend;
} else {
// Listen for ready state to emulate onloadend
request.onreadystatechange = function handleLoad() {
if (!request || request.readyState !== 4) {
return;
}
// The request errored out and we didn't get a response, this will be
// handled by onerror instead
// With one exception: request that using file: protocol, most browsers
// will return status as 0 even though it's a successful request
if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
return;
}
// readystate handler is calling before onerror or ontimeout handlers,
// so we should call onloadend on the next 'tick'
setTimeout(onloadend);
};
}
// Handle browser request cancellation (as opposed to a manual cancellation)
request.onabort = function handleAbort() {
if (!request) {
return;
}
reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, _config, request));
// Clean up request
request = null;
};
// Handle low level network errors
request.onerror = function handleError() {
// Real errors are hidden from us by the browser
// onerror should only fire if it's a network error
reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, _config, request));
// Clean up request
request = null;
};
// Handle timeout
request.ontimeout = function handleTimeout() {
let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded';
const transitional = _config.transitional || transitionalDefaults;
if (_config.timeoutErrorMessage) {
timeoutErrorMessage = _config.timeoutErrorMessage;
}
reject(new AxiosError(
timeoutErrorMessage,
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
_config,
request));
// Clean up request
request = null;
};
// Remove Content-Type if data is undefined
requestData === undefined && requestHeaders.setContentType(null);
// Add headers to the request
if ('setRequestHeader' in request) {
utils.forEach(requestHeaders.toJSON(), function setRequestHeader(val, key) {
request.setRequestHeader(key, val);
});
}
// Add withCredentials to request if needed
if (!utils.isUndefined(_config.withCredentials)) {
request.withCredentials = !!_config.withCredentials;
}
// Add responseType to request if needed
if (responseType && responseType !== 'json') {
request.responseType = _config.responseType;
}
// Handle progress if needed
if (typeof _config.onDownloadProgress === 'function') {
request.addEventListener('progress', progressEventReducer(_config.onDownloadProgress, true));
}
// Not all browsers support upload events
if (typeof _config.onUploadProgress === 'function' && request.upload) {
request.upload.addEventListener('progress', progressEventReducer(_config.onUploadProgress));
}
if (_config.cancelToken || _config.signal) {
// Handle cancellation
// eslint-disable-next-line func-names
onCanceled = cancel => {
if (!request) {
return;
}
reject(!cancel || cancel.type ? new CanceledError(null, config, request) : cancel);
request.abort();
request = null;
};
_config.cancelToken && _config.cancelToken.subscribe(onCanceled);
if (_config.signal) {
_config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled);
}
}
const protocol = parseProtocol(_config.url);
if (protocol && platform.protocols.indexOf(protocol) === -1) {
reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config));
return;
}
// Send the request
request.send(requestData || null);
});
}
'use strict';
import utils from './utils.js';
import bind from './helpers/bind.js';
import Axios from './core/Axios.js';
import mergeConfig from './core/mergeConfig.js';
import defaults from './defaults/index.js';
import formDataToJSON from './helpers/formDataToJSON.js';
import CanceledError from './cancel/CanceledError.js';
import CancelToken from './cancel/CancelToken.js';
import isCancel from './cancel/isCancel.js';
import {VERSION} from './env/data.js';
import toFormData from './helpers/toFormData.js';
import AxiosError from './core/AxiosError.js';
import spread from './helpers/spread.js';
import isAxiosError from './helpers/isAxiosError.js';
import AxiosHeaders from "./core/AxiosHeaders.js";
import adapters from './adapters/adapters.js';
import HttpStatusCode from './helpers/HttpStatusCode.js';
/**
* Create an instance of Axios
*
* @param {Object} defaultConfig The default config for the instance
*
* @returns {Axios} A new instance of Axios
*/
function createInstance(defaultConfig) {
const context = new Axios(defaultConfig);
const instance = bind(Axios.prototype.request, context);
// Copy axios.prototype to instance
utils.extend(instance, Axios.prototype, context, {allOwnKeys: true});
// Copy context to instance
utils.extend(instance, context, null, {allOwnKeys: true});
// Factory for creating new instances
instance.create = function create(instanceConfig) {
return createInstance(mergeConfig(defaultConfig, instanceConfig));
};
return instance;
}
// Create the default instance to be exported
const axios = createInstance(defaults);
// Expose Axios class to allow class inheritance
axios.Axios = Axios;
// Expose Cancel & CancelToken
axios.CanceledError = CanceledError;
axios.CancelToken = CancelToken;
axios.isCancel = isCancel;
axios.VERSION = VERSION;
axios.toFormData = toFormData;
// Expose AxiosError class
axios.AxiosError = AxiosError;
// alias for CanceledError for backward compatibility
axios.Cancel = axios.CanceledError;
// Expose all/spread
axios.all = function all(promises) {
return Promise.all(promises);
};
axios.spread = spread;
// Expose isAxiosError
axios.isAxiosError = isAxiosError;
// Expose mergeConfig
axios.mergeConfig = mergeConfig;
axios.AxiosHeaders = AxiosHeaders;
axios.formToJSON = thing => formDataToJSON(utils.isHTMLForm(thing) ? new FormData(thing) : thing);
axios.getAdapter = adapters.getAdapter;
axios.HttpStatusCode = HttpStatusCode;
axios.default = axios;
// this module should only have a default export
export default axios
'use strict';
import CanceledError from './CanceledError.js';
/**
* A `CancelToken` is an object that can be used to request cancellation of an operation.
*
* @param {Function} executor The executor function.
*
* @returns {CancelToken}
*/
class CancelToken {
constructor(executor) {
if (typeof executor !== 'function') {
throw new TypeError('executor must be a function.');
}
let resolvePromise;
this.promise = new Promise(function promiseExecutor(resolve) {
resolvePromise = resolve;
});
const token = this;
// eslint-disable-next-line func-names
this.promise.then(cancel => {
if (!token._listeners) return;
let i = token._listeners.length;
while (i-- > 0) {
token._listeners[i](cancel);
}
token._listeners = null;
});
// eslint-disable-next-line func-names
this.promise.then = onfulfilled => {
let _resolve;
// eslint-disable-next-line func-names
const promise = new Promise(resolve => {
token.subscribe(resolve);
_resolve = resolve;
}).then(onfulfilled);
promise.cancel = function reject() {
token.unsubscribe(_resolve);
};
return promise;
};
executor(function cancel(message, config, request) {
if (token.reason) {
// Cancellation has already been requested
return;
}
token.reason = new CanceledError(message, config, request);
resolvePromise(token.reason);
});
}
/**
* Throws a `CanceledError` if cancellation has been requested.
*/
throwIfRequested() {
if (this.reason) {
throw this.reason;
}
}
/**
* Subscribe to the cancel signal
*/
subscribe(listener) {
if (this.reason) {
listener(this.reason);
return;
}
if (this._listeners) {
this._listeners.push(listener);
} else {
this._listeners = [listener];
}
}
/**
* Unsubscribe from the cancel signal
*/
unsubscribe(listener) {
if (!this._listeners) {
return;
}
const index = this._listeners.indexOf(listener);
if (index !== -1) {
this._listeners.splice(index, 1);
}
}
/**
* Returns an object that contains a new `CancelToken` and a function that, when called,
* cancels the `CancelToken`.
*/
static source() {
let cancel;
const token = new CancelToken(function executor(c) {
cancel = c;
});
return {
token,
cancel
};
}
}
export default CancelToken;
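// Usage sketch: requesting cancellation through a token produced by
// CancelToken.source(). The '/status' URL is a placeholder; the calls used
// here (get, isCancel, CancelToken.source) are the public API exposed above.
import axios from 'axios';

const source = axios.CancelToken.source();

axios.get('/status', { cancelToken: source.token }).catch(err => {
  if (axios.isCancel(err)) {
    console.log('request canceled:', err.message);
  }
});

// Later, abort the pending request; subscribers of the token are notified.
source.cancel('operation canceled by the user');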
'use strict';
import AxiosError from '../core/AxiosError.js';
import utils from '../utils.js';
/**
* A `CanceledError` is an object that is thrown when an operation is canceled.
*
* @param {string=} message The message.
* @param {Object=} config The config.
* @param {Object=} request The request.
*
* @returns {CanceledError} The created error.
*/
function CanceledError(message, config, request) {
// eslint-disable-next-line no-eq-null,eqeqeq
AxiosError.call(this, message == null ? 'canceled' : message, AxiosError.ERR_CANCELED, config, request);
this.name = 'CanceledError';
}
utils.inherits(CanceledError, AxiosError, {
__CANCEL__: true
});
export default CanceledError;
'use strict';
export default function isCancel(value) {
return !!(value && value.__CANCEL__);
}
'use strict';
import utils from './../utils.js';
import buildURL from '../helpers/buildURL.js';
import InterceptorManager from './InterceptorManager.js';
import dispatchRequest from './dispatchRequest.js';
import mergeConfig from './mergeConfig.js';
import buildFullPath from './buildFullPath.js';
import validator from '../helpers/validator.js';
import AxiosHeaders from './AxiosHeaders.js';
const validators = validator.validators;
/**
* Create a new instance of Axios
*
* @param {Object} instanceConfig The default config for the instance
*
* @return {Axios} A new instance of Axios
*/
class Axios {
constructor(instanceConfig) {
this.defaults = instanceConfig;
this.interceptors = {
request: new InterceptorManager(),
response: new InterceptorManager()
};
}
/**
* Dispatch a request
*
* @param {String|Object} configOrUrl The config specific for this request (merged with this.defaults)
* @param {?Object} config
*
* @returns {Promise} The Promise to be fulfilled
*/
async request(configOrUrl, config) {
try {
return await this._request(configOrUrl, config);
} catch (err) {
if (err instanceof Error) {
let dummy;
Error.captureStackTrace ? Error.captureStackTrace(dummy = {}) : (dummy = new Error());
// slice off the Error: ... line
const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : '';
try {
if (!err.stack) {
err.stack = stack;
// match without the 2 top stack lines
} else if (stack && !String(err.stack).endsWith(stack.replace(/^.+\n.+\n/, ''))) {
err.stack += '\n' + stack
}
} catch (e) {
// ignore the case where "stack" is an un-writable property
}
}
throw err;
}
}
_request(configOrUrl, config) {
/*eslint no-param-reassign:0*/
// Allow for axios('example/url'[, config]) a la fetch API
if (typeof configOrUrl === 'string') {
config = config || {};
config.url = configOrUrl;
} else {
config = configOrUrl || {};
}
config = mergeConfig(this.defaults, config);
const {transitional, paramsSerializer, headers} = config;
if (transitional !== undefined) {
validator.assertOptions(transitional, {
silentJSONParsing: validators.transitional(validators.boolean),
forcedJSONParsing: validators.transitional(validators.boolean),
clarifyTimeoutError: validators.transitional(validators.boolean)
}, false);
}
if (paramsSerializer != null) {
if (utils.isFunction(paramsSerializer)) {
config.paramsSerializer = {
serialize: paramsSerializer
}
} else {
validator.assertOptions(paramsSerializer, {
encode: validators.function,
serialize: validators.function
}, true);
}
}
// Set config.method
config.method = (config.method || this.defaults.method || 'get').toLowerCase();
// Flatten headers
let contextHeaders = headers && utils.merge(
headers.common,
headers[config.method]
);
headers && utils.forEach(
['delete', 'get', 'head', 'post', 'put', 'patch', 'common'],
(method) => {
delete headers[method];
}
);
config.headers = AxiosHeaders.concat(contextHeaders, headers);
// filter out skipped interceptors
const requestInterceptorChain = [];
let synchronousRequestInterceptors = true;
this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) {
if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) {
return;
}
synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous;
requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected);
});
const responseInterceptorChain = [];
this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) {
responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected);
});
let promise;
let i = 0;
let len;
if (!synchronousRequestInterceptors) {
const chain = [dispatchRequest.bind(this), undefined];
chain.unshift.apply(chain, requestInterceptorChain);
chain.push.apply(chain, responseInterceptorChain);
len = chain.length;
promise = Promise.resolve(config);
while (i < len) {
promise = promise.then(chain[i++], chain[i++]);
}
return promise;
}
len = requestInterceptorChain.length;
let newConfig = config;
i = 0;
while (i < len) {
const onFulfilled = requestInterceptorChain[i++];
const onRejected = requestInterceptorChain[i++];
try {
newConfig = onFulfilled(newConfig);
} catch (error) {
onRejected.call(this, error);
break;
}
}
try {
promise = dispatchRequest.call(this, newConfig);
} catch (error) {
return Promise.reject(error);
}
i = 0;
len = responseInterceptorChain.length;
while (i < len) {
promise = promise.then(responseInterceptorChain[i++], responseInterceptorChain[i++]);
}
return promise;
}
getUri(config) {
config = mergeConfig(this.defaults, config);
const fullPath = buildFullPath(config.baseURL, config.url);
return buildURL(fullPath, config.params, config.paramsSerializer);
}
}
// Provide aliases for supported request methods
utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) {
/*eslint func-names:0*/
Axios.prototype[method] = function(url, config) {
return this.request(mergeConfig(config || {}, {
method,
url,
data: (config || {}).data
}));
};
});
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
/*eslint func-names:0*/
function generateHTTPMethod(isForm) {
return function httpMethod(url, data, config) {
return this.request(mergeConfig(config || {}, {
method,
headers: isForm ? {
'Content-Type': 'multipart/form-data'
} : {},
url,
data
}));
};
}
Axios.prototype[method] = generateHTTPMethod();
Axios.prototype[method + 'Form'] = generateHTTPMethod(true);
});
export default Axios;
'use strict';
import utils from '../utils.js';
/**
* Create an Error with the specified message, config, error code, request and response.
*
* @param {string} message The error message.
* @param {string} [code] The error code (for example, 'ECONNABORTED').
* @param {Object} [config] The config.
* @param {Object} [request] The request.
* @param {Object} [response] The response.
*
* @returns {Error} The created error.
*/
function AxiosError(message, code, config, request, response) {
Error.call(this);
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
} else {
this.stack = (new Error()).stack;
}
this.message = message;
this.name = 'AxiosError';
code && (this.code = code);
config && (this.config = config);
request && (this.request = request);
response && (this.response = response);
}
utils.inherits(AxiosError, Error, {
toJSON: function toJSON() {
return {
// Standard
message: this.message,
name: this.name,
// Microsoft
description: this.description,
number: this.number,
// Mozilla
fileName: this.fileName,
lineNumber: this.lineNumber,
columnNumber: this.columnNumber,
stack: this.stack,
// Axios
config: utils.toJSONObject(this.config),
code: this.code,
status: this.response && this.response.status ? this.response.status : null
};
}
});
const prototype = AxiosError.prototype;
const descriptors = {};
[
'ERR_BAD_OPTION_VALUE',
'ERR_BAD_OPTION',
'ECONNABORTED',
'ETIMEDOUT',
'ERR_NETWORK',
'ERR_FR_TOO_MANY_REDIRECTS',
'ERR_DEPRECATED',
'ERR_BAD_RESPONSE',
'ERR_BAD_REQUEST',
'ERR_CANCELED',
'ERR_NOT_SUPPORT',
'ERR_INVALID_URL'
// eslint-disable-next-line func-names
].forEach(code => {
descriptors[code] = {value: code};
});
Object.defineProperties(AxiosError, descriptors);
Object.defineProperty(prototype, 'isAxiosError', {value: true});
// eslint-disable-next-line func-names
AxiosError.from = (error, code, config, request, response, customProps) => {
const axiosError = Object.create(prototype);
utils.toFlatObject(error, axiosError, function filter(obj) {
return obj !== Error.prototype;
}, prop => {
return prop !== 'isAxiosError';
});
AxiosError.call(axiosError, error.message, code, config, request, response);
axiosError.cause = error;
axiosError.name = error.name;
customProps && Object.assign(axiosError, customProps);
return axiosError;
};
export default AxiosError;
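// Usage sketch: inspecting an AxiosError from a failed request. The '/missing'
// URL is a placeholder; `code`, `response` and `toJSON()` are the fields and
// helper defined on AxiosError above.
import axios from 'axios';

axios.get('/missing').catch(err => {
  if (axios.isAxiosError(err)) {
    console.log(err.code);                            // e.g. AxiosError.ERR_BAD_REQUEST for a 4xx status
    console.log(err.response && err.response.status); // HTTP status when a response was received
    console.log(err.toJSON());                        // plain-object summary suitable for logging
  }
});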
'use strict';
import utils from '../utils.js';
import parseHeaders from '../helpers/parseHeaders.js';
const $internals = Symbol('internals');
function normalizeHeader(header) {
return header && String(header).trim().toLowerCase();
}
function normalizeValue(value) {
if (value === false || value == null) {
return value;
}
return utils.isArray(value) ? value.map(normalizeValue) : String(value);
}
function parseTokens(str) {
const tokens = Object.create(null);
const tokensRE = /([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;
let match;
while ((match = tokensRE.exec(str))) {
tokens[match[1]] = match[2];
}
return tokens;
}
const isValidHeaderName = (str) => /^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(str.trim());
function matchHeaderValue(context, value, header, filter, isHeaderNameFilter) {
if (utils.isFunction(filter)) {
return filter.call(this, value, header);
}
if (isHeaderNameFilter) {
value = header;
}
if (!utils.isString(value)) return;
if (utils.isString(filter)) {
return value.indexOf(filter) !== -1;
}
if (utils.isRegExp(filter)) {
return filter.test(value);
}
}
function formatHeader(header) {
return header.trim()
.toLowerCase().replace(/([a-z\d])(\w*)/g, (w, char, str) => {
return char.toUpperCase() + str;
});
}
function buildAccessors(obj, header) {
const accessorName = utils.toCamelCase(' ' + header);
['get', 'set', 'has'].forEach(methodName => {
Object.defineProperty(obj, methodName + accessorName, {
value: function(arg1, arg2, arg3) {
return this[methodName].call(this, header, arg1, arg2, arg3);
},
configurable: true
});
});
}
class AxiosHeaders {
constructor(headers) {
headers && this.set(headers);
}
set(header, valueOrRewrite, rewrite) {
const self = this;
function setHeader(_value, _header, _rewrite) {
const lHeader = normalizeHeader(_header);
if (!lHeader) {
throw new Error('header name must be a non-empty string');
}
const key = utils.findKey(self, lHeader);
if(!key || self[key] === undefined || _rewrite === true || (_rewrite === undefined && self[key] !== false)) {
self[key || _header] = normalizeValue(_value);
}
}
const setHeaders = (headers, _rewrite) =>
utils.forEach(headers, (_value, _header) => setHeader(_value, _header, _rewrite));
if (utils.isPlainObject(header) || header instanceof this.constructor) {
setHeaders(header, valueOrRewrite)
} else if(utils.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) {
setHeaders(parseHeaders(header), valueOrRewrite);
} else if (utils.isHeaders(header)) {
for (const [key, value] of header.entries()) {
setHeader(value, key, rewrite);
}
} else {
header != null && setHeader(valueOrRewrite, header, rewrite);
}
return this;
}
get(header, parser) {
header = normalizeHeader(header);
if (header) {
const key = utils.findKey(this, header);
if (key) {
const value = this[key];
if (!parser) {
return value;
}
if (parser === true) {
return parseTokens(value);
}
if (utils.isFunction(parser)) {
return parser.call(this, value, key);
}
if (utils.isRegExp(parser)) {
return parser.exec(value);
}
throw new TypeError('parser must be boolean|regexp|function');
}
}
}
has(header, matcher) {
header = normalizeHeader(header);
if (header) {
const key = utils.findKey(this, header);
return !!(key && this[key] !== undefined && (!matcher || matchHeaderValue(this, this[key], key, matcher)));
}
return false;
}
delete(header, matcher) {
const self = this;
let deleted = false;
function deleteHeader(_header) {
_header = normalizeHeader(_header);
if (_header) {
const key = utils.findKey(self, _header);
if (key && (!matcher || matchHeaderValue(self, self[key], key, matcher))) {
delete self[key];
deleted = true;
}
}
}
if (utils.isArray(header)) {
header.forEach(deleteHeader);
} else {
deleteHeader(header);
}
return deleted;
}
clear(matcher) {
const keys = Object.keys(this);
let i = keys.length;
let deleted = false;
while (i--) {
const key = keys[i];
if(!matcher || matchHeaderValue(this, this[key], key, matcher, true)) {
delete this[key];
deleted = true;
}
}
return deleted;
}
normalize(format) {
const self = this;
const headers = {};
utils.forEach(this, (value, header) => {
const key = utils.findKey(headers, header);
if (key) {
self[key] = normalizeValue(value);
delete self[header];
return;
}
const normalized = format ? formatHeader(header) : String(header).trim();
if (normalized !== header) {
delete self[header];
}
self[normalized] = normalizeValue(value);
headers[normalized] = true;
});
return this;
}
concat(...targets) {
return this.constructor.concat(this, ...targets);
}
toJSON(asStrings) {
const obj = Object.create(null);
utils.forEach(this, (value, header) => {
value != null && value !== false && (obj[header] = asStrings && utils.isArray(value) ? value.join(', ') : value);
});
return obj;
}
[Symbol.iterator]() {
return Object.entries(this.toJSON())[Symbol.iterator]();
}
toString() {
return Object.entries(this.toJSON()).map(([header, value]) => header + ': ' + value).join('\n');
}
get [Symbol.toStringTag]() {
return 'AxiosHeaders';
}
static from(thing) {
return thing instanceof this ? thing : new this(thing);
}
static concat(first, ...targets) {
const computed = new this(first);
targets.forEach((target) => computed.set(target));
return computed;
}
static accessor(header) {
const internals = this[$internals] = (this[$internals] = {
accessors: {}
});
const accessors = internals.accessors;
const prototype = this.prototype;
function defineAccessor(_header) {
const lHeader = normalizeHeader(_header);
if (!accessors[lHeader]) {
buildAccessors(prototype, _header);
accessors[lHeader] = true;
}
}
utils.isArray(header) ? header.forEach(defineAccessor) : defineAccessor(header);
return this;
}
}
AxiosHeaders.accessor(['Content-Type', 'Content-Length', 'Accept', 'Accept-Encoding', 'User-Agent', 'Authorization']);
// reserved names hotfix
utils.reduceDescriptors(AxiosHeaders.prototype, ({value}, key) => {
let mapped = key[0].toUpperCase() + key.slice(1); // map `set` => `Set`
return {
get: () => value,
set(headerValue) {
this[mapped] = headerValue;
}
}
});
utils.freezeMethods(AxiosHeaders);
export default AxiosHeaders;
'use strict';
import utils from './../utils.js';
class InterceptorManager {
constructor() {
this.handlers = [];
}
/**
* Add a new interceptor to the stack
*
* @param {Function} fulfilled The function to handle `then` for a `Promise`
* @param {Function} rejected The function to handle `reject` for a `Promise`
*
* @return {Number} An ID used to remove the interceptor later
*/
use(fulfilled, rejected, options) {
this.handlers.push({
fulfilled,
rejected,
synchronous: options ? options.synchronous : false,
runWhen: options ? options.runWhen : null
});
return this.handlers.length - 1;
}
/**
* Remove an interceptor from the stack
*
* @param {Number} id The ID that was returned by `use`
*
* @returns {void} The matching handler slot is set to `null`
*/
eject(id) {
if (this.handlers[id]) {
this.handlers[id] = null;
}
}
/**
* Clear all interceptors from the stack
*
* @returns {void}
*/
clear() {
if (this.handlers) {
this.handlers = [];
}
}
/**
* Iterate over all the registered interceptors
*
* This method is particularly useful for skipping over any
* interceptors that may have become `null` after calling `eject`.
*
* @param {Function} fn The function to call for each interceptor
*
* @returns {void}
*/
forEach(fn) {
utils.forEach(this.handlers, function forEachHandler(h) {
if (h !== null) {
fn(h);
}
});
}
}
export default InterceptorManager;
# axios // core
The modules found in `core/` should be modules that are specific to the domain logic of axios. These modules would most likely not make sense to be consumed outside of the axios module, as their logic is too specific. Some examples of core modules are:
- Dispatching requests
- Requests sent via `adapters/` (see lib/adapters/README.md)
- Managing interceptors
- Handling config
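For illustration, a short sketch of how the interceptor managers and config handling above are typically driven from application code (the header name set below is a made-up example):

```js
import axios from 'axios';

// Request interceptors run before dispatch, response interceptors after it.
const id = axios.interceptors.request.use(config => {
  config.headers.set('X-Request-Start', Date.now()); // illustrative custom header
  return config;
});

axios.interceptors.response.use(
  response => response,
  error => Promise.reject(error)
);

// An interceptor can be removed again with the id returned by `use`.
axios.interceptors.request.eject(id);
```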
'use strict';
import isAbsoluteURL from '../helpers/isAbsoluteURL.js';
import combineURLs from '../helpers/combineURLs.js';
/**
* Creates a new URL by combining the baseURL with the requestedURL,
* only when the requestedURL is not already an absolute URL.
* If the requestedURL is absolute, this function returns it untouched.
*
* @param {string} baseURL The base URL
* @param {string} requestedURL Absolute or relative URL to combine
*
* @returns {string} The combined full path
*/
export default function buildFullPath(baseURL, requestedURL) {
if (baseURL && !isAbsoluteURL(requestedURL)) {
return combineURLs(baseURL, requestedURL);
}
return requestedURL;
}
'use strict';
import transformData from './transformData.js';
import isCancel from '../cancel/isCancel.js';
import defaults from '../defaults/index.js';
import CanceledError from '../cancel/CanceledError.js';
import AxiosHeaders from '../core/AxiosHeaders.js';
import adapters from "../adapters/adapters.js";
/**
* Throws a `CanceledError` if cancellation has been requested.
*
* @param {Object} config The config that is to be used for the request
*
* @returns {void}
*/
function throwIfCancellationRequested(config) {
if (config.cancelToken) {
config.cancelToken.throwIfRequested();
}
if (config.signal && config.signal.aborted) {
throw new CanceledError(null, config);
}
}
/**
* Dispatch a request to the server using the configured adapter.
*
* @param {object} config The config that is to be used for the request
*
* @returns {Promise} The Promise to be fulfilled
*/
export default function dispatchRequest(config) {
throwIfCancellationRequested(config);
config.headers = AxiosHeaders.from(config.headers);
// Transform request data
config.data = transformData.call(
config,
config.transformRequest
);
if (['post', 'put', 'patch'].indexOf(config.method) !== -1) {
config.headers.setContentType('application/x-www-form-urlencoded', false);
}
const adapter = adapters.getAdapter(config.adapter || defaults.adapter);
return adapter(config).then(function onAdapterResolution(response) {
throwIfCancellationRequested(config);
// Transform response data
response.data = transformData.call(
config,
config.transformResponse,
response
);
response.headers = AxiosHeaders.from(response.headers);
return response;
}, function onAdapterRejection(reason) {
if (!isCancel(reason)) {
throwIfCancellationRequested(config);
// Transform response data
if (reason && reason.response) {
reason.response.data = transformData.call(
config,
config.transformResponse,
reason.response
);
reason.response.headers = AxiosHeaders.from(reason.response.headers);
}
}
return Promise.reject(reason);
});
}
'use strict';
import utils from '../utils.js';
import AxiosHeaders from "./AxiosHeaders.js";
const headersToObject = (thing) => thing instanceof AxiosHeaders ? { ...thing } : thing;
/**
* Config-specific merge-function which creates a new config-object
* by merging two configuration objects together.
*
* @param {Object} config1
* @param {Object} config2
*
* @returns {Object} New object resulting from merging config2 to config1
*/
export default function mergeConfig(config1, config2) {
// eslint-disable-next-line no-param-reassign
config2 = config2 || {};
const config = {};
function getMergedValue(target, source, caseless) {
if (utils.isPlainObject(target) && utils.isPlainObject(source)) {
return utils.merge.call({caseless}, target, source);
} else if (utils.isPlainObject(source)) {
return utils.merge({}, source);
} else if (utils.isArray(source)) {
return source.slice();
}
return source;
}
// eslint-disable-next-line consistent-return
function mergeDeepProperties(a, b, caseless) {
if (!utils.isUndefined(b)) {
return getMergedValue(a, b, caseless);
} else if (!utils.isUndefined(a)) {
return getMergedValue(undefined, a, caseless);
}
}
// eslint-disable-next-line consistent-return
function valueFromConfig2(a, b) {
if (!utils.isUndefined(b)) {
return getMergedValue(undefined, b);
}
}
// eslint-disable-next-line consistent-return
function defaultToConfig2(a, b) {
if (!utils.isUndefined(b)) {
return getMergedValue(undefined, b);
} else if (!utils.isUndefined(a)) {
return getMergedValue(undefined, a);
}
}
// eslint-disable-next-line consistent-return
function mergeDirectKeys(a, b, prop) {
if (prop in config2) {
return getMergedValue(a, b);
} else if (prop in config1) {
return getMergedValue(undefined, a);
}
}
const mergeMap = {
url: valueFromConfig2,
method: valueFromConfig2,
data: valueFromConfig2,
baseURL: defaultToConfig2,
transformRequest: defaultToConfig2,
transformResponse: defaultToConfig2,
paramsSerializer: defaultToConfig2,
timeout: defaultToConfig2,
timeoutMessage: defaultToConfig2,
withCredentials: defaultToConfig2,
withXSRFToken: defaultToConfig2,
adapter: defaultToConfig2,
responseType: defaultToConfig2,
xsrfCookieName: defaultToConfig2,
xsrfHeaderName: defaultToConfig2,
onUploadProgress: defaultToConfig2,
onDownloadProgress: defaultToConfig2,
decompress: defaultToConfig2,
maxContentLength: defaultToConfig2,
maxBodyLength: defaultToConfig2,
beforeRedirect: defaultToConfig2,
transport: defaultToConfig2,
httpAgent: defaultToConfig2,
httpsAgent: defaultToConfig2,
cancelToken: defaultToConfig2,
socketPath: defaultToConfig2,
responseEncoding: defaultToConfig2,
validateStatus: mergeDirectKeys,
headers: (a, b) => mergeDeepProperties(headersToObject(a), headersToObject(b), true)
};
utils.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) {
const merge = mergeMap[prop] || mergeDeepProperties;
const configValue = merge(config1[prop], config2[prop], prop);
(utils.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue);
});
return config;
}
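// Usage sketch of the merge semantics above: request-specific keys such as `url`
// come only from config2, most options fall back from config2 to config1, and
// `headers` are deep-merged case-insensitively. Values and the relative import
// path (assumed to sit next to mergeConfig.js) are illustrative.
import mergeConfig from './mergeConfig.js';

const merged = mergeConfig(
  {baseURL: 'http://localhost', timeout: 1000, headers: {common: {Accept: 'application/json'}}},
  {url: '/ping', headers: {common: {'X-Trace': 'abc'}}} // 'X-Trace' is a made-up header
);
// merged.url === '/ping', merged.timeout === 1000,
// merged.headers.common contains both Accept and X-Trace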
'use strict';
import AxiosError from './AxiosError.js';
/**
* Resolve or reject a Promise based on response status.
*
* @param {Function} resolve A function that resolves the promise.
* @param {Function} reject A function that rejects the promise.
* @param {object} response The response.
*
* @returns {object} The response.
*/
export default function settle(resolve, reject, response) {
const validateStatus = response.config.validateStatus;
if (!response.status || !validateStatus || validateStatus(response.status)) {
resolve(response);
} else {
reject(new AxiosError(
'Request failed with status code ' + response.status,
[AxiosError.ERR_BAD_REQUEST, AxiosError.ERR_BAD_RESPONSE][Math.floor(response.status / 100) - 4],
response.config,
response.request,
response
));
}
}
'use strict';
import utils from './../utils.js';
import defaults from '../defaults/index.js';
import AxiosHeaders from '../core/AxiosHeaders.js';
/**
* Transform the data for a request or a response
*
* @param {Array|Function} fns A single function or Array of functions
* @param {?Object} response The response object
*
* @returns {*} The resulting transformed data
*/
export default function transformData(fns, response) {
const config = this || defaults;
const context = response || config;
const headers = AxiosHeaders.from(context.headers);
let data = context.data;
utils.forEach(fns, function transform(fn) {
data = fn.call(config, data, headers.normalize(), response ? response.status : undefined);
});
headers.normalize();
return data;
}
'use strict';
import utils from '../utils.js';
import AxiosError from '../core/AxiosError.js';
import transitionalDefaults from './transitional.js';
import toFormData from '../helpers/toFormData.js';
import toURLEncodedForm from '../helpers/toURLEncodedForm.js';
import platform from '../platform/index.js';
import formDataToJSON from '../helpers/formDataToJSON.js';
/**
* It takes a string, tries to parse it, and if it fails, it returns the stringified version
* of the input
*
* @param {any} rawValue - The value to be stringified.
* @param {Function} parser - A function that parses a string into a JavaScript object.
* @param {Function} encoder - A function that takes a value and returns a string.
*
* @returns {string} A stringified version of the rawValue.
*/
function stringifySafely(rawValue, parser, encoder) {
if (utils.isString(rawValue)) {
try {
(parser || JSON.parse)(rawValue);
return utils.trim(rawValue);
} catch (e) {
if (e.name !== 'SyntaxError') {
throw e;
}
}
}
return (encoder || JSON.stringify)(rawValue);
}
const defaults = {
transitional: transitionalDefaults,
adapter: ['xhr', 'http', 'fetch'],
transformRequest: [function transformRequest(data, headers) {
const contentType = headers.getContentType() || '';
const hasJSONContentType = contentType.indexOf('application/json') > -1;
const isObjectPayload = utils.isObject(data);
if (isObjectPayload && utils.isHTMLForm(data)) {
data = new FormData(data);
}
const isFormData = utils.isFormData(data);
if (isFormData) {
return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data;
}
if (utils.isArrayBuffer(data) ||
utils.isBuffer(data) ||
utils.isStream(data) ||
utils.isFile(data) ||
utils.isBlob(data) ||
utils.isReadableStream(data)
) {
return data;
}
if (utils.isArrayBufferView(data)) {
return data.buffer;
}
if (utils.isURLSearchParams(data)) {
headers.setContentType('application/x-www-form-urlencoded;charset=utf-8', false);
return data.toString();
}
let isFileList;
if (isObjectPayload) {
if (contentType.indexOf('application/x-www-form-urlencoded') > -1) {
return toURLEncodedForm(data, this.formSerializer).toString();
}
if ((isFileList = utils.isFileList(data)) || contentType.indexOf('multipart/form-data') > -1) {
const _FormData = this.env && this.env.FormData;
return toFormData(
isFileList ? {'files[]': data} : data,
_FormData && new _FormData(),
this.formSerializer
);
}
}
if (isObjectPayload || hasJSONContentType ) {
headers.setContentType('application/json', false);
return stringifySafely(data);
}
return data;
}],
transformResponse: [function transformResponse(data) {
const transitional = this.transitional || defaults.transitional;
const forcedJSONParsing = transitional && transitional.forcedJSONParsing;
const JSONRequested = this.responseType === 'json';
if (utils.isResponse(data) || utils.isReadableStream(data)) {
return data;
}
if (data && utils.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) {
const silentJSONParsing = transitional && transitional.silentJSONParsing;
const strictJSONParsing = !silentJSONParsing && JSONRequested;
try {
return JSON.parse(data);
} catch (e) {
if (strictJSONParsing) {
if (e.name === 'SyntaxError') {
throw AxiosError.from(e, AxiosError.ERR_BAD_RESPONSE, this, null, this.response);
}
throw e;
}
}
}
return data;
}],
/**
* A timeout in milliseconds to abort a request. If set to 0 (default) a
* timeout is not created.
*/
timeout: 0,
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
maxBodyLength: -1,
env: {
FormData: platform.classes.FormData,
Blob: platform.classes.Blob
},
validateStatus: function validateStatus(status) {
return status >= 200 && status < 300;
},
headers: {
common: {
'Accept': 'application/json, text/plain, */*',
'Content-Type': undefined
}
}
};
utils.forEach(['delete', 'get', 'head', 'post', 'put', 'patch'], (method) => {
defaults.headers[method] = {};
});
export default defaults;
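// Usage sketch: the defaults above can be overridden globally or per instance.
// The timeout value, status range and header below are illustrative.
import axios from 'axios';

axios.defaults.timeout = 5000; // replace the 0 (no timeout) default globally

const api = axios.create({
  validateStatus: status => status >= 200 && status < 400, // also accept 3xx
  headers: {common: {Accept: 'application/json'}}
});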
'use strict';
export default {
silentJSONParsing: true,
forcedJSONParsing: true,
clarifyTimeoutError: false
};
# axios // env
The `data.js` file is updated automatically when the package version is bumped. Please do not edit it manually.
import _FormData from 'form-data';
export default typeof FormData !== 'undefined' ? FormData : _FormData;
export const VERSION = "1.7.2";
'use strict';
import stream from 'stream';
import utils from '../utils.js';
import throttle from './throttle.js';
import speedometer from './speedometer.js';
const kInternals = Symbol('internals');
class AxiosTransformStream extends stream.Transform{
constructor(options) {
options = utils.toFlatObject(options, {
maxRate: 0,
chunkSize: 64 * 1024,
minChunkSize: 100,
timeWindow: 500,
ticksRate: 2,
samplesCount: 15
}, null, (prop, source) => {
return !utils.isUndefined(source[prop]);
});
super({
readableHighWaterMark: options.chunkSize
});
const self = this;
const internals = this[kInternals] = {
length: options.length,
timeWindow: options.timeWindow,
ticksRate: options.ticksRate,
chunkSize: options.chunkSize,
maxRate: options.maxRate,
minChunkSize: options.minChunkSize,
bytesSeen: 0,
isCaptured: false,
notifiedBytesLoaded: 0,
ts: Date.now(),
bytes: 0,
onReadCallback: null
};
const _speedometer = speedometer(internals.ticksRate * options.samplesCount, internals.timeWindow);
this.on('newListener', event => {
if (event === 'progress') {
if (!internals.isCaptured) {
internals.isCaptured = true;
}
}
});
let bytesNotified = 0;
internals.updateProgress = throttle(function throttledHandler() {
const totalBytes = internals.length;
const bytesTransferred = internals.bytesSeen;
const progressBytes = bytesTransferred - bytesNotified;
if (!progressBytes || self.destroyed) return;
const rate = _speedometer(progressBytes);
bytesNotified = bytesTransferred;
process.nextTick(() => {
self.emit('progress', {
loaded: bytesTransferred,
total: totalBytes,
progress: totalBytes ? (bytesTransferred / totalBytes) : undefined,
bytes: progressBytes,
rate: rate ? rate : undefined,
estimated: rate && totalBytes && bytesTransferred <= totalBytes ?
(totalBytes - bytesTransferred) / rate : undefined,
lengthComputable: totalBytes != null
});
});
}, internals.ticksRate);
const onFinish = () => {
internals.updateProgress.call(true);
};
this.once('end', onFinish);
this.once('error', onFinish);
}
_read(size) {
const internals = this[kInternals];
if (internals.onReadCallback) {
internals.onReadCallback();
}
return super._read(size);
}
_transform(chunk, encoding, callback) {
const self = this;
const internals = this[kInternals];
const maxRate = internals.maxRate;
const readableHighWaterMark = this.readableHighWaterMark;
const timeWindow = internals.timeWindow;
const divider = 1000 / timeWindow;
const bytesThreshold = (maxRate / divider);
const minChunkSize = internals.minChunkSize !== false ? Math.max(internals.minChunkSize, bytesThreshold * 0.01) : 0;
function pushChunk(_chunk, _callback) {
const bytes = Buffer.byteLength(_chunk);
internals.bytesSeen += bytes;
internals.bytes += bytes;
if (internals.isCaptured) {
internals.updateProgress();
}
if (self.push(_chunk)) {
process.nextTick(_callback);
} else {
internals.onReadCallback = () => {
internals.onReadCallback = null;
process.nextTick(_callback);
};
}
}
const transformChunk = (_chunk, _callback) => {
const chunkSize = Buffer.byteLength(_chunk);
let chunkRemainder = null;
let maxChunkSize = readableHighWaterMark;
let bytesLeft;
let passed = 0;
if (maxRate) {
const now = Date.now();
if (!internals.ts || (passed = (now - internals.ts)) >= timeWindow) {
internals.ts = now;
bytesLeft = bytesThreshold - internals.bytes;
internals.bytes = bytesLeft < 0 ? -bytesLeft : 0;
passed = 0;
}
bytesLeft = bytesThreshold - internals.bytes;
}
if (maxRate) {
if (bytesLeft <= 0) {
// next time window
return setTimeout(() => {
_callback(null, _chunk);
}, timeWindow - passed);
}
if (bytesLeft < maxChunkSize) {
maxChunkSize = bytesLeft;
}
}
if (maxChunkSize && chunkSize > maxChunkSize && (chunkSize - maxChunkSize) > minChunkSize) {
chunkRemainder = _chunk.subarray(maxChunkSize);
_chunk = _chunk.subarray(0, maxChunkSize);
}
pushChunk(_chunk, chunkRemainder ? () => {
process.nextTick(_callback, null, chunkRemainder);
} : _callback);
};
transformChunk(chunk, function transformNextChunk(err, _chunk) {
if (err) {
return callback(err);
}
if (_chunk) {
transformChunk(_chunk, transformNextChunk);
} else {
callback(null);
}
});
}
setLength(length) {
this[kInternals].length = +length;
return this;
}
}
export default AxiosTransformStream;
'use strict';
import toFormData from './toFormData.js';
/**
* It encodes a string by replacing all characters that are not in the unreserved set with
* their percent-encoded equivalents
*
* @param {string} str - The string to encode.
*
* @returns {string} The encoded string.
*/
function encode(str) {
const charMap = {
'!': '%21',
"'": '%27',
'(': '%28',
')': '%29',
'~': '%7E',
'%20': '+',
'%00': '\x00'
};
return encodeURIComponent(str).replace(/[!'()~]|%20|%00/g, function replacer(match) {
return charMap[match];
});
}
/**
* It takes a params object and serializes its entries into an AxiosURLSearchParams instance
*
* @param {Object<string, any>} params - The parameters to be converted to a FormData object.
* @param {Object<string, any>} options - The options object passed to the Axios constructor.
*
* @returns {void}
*/
function AxiosURLSearchParams(params, options) {
this._pairs = [];
params && toFormData(params, this, options);
}
const prototype = AxiosURLSearchParams.prototype;
prototype.append = function append(name, value) {
this._pairs.push([name, value]);
};
prototype.toString = function toString(encoder) {
const _encode = encoder ? function(value) {
return encoder.call(this, value, encode);
} : encode;
return this._pairs.map(function each(pair) {
return _encode(pair[0]) + '=' + _encode(pair[1]);
}, '').join('&');
};
export default AxiosURLSearchParams;
const HttpStatusCode = {
Continue: 100,
SwitchingProtocols: 101,
Processing: 102,
EarlyHints: 103,
Ok: 200,
Created: 201,
Accepted: 202,
NonAuthoritativeInformation: 203,
NoContent: 204,
ResetContent: 205,
PartialContent: 206,
MultiStatus: 207,
AlreadyReported: 208,
ImUsed: 226,
MultipleChoices: 300,
MovedPermanently: 301,
Found: 302,
SeeOther: 303,
NotModified: 304,
UseProxy: 305,
Unused: 306,
TemporaryRedirect: 307,
PermanentRedirect: 308,
BadRequest: 400,
Unauthorized: 401,
PaymentRequired: 402,
Forbidden: 403,
NotFound: 404,
MethodNotAllowed: 405,
NotAcceptable: 406,
ProxyAuthenticationRequired: 407,
RequestTimeout: 408,
Conflict: 409,
Gone: 410,
LengthRequired: 411,
PreconditionFailed: 412,
PayloadTooLarge: 413,
UriTooLong: 414,
UnsupportedMediaType: 415,
RangeNotSatisfiable: 416,
ExpectationFailed: 417,
ImATeapot: 418,
MisdirectedRequest: 421,
UnprocessableEntity: 422,
Locked: 423,
FailedDependency: 424,
TooEarly: 425,
UpgradeRequired: 426,
PreconditionRequired: 428,
TooManyRequests: 429,
RequestHeaderFieldsTooLarge: 431,
UnavailableForLegalReasons: 451,
InternalServerError: 500,
NotImplemented: 501,
BadGateway: 502,
ServiceUnavailable: 503,
GatewayTimeout: 504,
HttpVersionNotSupported: 505,
VariantAlsoNegotiates: 506,
InsufficientStorage: 507,
LoopDetected: 508,
NotExtended: 510,
NetworkAuthenticationRequired: 511,
};
Object.entries(HttpStatusCode).forEach(([key, value]) => {
HttpStatusCode[value] = key;
});
export default HttpStatusCode;
# axios // helpers
The modules found in `helpers/` should be generic modules that are _not_ specific to the domain logic of axios. These modules could theoretically be published to npm on their own and consumed by other modules or apps. Some examples of generic modules are things like:
- Browser polyfills
- Managing cookies
- Parsing HTTP headers
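For example, a small sketch exercising two of these helpers (the relative import paths assume the snippet lives alongside the `helpers/` modules):

```js
import combineURLs from './combineURLs.js';
import parseHeaders from './parseHeaders.js';

combineURLs('http://localhost/api/', '/status');
// -> 'http://localhost/api/status'

parseHeaders('Content-Type: application/json\nConnection: keep-alive');
// -> { 'content-type': 'application/json', connection: 'keep-alive' }
```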
"use strict";
import stream from "stream";
class ZlibHeaderTransformStream extends stream.Transform {
__transform(chunk, encoding, callback) {
this.push(chunk);
callback();
}
_transform(chunk, encoding, callback) {
if (chunk.length !== 0) {
this._transform = this.__transform;
// Add Default Compression headers if no zlib headers are present
if (chunk[0] !== 120) { // Hex: 78
const header = Buffer.alloc(2);
header[0] = 120; // Hex: 78
header[1] = 156; // Hex: 9C
this.push(header, encoding);
}
}
this.__transform(chunk, encoding, callback);
}
}
export default ZlibHeaderTransformStream;
'use strict';
export default function bind(fn, thisArg) {
return function wrap() {
return fn.apply(thisArg, arguments);
};
}
'use strict';
import utils from '../utils.js';
import AxiosURLSearchParams from '../helpers/AxiosURLSearchParams.js';
/**
* It replaces all instances of the characters `:`, `$`, `,`, `+`, `[`, and `]` with their
* URI encoded counterparts
*
* @param {string} val The value to be encoded.
*
* @returns {string} The encoded value.
*/
function encode(val) {
return encodeURIComponent(val).
replace(/%3A/gi, ':').
replace(/%24/g, '$').
replace(/%2C/gi, ',').
replace(/%20/g, '+').
replace(/%5B/gi, '[').
replace(/%5D/gi, ']');
}
/**
* Build a URL by appending params to the end
*
* @param {string} url The base of the url (e.g., http://www.google.com)
* @param {object} [params] The params to be appended
* @param {?object} options
*
* @returns {string} The formatted url
*/
export default function buildURL(url, params, options) {
/*eslint no-param-reassign:0*/
if (!params) {
return url;
}
const _encode = options && options.encode || encode;
const serializeFn = options && options.serialize;
let serializedParams;
if (serializeFn) {
serializedParams = serializeFn(params, options);
} else {
serializedParams = utils.isURLSearchParams(params) ?
params.toString() :
new AxiosURLSearchParams(params, options).toString(_encode);
}
if (serializedParams) {
const hashmarkIndex = url.indexOf("#");
if (hashmarkIndex !== -1) {
url = url.slice(0, hashmarkIndex);
}
url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams;
}
return url;
}
import utils from "../utils.js";
const callbackify = (fn, reducer) => {
return utils.isAsyncFn(fn) ? function (...args) {
const cb = args.pop();
fn.apply(this, args).then((value) => {
try {
reducer ? cb(null, ...reducer(value)) : cb(null, value);
} catch (err) {
cb(err);
}
}, cb);
} : fn;
}
export default callbackify;
'use strict';
/**
* Creates a new URL by combining the specified URLs
*
* @param {string} baseURL The base URL
* @param {string} relativeURL The relative URL
*
* @returns {string} The combined URL
*/
export default function combineURLs(baseURL, relativeURL) {
return relativeURL
? baseURL.replace(/\/?\/$/, '') + '/' + relativeURL.replace(/^\/+/, '')
: baseURL;
}
import CanceledError from "../cancel/CanceledError.js";
import AxiosError from "../core/AxiosError.js";
const composeSignals = (signals, timeout) => {
let controller = new AbortController();
let aborted;
const onabort = function (cancel) {
if (!aborted) {
aborted = true;
unsubscribe();
const err = cancel instanceof Error ? cancel : this.reason;
controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err));
}
}
let timer = timeout && setTimeout(() => {
onabort(new AxiosError(`timeout of ${timeout}ms exceeded`, AxiosError.ETIMEDOUT))
}, timeout)
const unsubscribe = () => {
if (signals) {
timer && clearTimeout(timer);
timer = null;
signals.forEach(signal => {
signal &&
(signal.removeEventListener ? signal.removeEventListener('abort', onabort) : signal.unsubscribe(onabort));
});
signals = null;
}
}
signals.forEach((signal) => signal && signal.addEventListener && signal.addEventListener('abort', onabort));
const {signal} = controller;
signal.unsubscribe = unsubscribe;
return [signal, () => {
timer && clearTimeout(timer);
timer = null;
}];
}
export default composeSignals;
import utils from './../utils.js';
import platform from '../platform/index.js';
export default platform.hasStandardBrowserEnv ?
// Standard browser envs support document.cookie
{
write(name, value, expires, path, domain, secure) {
const cookie = [name + '=' + encodeURIComponent(value)];
utils.isNumber(expires) && cookie.push('expires=' + new Date(expires).toGMTString());
utils.isString(path) && cookie.push('path=' + path);
utils.isString(domain) && cookie.push('domain=' + domain);
secure === true && cookie.push('secure');
document.cookie = cookie.join('; ');
},
read(name) {
const match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)'));
return (match ? decodeURIComponent(match[3]) : null);
},
remove(name) {
this.write(name, '', Date.now() - 86400000);
}
}
:
// Non-standard browser env (web workers, react-native) lack needed support.
{
write() {},
read() {
return null;
},
remove() {}
};
'use strict';
/*eslint no-console:0*/
/**
* Supply a warning to the developer that a method they are using
* has been deprecated.
*
* @param {string} method The name of the deprecated method
* @param {string} [instead] The alternate method to use if applicable
* @param {string} [docs] The documentation URL to get further details
*
* @returns {void}
*/
export default function deprecatedMethod(method, instead, docs) {
try {
console.warn(
'DEPRECATED method `' + method + '`.' +
(instead ? ' Use `' + instead + '` instead.' : '') +
' This method will be removed in a future release.');
if (docs) {
console.warn('For more information about usage see ' + docs);
}
} catch (e) { /* Ignore */ }
}
'use strict';
import utils from '../utils.js';
/**
* It takes a string like `foo[x][y][z]` and returns an array like `['foo', 'x', 'y', 'z']`
*
* @param {string} name - The name of the property to get.
*
* @returns An array of strings.
*/
function parsePropPath(name) {
// foo[x][y][z]
// foo.x.y.z
// foo-x-y-z
// foo x y z
return utils.matchAll(/\w+|\[(\w*)]/g, name).map(match => {
return match[0] === '[]' ? '' : match[1] || match[0];
});
}
/**
* Convert an array to an object.
*
* @param {Array<any>} arr - The array to convert to an object.
*
* @returns An object with the same keys and values as the array.
*/
function arrayToObject(arr) {
const obj = {};
const keys = Object.keys(arr);
let i;
const len = keys.length;
let key;
for (i = 0; i < len; i++) {
key = keys[i];
obj[key] = arr[key];
}
return obj;
}
/**
* It takes a FormData object and returns a JavaScript object
*
* @param {FormData} formData The FormData object to convert to JSON.
*
* @returns {Object<string, any> | null} The converted object.
*/
function formDataToJSON(formData) {
function buildPath(path, value, target, index) {
let name = path[index++];
if (name === '__proto__') return true;
const isNumericKey = Number.isFinite(+name);
const isLast = index >= path.length;
name = !name && utils.isArray(target) ? target.length : name;
if (isLast) {
if (utils.hasOwnProp(target, name)) {
target[name] = [target[name], value];
} else {
target[name] = value;
}
return !isNumericKey;
}
if (!target[name] || !utils.isObject(target[name])) {
target[name] = [];
}
const result = buildPath(path, value, target[name], index);
if (result && utils.isArray(target[name])) {
target[name] = arrayToObject(target[name]);
}
return !isNumericKey;
}
if (utils.isFormData(formData) && utils.isFunction(formData.entries)) {
const obj = {};
utils.forEachEntry(formData, (name, value) => {
buildPath(parsePropPath(name), value, obj, 0);
});
return obj;
}
return null;
}
export default formDataToJSON;
import {TextEncoder} from 'util';
import {Readable} from 'stream';
import utils from "../utils.js";
import readBlob from "./readBlob.js";
const BOUNDARY_ALPHABET = utils.ALPHABET.ALPHA_DIGIT + '-_';
const textEncoder = new TextEncoder();
const CRLF = '\r\n';
const CRLF_BYTES = textEncoder.encode(CRLF);
const CRLF_BYTES_COUNT = 2;
class FormDataPart {
constructor(name, value) {
const {escapeName} = this.constructor;
const isStringValue = utils.isString(value);
let headers = `Content-Disposition: form-data; name="${escapeName(name)}"${
!isStringValue && value.name ? `; filename="${escapeName(value.name)}"` : ''
}${CRLF}`;
if (isStringValue) {
value = textEncoder.encode(String(value).replace(/\r?\n|\r\n?/g, CRLF));
} else {
headers += `Content-Type: ${value.type || "application/octet-stream"}${CRLF}`
}
this.headers = textEncoder.encode(headers + CRLF);
this.contentLength = isStringValue ? value.byteLength : value.size;
this.size = this.headers.byteLength + this.contentLength + CRLF_BYTES_COUNT;
this.name = name;
this.value = value;
}
async *encode(){
yield this.headers;
const {value} = this;
if(utils.isTypedArray(value)) {
yield value;
} else {
yield* readBlob(value);
}
yield CRLF_BYTES;
}
static escapeName(name) {
return String(name).replace(/[\r\n"]/g, (match) => ({
'\r' : '%0D',
'\n' : '%0A',
'"' : '%22',
}[match]));
}
}
const formDataToStream = (form, headersHandler, options) => {
const {
tag = 'form-data-boundary',
size = 25,
boundary = tag + '-' + utils.generateString(size, BOUNDARY_ALPHABET)
} = options || {};
if(!utils.isFormData(form)) {
throw TypeError('FormData instance required');
}
if (boundary.length < 1 || boundary.length > 70) {
throw Error('boundary must be 1-70 characters long')
}
const boundaryBytes = textEncoder.encode('--' + boundary + CRLF);
const footerBytes = textEncoder.encode('--' + boundary + '--' + CRLF + CRLF);
let contentLength = footerBytes.byteLength;
const parts = Array.from(form.entries()).map(([name, value]) => {
const part = new FormDataPart(name, value);
contentLength += part.size;
return part;
});
contentLength += boundaryBytes.byteLength * parts.length;
contentLength = utils.toFiniteNumber(contentLength);
const computedHeaders = {
'Content-Type': `multipart/form-data; boundary=${boundary}`
}
if (Number.isFinite(contentLength)) {
computedHeaders['Content-Length'] = contentLength;
}
headersHandler && headersHandler(computedHeaders);
return Readable.from((async function *() {
for(const part of parts) {
yield boundaryBytes;
yield* part.encode();
}
yield footerBytes;
})());
};
export default formDataToStream;
'use strict';
import AxiosError from '../core/AxiosError.js';
import parseProtocol from './parseProtocol.js';
import platform from '../platform/index.js';
const DATA_URL_PATTERN = /^(?:([^;]+);)?(?:[^;]+;)?(base64|),([\s\S]*)$/;
/**
* Parse data uri to a Buffer or Blob
*
* @param {String} uri
* @param {?Boolean} asBlob
* @param {?Object} options
* @param {?Function} options.Blob
*
* @returns {Buffer|Blob}
*/
export default function fromDataURI(uri, asBlob, options) {
const _Blob = options && options.Blob || platform.classes.Blob;
const protocol = parseProtocol(uri);
if (asBlob === undefined && _Blob) {
asBlob = true;
}
if (protocol === 'data') {
uri = protocol.length ? uri.slice(protocol.length + 1) : uri;
const match = DATA_URL_PATTERN.exec(uri);
if (!match) {
throw new AxiosError('Invalid URL', AxiosError.ERR_INVALID_URL);
}
const mime = match[1];
const isBase64 = match[2];
const body = match[3];
const buffer = Buffer.from(decodeURIComponent(body), isBase64 ? 'base64' : 'utf8');
if (asBlob) {
if (!_Blob) {
throw new AxiosError('Blob is not supported', AxiosError.ERR_NOT_SUPPORT);
}
return new _Blob([buffer], {type: mime});
}
return buffer;
}
throw new AxiosError('Unsupported protocol ' + protocol, AxiosError.ERR_NOT_SUPPORT);
}
'use strict';
/**
* Determines whether the specified URL is absolute
*
* @param {string} url The URL to test
*
* @returns {boolean} True if the specified URL is absolute, otherwise false
*/
export default function isAbsoluteURL(url) {
// A URL is considered absolute if it begins with "<scheme>://" or "//" (protocol-relative URL).
// RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed
// by any combination of letters, digits, plus, period, or hyphen.
return /^([a-z][a-z\d+\-.]*:)?\/\//i.test(url);
}
'use strict';
import utils from './../utils.js';
/**
* Determines whether the payload is an error thrown by Axios
*
* @param {*} payload The value to test
*
* @returns {boolean} True if the payload is an error thrown by Axios, otherwise false
*/
export default function isAxiosError(payload) {
return utils.isObject(payload) && (payload.isAxiosError === true);
}
'use strict';
import utils from './../utils.js';
import platform from '../platform/index.js';
export default platform.hasStandardBrowserEnv ?
// Standard browser envs have full support of the APIs needed to test
// whether the request URL is of the same origin as current location.
(function standardBrowserEnv() {
const msie = /(msie|trident)/i.test(navigator.userAgent);
const urlParsingNode = document.createElement('a');
let originURL;
/**
* Parse a URL to discover its components
*
* @param {String} url The URL to be parsed
* @returns {Object}
*/
function resolveURL(url) {
let href = url;
if (msie) {
// IE needs attribute set twice to normalize properties
urlParsingNode.setAttribute('href', href);
href = urlParsingNode.href;
}
urlParsingNode.setAttribute('href', href);
// urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils
return {
href: urlParsingNode.href,
protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '',
host: urlParsingNode.host,
search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '',
hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '',
hostname: urlParsingNode.hostname,
port: urlParsingNode.port,
pathname: (urlParsingNode.pathname.charAt(0) === '/') ?
urlParsingNode.pathname :
'/' + urlParsingNode.pathname
};
}
originURL = resolveURL(window.location.href);
/**
* Determine if a URL shares the same origin as the current location
*
* @param {String} requestURL The URL to test
* @returns {boolean} True if URL shares the same origin, otherwise false
*/
return function isURLSameOrigin(requestURL) {
const parsed = (utils.isString(requestURL)) ? resolveURL(requestURL) : requestURL;
return (parsed.protocol === originURL.protocol &&
parsed.host === originURL.host);
};
})() :
// Non standard browser envs (web workers, react-native) lack needed support.
(function nonStandardBrowserEnv() {
return function isURLSameOrigin() {
return true;
};
})();
// eslint-disable-next-line strict
export default null;
'use strict';
import utils from './../utils.js';
// RawAxiosHeaders whose duplicates are ignored by node
// c.f. https://nodejs.org/api/http.html#http_message_headers
const ignoreDuplicateOf = utils.toObjectSet([
'age', 'authorization', 'content-length', 'content-type', 'etag',
'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since',
'last-modified', 'location', 'max-forwards', 'proxy-authorization',
'referer', 'retry-after', 'user-agent'
]);
/**
* Parse headers into an object
*
* ```
* Date: Wed, 27 Aug 2014 08:58:49 GMT
* Content-Type: application/json
* Connection: keep-alive
* Transfer-Encoding: chunked
* ```
*
* @param {String} rawHeaders Headers needing to be parsed
*
* @returns {Object} Headers parsed into an object
*/
export default rawHeaders => {
const parsed = {};
let key;
let val;
let i;
rawHeaders && rawHeaders.split('\n').forEach(function parser(line) {
i = line.indexOf(':');
key = line.substring(0, i).trim().toLowerCase();
val = line.substring(i + 1).trim();
if (!key || (parsed[key] && ignoreDuplicateOf[key])) {
return;
}
if (key === 'set-cookie') {
if (parsed[key]) {
parsed[key].push(val);
} else {
parsed[key] = [val];
}
} else {
parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val;
}
});
return parsed;
};
'use strict';
export default function parseProtocol(url) {
const match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url);
return match && match[1] || '';
}
import speedometer from "./speedometer.js";
import throttle from "./throttle.js";
export default (listener, isDownloadStream, freq = 3) => {
let bytesNotified = 0;
const _speedometer = speedometer(50, 250);
return throttle(e => {
const loaded = e.loaded;
const total = e.lengthComputable ? e.total : undefined;
const progressBytes = loaded - bytesNotified;
const rate = _speedometer(progressBytes);
const inRange = loaded <= total;
bytesNotified = loaded;
const data = {
loaded,
total,
progress: total ? (loaded / total) : undefined,
bytes: progressBytes,
rate: rate ? rate : undefined,
estimated: rate && total && inRange ? (total - loaded) / rate : undefined,
event: e,
lengthComputable: total != null
};
data[isDownloadStream ? 'download' : 'upload'] = true;
listener(data);
}, freq);
}
const {asyncIterator} = Symbol;
const readBlob = async function* (blob) {
if (blob.stream) {
yield* blob.stream()
} else if (blob.arrayBuffer) {
yield await blob.arrayBuffer()
} else if (blob[asyncIterator]) {
yield* blob[asyncIterator]();
} else {
yield blob;
}
}
export default readBlob;
import platform from "../platform/index.js";
import utils from "../utils.js";
import isURLSameOrigin from "./isURLSameOrigin.js";
import cookies from "./cookies.js";
import buildFullPath from "../core/buildFullPath.js";
import mergeConfig from "../core/mergeConfig.js";
import AxiosHeaders from "../core/AxiosHeaders.js";
import buildURL from "./buildURL.js";
export default (config) => {
const newConfig = mergeConfig({}, config);
let {data, withXSRFToken, xsrfHeaderName, xsrfCookieName, headers, auth} = newConfig;
newConfig.headers = headers = AxiosHeaders.from(headers);
newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url), config.params, config.paramsSerializer);
// HTTP basic authentication
if (auth) {
headers.set('Authorization', 'Basic ' +
btoa((auth.username || '') + ':' + (auth.password ? unescape(encodeURIComponent(auth.password)) : ''))
);
}
let contentType;
if (utils.isFormData(data)) {
if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) {
headers.setContentType(undefined); // Let the browser set it
} else if ((contentType = headers.getContentType()) !== false) {
// fix semicolon duplication issue for ReactNative FormData implementation
const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : [];
headers.setContentType([type || 'multipart/form-data', ...tokens].join('; '));
}
}
// Add xsrf header
// This is only done if running in a standard browser environment.
// Specifically not if we're in a web worker, or react-native.
if (platform.hasStandardBrowserEnv) {
withXSRFToken && utils.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(newConfig));
if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(newConfig.url))) {
// Add xsrf header
const xsrfValue = xsrfHeaderName && xsrfCookieName && cookies.read(xsrfCookieName);
if (xsrfValue) {
headers.set(xsrfHeaderName, xsrfValue);
}
}
}
return newConfig;
}
'use strict';
/**
* Calculate data maxRate
* @param {Number} [samplesCount= 10]
* @param {Number} [min= 1000]
* @returns {Function}
*/
function speedometer(samplesCount, min) {
samplesCount = samplesCount || 10;
const bytes = new Array(samplesCount);
const timestamps = new Array(samplesCount);
let head = 0;
let tail = 0;
let firstSampleTS;
min = min !== undefined ? min : 1000;
return function push(chunkLength) {
const now = Date.now();
const startedAt = timestamps[tail];
if (!firstSampleTS) {
firstSampleTS = now;
}
bytes[head] = chunkLength;
timestamps[head] = now;
let i = tail;
let bytesCount = 0;
while (i !== head) {
bytesCount += bytes[i++];
i = i % samplesCount;
}
head = (head + 1) % samplesCount;
if (head === tail) {
tail = (tail + 1) % samplesCount;
}
if (now - firstSampleTS < min) {
return;
}
const passed = startedAt && now - startedAt;
return passed ? Math.round(bytesCount * 1000 / passed) : undefined;
};
}
export default speedometer;
'use strict';
/**
* Syntactic sugar for invoking a function and expanding an array for arguments.
*
* Common use case would be to use `Function.prototype.apply`.
*
* ```js
* function f(x, y, z) {}
* var args = [1, 2, 3];
* f.apply(null, args);
* ```
*
* With `spread` this example can be re-written.
*
* ```js
* spread(function(x, y, z) {})([1, 2, 3]);
* ```
*
* @param {Function} callback
*
* @returns {Function}
*/
export default function spread(callback) {
return function wrap(arr) {
return callback.apply(null, arr);
};
}
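// --- Editor's usage sketch (not part of the axios source) ---
// The classic use case is unpacking the results of Promise.all, e.g. two
// parallel requests whose responses are handled as separate arguments.
// The resolved objects below are placeholders for real responses.
import spread from './spread.js';

Promise.all([
  Promise.resolve({ data: 'cpu' }),
  Promise.resolve({ data: 'memory' }),
]).then(spread((cpuRes, memRes) => {
  console.log(cpuRes.data, memRes.data); // 'cpu' 'memory'
}));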
'use strict';
/**
 * Throttle decorator: limits `fn` to at most `freq` invocations per second.
 * @param {Function} fn - function to throttle
 * @param {Number} freq - maximum number of invocations per second
 * @return {Function}
 */
function throttle(fn, freq) {
let timestamp = 0;
const threshold = 1000 / freq;
let timer = null;
return function throttled() {
const force = this === true;
const now = Date.now();
if (force || now - timestamp > threshold) {
if (timer) {
clearTimeout(timer);
timer = null;
}
timestamp = now;
return fn.apply(null, arguments);
}
if (!timer) {
timer = setTimeout(() => {
timer = null;
timestamp = Date.now();
return fn.apply(null, arguments);
}, threshold - (now - timestamp));
}
};
}
export default throttle;
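// --- Editor's usage sketch (not part of the axios source) ---
// throttle caps `fn` at roughly `freq` calls per second: calls inside the
// threshold window are dropped, except one trailing call that is deferred
// with the arguments of the first skipped call. The progress logger below is
// a hypothetical consumer.
import throttle from './throttle.js';

const logProgress = throttle((loaded, total) => {
  console.log(`progress: ${Math.round((loaded / total) * 100)}%`);
}, 3); // at most ~3 logs per second

for (let loaded = 0; loaded <= 1000; loaded += 100) {
  logProgress(loaded, 1000); // only the first call runs now; one trailing call fires later
}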
'use strict';
import utils from '../utils.js';
import AxiosError from '../core/AxiosError.js';
// temporary hotfix to avoid circular references until AxiosURLSearchParams is refactored
import PlatformFormData from '../platform/node/classes/FormData.js';
/**
 * Determines if the given thing is an array or a plain JS object.
 *
 * @param {*} thing - The value to be visited.
 *
 * @returns {boolean}
 */
function isVisitable(thing) {
return utils.isPlainObject(thing) || utils.isArray(thing);
}
/**
* It removes the brackets from the end of a string
*
* @param {string} key - The key of the parameter.
*
* @returns {string} the key without the brackets.
*/
function removeBrackets(key) {
return utils.endsWith(key, '[]') ? key.slice(0, -2) : key;
}
/**
 * Renders a FormData key from a path and a key, using either dot or bracket notation.
 *
 * @param {?Array<string>} path - The path to the current key.
 * @param {string} key - The key of the current object being iterated over.
 * @param {boolean} dots - If true, the key is rendered with dots instead of brackets.
 *
 * @returns {string} The rendered key.
 */
function renderKey(path, key, dots) {
if (!path) return key;
return path.concat(key).map(function each(token, i) {
// eslint-disable-next-line no-param-reassign
token = removeBrackets(token);
return !dots && i ? '[' + token + ']' : token;
}).join(dots ? '.' : '');
}
/**
 * Determines whether the value is a flat array: an array none of whose elements
 * are themselves visitable (plain objects or arrays).
 *
 * @param {Array<any>} arr - The value to check
 *
 * @returns {boolean}
 */
function isFlatArray(arr) {
return utils.isArray(arr) && !arr.some(isVisitable);
}
const predicates = utils.toFlatObject(utils, {}, null, function filter(prop) {
return /^is[A-Z]/.test(prop);
});
/**
 * Converts a data object into a FormData object.
 *
 * @param {Object} obj - The object to convert to form data.
 * @param {?Object} [formData] - The FormData instance to append to (created if omitted).
 * @param {?Object} [options]
 * @param {Function} [options.visitor]
 * @param {Boolean} [options.metaTokens = true]
 * @param {Boolean} [options.dots = false]
 * @param {?Boolean} [options.indexes = false]
 *
 * @returns {Object} The populated FormData instance.
 */
function toFormData(obj, formData, options) {
if (!utils.isObject(obj)) {
throw new TypeError('target must be an object');
}
// eslint-disable-next-line no-param-reassign
formData = formData || new (PlatformFormData || FormData)();
// eslint-disable-next-line no-param-reassign
options = utils.toFlatObject(options, {
metaTokens: true,
dots: false,
indexes: false
}, false, function defined(option, source) {
// eslint-disable-next-line no-eq-null,eqeqeq
return !utils.isUndefined(source[option]);
});
const metaTokens = options.metaTokens;
// eslint-disable-next-line no-use-before-define
const visitor = options.visitor || defaultVisitor;
const dots = options.dots;
const indexes = options.indexes;
const _Blob = options.Blob || typeof Blob !== 'undefined' && Blob;
const useBlob = _Blob && utils.isSpecCompliantForm(formData);
if (!utils.isFunction(visitor)) {
throw new TypeError('visitor must be a function');
}
function convertValue(value) {
if (value === null) return '';
if (utils.isDate(value)) {
return value.toISOString();
}
if (!useBlob && utils.isBlob(value)) {
throw new AxiosError('Blob is not supported. Use a Buffer instead.');
}
if (utils.isArrayBuffer(value) || utils.isTypedArray(value)) {
return useBlob && typeof Blob === 'function' ? new Blob([value]) : Buffer.from(value);
}
return value;
}
/**
* Default visitor.
*
* @param {*} value
* @param {String|Number} key
* @param {Array<String|Number>} path
* @this {FormData}
*
 * @returns {boolean} return true to visit each prop of the value recursively
*/
function defaultVisitor(value, key, path) {
let arr = value;
if (value && !path && typeof value === 'object') {
if (utils.endsWith(key, '{}')) {
// eslint-disable-next-line no-param-reassign
key = metaTokens ? key : key.slice(0, -2);
// eslint-disable-next-line no-param-reassign
value = JSON.stringify(value);
} else if (
(utils.isArray(value) && isFlatArray(value)) ||
((utils.isFileList(value) || utils.endsWith(key, '[]')) && (arr = utils.toArray(value))
)) {
// eslint-disable-next-line no-param-reassign
key = removeBrackets(key);
arr.forEach(function each(el, index) {
!(utils.isUndefined(el) || el === null) && formData.append(
// eslint-disable-next-line no-nested-ternary
indexes === true ? renderKey([key], index, dots) : (indexes === null ? key : key + '[]'),
convertValue(el)
);
});
return false;
}
}
if (isVisitable(value)) {
return true;
}
formData.append(renderKey(path, key, dots), convertValue(value));
return false;
}
const stack = [];
const exposedHelpers = Object.assign(predicates, {
defaultVisitor,
convertValue,
isVisitable
});
function build(value, path) {
if (utils.isUndefined(value)) return;
if (stack.indexOf(value) !== -1) {
throw Error('Circular reference detected in ' + path.join('.'));
}
stack.push(value);
utils.forEach(value, function each(el, key) {
const result = !(utils.isUndefined(el) || el === null) && visitor.call(
formData, el, utils.isString(key) ? key.trim() : key, path, exposedHelpers
);
if (result === true) {
build(el, path ? path.concat(key) : [key]);
}
});
stack.pop();
}
if (!utils.isObject(obj)) {
throw new TypeError('data must be an object');
}
build(obj);
return formData;
}
export default toFormData;
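// --- Editor's usage sketch (not part of the axios source) ---
// toFormData flattens a nested object into FormData entries; with
// `indexes: true`, nested keys and array elements are rendered in bracket
// notation. Passing a native FormData instance (Node 18+/browsers) avoids the
// platform-specific default. The payload below is illustrative only.
import toFormData from './toFormData.js';

const fd = toFormData({
  vm_name: 'minion-01',
  metrics: { cpu: 2, memory: 4096 },
  tags: ['salt', 'beacon'],
}, new FormData(), { indexes: true });

for (const [key, value] of fd.entries()) {
  console.log(key, '=', value);
}
// vm_name = minion-01
// metrics[cpu] = 2
// metrics[memory] = 4096
// tags[0] = salt
// tags[1] = beacon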