Implementation of the entire server in JS!

santi 2025-05-30 13:29:38 +02:00
commit 8295d08e26
1258 changed files with 185243 additions and 0 deletions

8
.env Normal file

@ -0,0 +1,8 @@
PORT=3000
DB_HOST=localhost
DB_USER=root
DB_PASSWORD=
DB_NAME=yomitrack
CLIENT_URL=http://localhost:3000
EMAIL_USER=yomitrack.notificaciones@gmail.com
EMAIL_PASS=roamnqxpmwusdukc

8
.idea/.gitignore vendored Normal file

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml


@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptLibraryMappings">
<includedPredefinedLibrary name="Node.js Core" />
</component>
</project>

8
.idea/modules.xml Normal file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/yomitrack-api-node.iml" filepath="$PROJECT_DIR$/.idea/yomitrack-api-node.iml" />
</modules>
</component>
</project>


@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.tmp" />
<excludeFolder url="file://$MODULE_DIR$/temp" />
<excludeFolder url="file://$MODULE_DIR$/tmp" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

14
config/db.js Normal file

@ -0,0 +1,14 @@
const mysql = require('mysql2/promise');
const pool = mysql.createPool({
host: 'localhost',
user: 'root',
password: '',
database: 'yomitrack',
port: 3306,
waitForConnections: true,
connectionLimit: 10,
queueLimit: 0
});
module.exports = pool;
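The pool above hardcodes the same credentials that the committed `.env` file already defines. A minimal sketch of the equivalent pool reading from the environment instead, assuming `dotenv` is installed and nothing else in the project loads it first (neither is shown in this excerpt):

```js
// config/db.js (sketch) — same pool, but driven by the .env values shown above
require('dotenv').config(); // assumption: dotenv is available as a dependency
const mysql = require('mysql2/promise');

const pool = mysql.createPool({
    host: process.env.DB_HOST || 'localhost',
    user: process.env.DB_USER || 'root',
    password: process.env.DB_PASSWORD || '',
    database: process.env.DB_NAME || 'yomitrack',
    port: 3306,
    waitForConnections: true,
    connectionLimit: 10,
    queueLimit: 0
});

module.exports = pool;
```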


@ -0,0 +1,147 @@
const db = require('../config/db');
// POST: Registrar una nueva actividad
exports.addActivity = async (req, res) => {
const {userId, action, mediaTitle, imageUrl} = req.body;
if (!userId || !action || !mediaTitle) {
return res.status(400).json({error: 'Faltan datos requeridos'});
}
try {
await db.execute(
'INSERT INTO activity_log (userId, action, mediaTitle, imageUrl, timestamp) VALUES (?, ?, ?, ?, NOW())',
[userId, action, mediaTitle, imageUrl || null]
);
res.status(200).json({message: 'Actividad registrada correctamente'});
} catch (err) {
console.error('❌ Error al insertar actividad:', err);
res.status(500).json({error: 'Error al guardar la actividad'});
}
};
exports.getActivitiesByUser = async (req, res) => {
const {userId} = req.params;
try {
const [rows] = await db.execute(
`SELECT a.id, a.userId, u.username, a.action, a.mediaTitle, a.imageUrl, a.timestamp
FROM activity_log a
JOIN users u ON a.userId = u.id
WHERE a.userId = ?
ORDER BY a.timestamp DESC LIMIT 20`,
[userId]
);
res.status(200).json(rows);
} catch (err) {
console.error('❌ Error al obtener actividad:', err);
res.status(500).json({error: 'Error al obtener historial de actividad'});
}
};
// GET: Obtener comentarios de una actividad
exports.getCommentsByActivity = async (req, res) => {
const {activityId} = req.params;
try {
const [rows] = await db.execute(
`SELECT c.text, u.username, u.avatarUrl, c.created_at
FROM comments c
JOIN users u ON c.user_id = u.id
WHERE c.activity_id = ?
ORDER BY c.created_at DESC`,
[activityId]
);
res.status(200).json(rows);
} catch (error) {
console.error('❌ Error al obtener comentarios:', error);
res.status(500).json({error: 'Error al obtener comentarios'});
}
};
exports.postComment = async (req, res) => {
const {userId, activityId, text} = req.body;
try {
await db.execute(
'INSERT INTO comments (user_id, activity_id, text, created_at) VALUES (?, ?, ?, NOW())',
[userId, activityId, text]
);
res.status(200).json({message: 'Comentario guardado'});
} catch (error) {
console.error('❌ Error al guardar comentario:', error);
if (error.code === 'ER_NO_REFERENCED_ROW_2') {
res.status(400).json({error: 'El usuario no existe en la base de datos'});
} else {
res.status(500).json({error: 'Error al guardar comentario'});
}
}
};
// POST: Dar like a una actividad
exports.postLike = async (req, res) => {
const {userId, activityId} = req.body;
if (!userId || !activityId) {
return res.status(400).json({error: 'Faltan datos requeridos'});
}
try {
const [existing] = await db.execute(
'SELECT id FROM likes WHERE user_id = ? AND activity_id = ?',
[userId, activityId]
);
if (existing.length > 0) {
return res.status(409).json({message: 'Ya diste like'});
}
await db.execute(
'INSERT INTO likes (user_id, activity_id, created_at) VALUES (?, ?, NOW())',
[userId, activityId]
);
res.status(200).json({message: 'Like registrado'});
} catch (error) {
console.error('❌ Error al registrar like:', error);
res.status(500).json({error: 'Error al registrar like'});
}
};
// GET: Verificar si el usuario ya dio like
exports.checkLike = async (req, res) => {
const {userId, activityId} = req.params;
try {
const [rows] = await db.execute(
'SELECT id FROM likes WHERE user_id = ? AND activity_id = ?',
[userId, activityId]
);
res.json({liked: rows.length > 0});
} catch (err) {
console.error("Error al verificar like:", err);
res.status(500).json({error: "Error al verificar like"});
}
};
// DELETE: Eliminar un like
exports.removeLike = async (req, res) => {
const { userId, activityId } = req.body;
if (userId === undefined || activityId === undefined) {
return res.status(400).json({ error: 'Faltan userId o activityId' });
}
try {
await db.execute(
'DELETE FROM likes WHERE user_id = ? AND activity_id = ?',
[userId, activityId]
);
res.status(200).json({ message: "Like eliminado" });
} catch (err) {
console.error("Error al eliminar like:", err);
res.status(500).json({ error: "Error al eliminar like" });
}
};
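These handlers expect to be mounted on an Express router; the route definitions are not part of this excerpt, so the controller filename and paths below are illustrative assumptions only:

```js
// routes/activityRoutes.js (sketch — hypothetical file and paths, not the commit's actual routes)
const express = require('express');
const router = express.Router();
const activity = require('../controllers/activityController'); // assumed filename

router.post('/activity', activity.addActivity);
router.get('/activity/user/:userId', activity.getActivitiesByUser);
router.get('/activity/:activityId/comments', activity.getCommentsByActivity);
router.post('/comments', activity.postComment);
router.post('/likes', activity.postLike);
router.get('/likes/:userId/:activityId', activity.checkLike);
router.delete('/likes', activity.removeLike);

module.exports = router;
```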


@ -0,0 +1,50 @@
const axios = require('axios');
const searchAniListMedia = async (req, res) => {
const { query, type } = req.query;
if (!query || !type || (type !== 'ANIME' && type !== 'MANGA')) {
return res.status(400).json({ error: 'Parámetros "query" y "type" (ANIME o MANGA) requeridos' });
}
const gqlQuery = {
query: `
query ($search: String, $type: MediaType) {
Page(perPage: 10) {
media(search: $search, type: $type) {
id
title {
romaji
}
coverImage {
large
}
}
}
}
`,
variables: {
search: query,
type
}
};
try {
const response = await axios.post('https://graphql.anilist.co', gqlQuery, {
headers: { 'Content-Type': 'application/json' }
});
const results = response.data.data.Page.media.map(item => ({
id: item.id,
title: item.title.romaji,
imageUrl: item.coverImage.large
}));
res.json(results);
} catch (error) {
console.error('AniList API error:', error.message);
res.status(500).json({ error: 'Error al consultar AniList' });
}
};
module.exports = { searchAniListMedia };
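A sketch of how this proxy could be mounted and called; the route path is an assumption, but the `query`/`type` parameters and the response shape come from the handler above:

```js
// Sketch — hypothetical mount point for the AniList search proxy
const express = require('express');
const { searchAniListMedia } = require('../controllers/anilistController'); // assumed filename

const router = express.Router();
router.get('/anilist/search', searchAniListMedia);
// GET /anilist/search?query=fullmetal&type=ANIME
// => [ { id: ..., title: "<romaji title>", imageUrl: "<cover url>" }, ... ]

module.exports = router;
```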


@ -0,0 +1,122 @@
const db = require('../config/db');
// Insertar nuevo anime
const insertAnime = async (req, res) => {
const {userId, title, score, progress, status, type, imageUrl, totalEpisodes} = req.body;
if (!userId || !title || !status || !type || !imageUrl) {
return res.status(400).json({error: 'Faltan campos obligatorios'});
}
try {
// Verificar si ya existe
const [existing] = await db.execute(
'SELECT id FROM anime WHERE userId = ? AND title = ?',
[userId, title]
);
if (existing.length > 0) {
return res.status(409).json({error: 'Este anime ya está en tu lista'});
}
await db.execute(
'INSERT INTO anime (userId, title, score, progress, status, type, imageUrl, totalEpisodes) VALUES (?, ?, ?, ?, ?, ?, ?, ?)',
[userId, title, score || 0, progress || 0, status, type, imageUrl, totalEpisodes || 12]
);
res.status(200).json({message: 'Anime añadido correctamente'});
} catch (error) {
console.error('Error insertando anime:', error);
res.status(500).json({error: 'Error al insertar anime'});
}
};
// Obtener animes de un usuario (lista completa o paginada)
const getAnimesByUser = async (req, res) => {
const {userId} = req.params;
const page = parseInt(req.query.page);
const size = parseInt(req.query.size);
try {
if (!isNaN(page) && !isNaN(size)) {
const offset = (page - 1) * size;
const [[{total}]] = await db.execute('SELECT COUNT(*) AS total FROM anime WHERE userId = ?', [userId]);
const [rows] = await db.execute('SELECT * FROM anime WHERE userId = ? LIMIT ? OFFSET ?', [userId, size, offset]);
return res.json({
data: rows,
total,
page,
hasNextPage: offset + rows.length < total
});
} else {
const [rows] = await db.execute('SELECT * FROM anime WHERE userId = ?', [userId]);
return res.json(rows);
}
} catch (error) {
console.error('Error obteniendo animes:', error);
res.status(500).json({error: 'Error al obtener animes'});
}
};
const deleteAnime = async (req, res) => {
const {id} = req.params;
try {
await db.execute('DELETE FROM anime WHERE id = ?', [id]);
res.status(200).json({message: 'Anime eliminado correctamente'});
} catch (error) {
console.error('Error eliminando anime:', error);
res.status(500).json({error: 'Error al eliminar anime'});
}
};
const updateAnime = async (req, res) => {
const {id} = req.params;
const {title, score, progress, status, type, imageUrl, totalEpisodes} = req.body;
try {
await db.execute(
'UPDATE anime SET title = ?, score = ?, progress = ?, status = ?, type = ?, imageUrl = ?, totalEpisodes = ? WHERE id = ?',
[title, score, progress, status, type, imageUrl, totalEpisodes || 12, id]
);
res.status(200).json({message: 'Anime actualizado correctamente'});
} catch (error) {
console.error('Error actualizando anime:', error);
res.status(500).json({error: 'Error al actualizar anime'});
}
};
const getAnimesByUserAndStatus = async (req, res) => {
const {userId, status} = req.params;
try {
const [rows] = await db.execute(
'SELECT * FROM anime WHERE userId = ? AND status = ? ORDER BY id DESC',
[userId, status]
);
res.json(rows);
} catch (err) {
res.status(500).json({error: 'Error al obtener animes'});
}
};
const getAiringAnime = async (req, res) => {
try {
const [rows] = await db.execute(
'SELECT * FROM anime WHERE status = "airing" ORDER BY id DESC LIMIT 20'
);
res.json(rows);
} catch (err) {
res.status(500).json({error: 'Error al obtener airing'});
}
};
module.exports = {
insertAnime,
getAnimesByUser,
deleteAnime,
updateAnime,
getAnimesByUserAndStatus,
getAiringAnime
};
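`getAnimesByUser` returns either the full list or a paginated envelope (`{ data, total, page, hasNextPage }`) depending on the `page`/`size` query parameters. A client-side sketch of walking the paginated branch, with the base URL and mount path as assumptions:

```js
// Sketch — consuming the paginated listing (URL and path are assumptions)
const axios = require('axios');

async function fetchAnimePage(userId, page, size) {
    const { data } = await axios.get(`http://localhost:3000/anime/user/${userId}`, {
        params: { page, size }
    });
    return data; // { data: [...rows], total, page, hasNextPage }
}

async function fetchAllAnime(userId, size = 20) {
    const all = [];
    let page = 1;
    let hasNext = true;
    while (hasNext) {
        const res = await fetchAnimePage(userId, page, size);
        all.push(...res.data);
        hasNext = res.hasNextPage; // true while offset + rows.length < total
        page += 1;
    }
    return all;
}
```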


@ -0,0 +1,109 @@
const db = require('../config/db');
// Insertar nuevo manga
const insertManga = async (req, res) => {
const {userId, title, score, progress, status, type, imageUrl, totalChapters} = req.body;
if (!userId || !title || !status || !type || !imageUrl) {
return res.status(400).json({error: 'Faltan campos obligatorios'});
}
try {
// Verificar si ya existe
const [existing] = await db.execute(
'SELECT id FROM manga WHERE userId = ? AND title = ?',
[userId, title]
);
if (existing.length > 0) {
return res.status(409).json({error: 'Este manga ya está en tu lista'});
}
await db.execute(
'INSERT INTO manga (userId, title, score, progress, status, type, imageUrl, totalChapters) VALUES (?, ?, ?, ?, ?, ?, ?, ?)',
[userId, title, score || 0, progress || 0, status, type, imageUrl, totalChapters || 12]
);
res.status(200).json({message: 'Manga añadido correctamente'});
} catch (error) {
console.error('Error insertando manga:', error);
res.status(500).json({error: 'Error al insertar manga'});
}
};
// Obtener mangas de un usuario (lista completa o paginada)
const getMangaByUser = async (req, res) => {
const {userId} = req.params;
const page = parseInt(req.query.page);
const size = parseInt(req.query.size);
try {
if (!isNaN(page) && !isNaN(size)) {
const offset = (page - 1) * size;
const [[{total}]] = await db.execute('SELECT COUNT(*) AS total FROM manga WHERE userId = ?', [userId]);
const [rows] = await db.execute('SELECT * FROM manga WHERE userId = ? LIMIT ? OFFSET ?', [userId, size, offset]);
return res.json({
data: rows,
total,
page,
hasNextPage: offset + rows.length < total
});
} else {
const [rows] = await db.execute('SELECT * FROM manga WHERE userId = ?', [userId]);
return res.json(rows);
}
} catch (error) {
console.error('Error obteniendo mangas:', error);
res.status(500).json({error: 'Error al obtener mangas'});
}
};
const deleteManga = async (req, res) => {
const {id} = req.params;
try {
await db.execute('DELETE FROM manga WHERE id = ?', [id]);
res.status(200).json({message: 'Manga eliminado correctamente'});
} catch (error) {
console.error('Error eliminando manga:', error);
res.status(500).json({error: 'Error al eliminar manga'});
}
};
const updateManga = async (req, res) => {
const {id} = req.params;
const {title, score, progress, status, type, imageUrl, totalChapters} = req.body;
try {
await db.execute(
'UPDATE manga SET title = ?, score = ?, progress = ?, status = ?, type = ?, imageUrl = ?, totalChapters = ? WHERE id = ?',
[title, score, progress, status, type, imageUrl, totalChapters || 12, id]
);
res.status(200).json({message: 'Manga actualizado correctamente'});
} catch (error) {
console.error('Error actualizando manga:', error);
res.status(500).json({error: 'Error al actualizar manga'});
}
};
const getMangaByUserAndStatus = async (req, res) => {
const {userId, status} = req.params;
try {
const [rows] = await db.execute(
'SELECT * FROM manga WHERE userId = ? AND status = ? ORDER BY id DESC',
[userId, status]
);
res.json(rows);
} catch (err) {
res.status(500).json({error: 'Error al obtener mangas'});
}
};
module.exports = {
insertManga,
getMangaByUser,
deleteManga,
updateManga,
getMangaByUserAndStatus
};
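This controller mirrors the anime controller almost line for line; only the table name and the `totalChapters` column differ. One way to remove the duplication, sketched as a hypothetical helper (not part of this commit) under the assumption that both tables share the same column layout:

```js
// Sketch — shared handler factory for the anime/manga list controllers (hypothetical)
const db = require('../config/db');

// `table` comes from code, never from user input, so interpolating it here is safe
function makeListController(table) {
    return {
        async deleteItem(req, res) {
            try {
                await db.execute(`DELETE FROM ${table} WHERE id = ?`, [req.params.id]);
                res.status(200).json({ message: 'Eliminado correctamente' });
            } catch (error) {
                console.error(`Error eliminando en ${table}:`, error);
                res.status(500).json({ error: 'Error al eliminar' });
            }
        },
        async getByUserAndStatus(req, res) {
            const { userId, status } = req.params;
            try {
                const [rows] = await db.execute(
                    `SELECT * FROM ${table} WHERE userId = ? AND status = ? ORDER BY id DESC`,
                    [userId, status]
                );
                res.json(rows);
            } catch (err) {
                res.status(500).json({ error: 'Error al obtener la lista' });
            }
        }
    };
}

// Usage: const mangaController = makeListController('manga');
module.exports = makeListController;
```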


@ -0,0 +1,204 @@
const db = require('../config/db');
const {sendVerificationEmail, sendResetEmail} = require('../utils/sendEmail');
const bcrypt = require('bcrypt');
const jwt = require('jsonwebtoken');
const nodemailer = require('nodemailer');
const crypto = require('crypto');
const transporter = nodemailer.createTransport({
service: 'gmail',
auth: {
user: process.env.EMAIL_USER,
pass: process.env.EMAIL_PASS
}
});
exports.registerUser = async (req, res) => {
try {
const {username, email, password} = req.body;
// Verifica si ya existe un usuario con ese username o email
const [existing] = await db.execute(
'SELECT * FROM users WHERE username = ? OR email = ?',
[username, email]
);
if (existing.length > 0) {
return res.status(400).json({success: false, message: "Usuario ya registrado."});
}
const hashedPassword = await bcrypt.hash(password, 10);
const token = crypto.randomBytes(32).toString('hex');
// Inserta el nuevo usuario con el token
await db.execute(
'INSERT INTO users (username, email, password, is_verified, verificationToken) VALUES (?, ?, ?, 0, ?)',
[username, email, hashedPassword, token]
);
await sendVerificationEmail(email, token);
res.status(201).json({success: true, message: "Usuario registrado. Verifica tu correo."});
} catch (err) {
console.error("❌ Error en registerUser:", err);
res.status(500).json({success: false, message: "Error en el servidor."});
}
};
exports.verifyEmail = async (req, res) => {
const token = req.params.token;
try {
const [rows] = await db.execute(
'SELECT * FROM users WHERE verificationToken = ?',
[token]
);
if (rows.length === 0) {
return res.redirect('/verify-failed.html');
}
await db.execute(
'UPDATE users SET is_verified = 1, verificationToken = NULL WHERE verificationToken = ?',
[token]
);
return res.redirect('/verify-success.html');
} catch (err) {
console.error("❌ Error en verifyEmail:", err);
return res.redirect('/verify-failed.html');
}
};
exports.loginUser = async (req, res) => {
const {username, password} = req.body;
if (!username || !password) {
return res.status(400).json({success: false, message: 'Usuario y contraseña requeridos'});
}
try {
const [results] = await db.execute('SELECT * FROM users WHERE username = ? LIMIT 1', [username]);
if (results.length === 0) {
return res.status(401).json({success: false, message: 'Usuario no encontrado'});
}
const user = results[0];
const passwordMatch = await bcrypt.compare(password, user.password);
if (!passwordMatch) {
return res.status(401).json({success: false, message: 'Contraseña incorrecta'});
}
if (!user.is_verified) {
return res.status(403).json({success: false, message: 'Tu correo no ha sido verificado'});
}
res.json({
success: true,
user: {id: user.id, username: user.username},
message: 'Inicio de sesión exitoso'
});
} catch (err) {
res.status(500).json({success: false, message: 'Error en el servidor', error: err.message});
}
};
exports.forgotPassword = async (req, res) => {
const {email} = req.body;
try {
const [users] = await db.execute('SELECT id FROM users WHERE email = ?', [email]);
if (users.length === 0) return res.status(404).json({message: 'Correo no encontrado'});
const token = crypto.randomBytes(32).toString('hex');
await db.execute('INSERT INTO password_resets (user_id, token) VALUES (?, ?)', [users[0].id, token]);
const resetLink = `http://localhost:3000/reset-password/${token}`;
// ✔️ Usa la función reutilizable
await sendResetEmail(email, resetLink);
res.json({message: 'Correo enviado con instrucciones'});
} catch (err) {
console.error('Error al enviar token de recuperación:', err);
res.status(500).json({message: 'Error interno del servidor'});
}
};
exports.resetPassword = async (req, res) => {
const {email, token, newPassword} = req.body;
if (!email || !token || !newPassword) {
return res.status(400).json({message: 'Todos los campos son obligatorios.'});
}
try {
const [rows] = await db.execute(
'SELECT user_id FROM password_resets WHERE token = ?',
[token]
);
if (rows.length === 0) {
return res.status(400).json({message: 'Token inválido o expirado'});
}
const userId = rows[0].user_id;
const hashedPassword = await bcrypt.hash(newPassword, 10);
await db.execute(
'UPDATE users SET password = ? WHERE id = ?',
[hashedPassword, userId]
);
await db.execute(
'DELETE FROM password_resets WHERE token = ?',
[token]
);
res.json({message: 'Contraseña actualizada correctamente'});
} catch (err) {
console.error('❌ Error al restablecer contraseña:', err);
res.status(500).json({message: 'Error interno del servidor'});
}
};
exports.getUserStats = async (req, res) => {
const userId = req.params.id;
try {
const [animeStatsRows] = await db.execute(
'SELECT status, COUNT(*) as count FROM anime WHERE userId = ? GROUP BY status',
[userId]
);
const [mangaStatsRows] = await db.execute(
'SELECT status, COUNT(*) as count FROM manga WHERE userId = ? GROUP BY status',
[userId]
);
const formatStats = (rows) => {
const stats = {};
rows.forEach(row => {
stats[row.status] = row.count;
});
return stats;
};
res.json({
animeStats: animeStatsRows.length ? formatStats(animeStatsRows) : {},
mangaStats: mangaStatsRows.length ? formatStats(mangaStatsRows) : {}
});
} catch (err) {
res.status(500).json({
success: false,
message: 'Error al obtener estadísticas',
error: err.message
});
}
};
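The controller imports `sendVerificationEmail` and `sendResetEmail` from `../utils/sendEmail`, a file that is not part of this excerpt (the `transporter` created at the top of the controller is otherwise unused). A plausible sketch of that util, assuming the verification link is built from `CLIENT_URL` and a `/verify/:token` route:

```js
// utils/sendEmail.js (sketch — the real file is not shown in this excerpt)
const nodemailer = require('nodemailer');

const transporter = nodemailer.createTransport({
    service: 'gmail',
    auth: {
        user: process.env.EMAIL_USER,
        pass: process.env.EMAIL_PASS
    }
});

async function sendVerificationEmail(to, token) {
    // Assumption: verifyEmail is reachable at GET /verify/:token on CLIENT_URL
    const link = `${process.env.CLIENT_URL}/verify/${token}`;
    await transporter.sendMail({
        from: process.env.EMAIL_USER,
        to,
        subject: 'Verifica tu cuenta',
        html: `<p>Confirma tu correo haciendo clic <a href="${link}">aquí</a>.</p>`
    });
}

async function sendResetEmail(to, resetLink) {
    // forgotPassword already builds the full link, so it is passed through unchanged
    await transporter.sendMail({
        from: process.env.EMAIL_USER,
        to,
        subject: 'Restablece tu contraseña',
        html: `<p>Puedes restablecer tu contraseña <a href="${resetLink}">aquí</a>.</p>`
    });
}

module.exports = { sendVerificationEmail, sendResetEmail };
```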

16
node_modules/.bin/node-gyp-build generated vendored Normal file

@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../node-gyp-build/bin.js" "$@"
else
exec node "$basedir/../node-gyp-build/bin.js" "$@"
fi

16
node_modules/.bin/node-gyp-build-optional generated vendored Normal file

@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../node-gyp-build/optional.js" "$@"
else
exec node "$basedir/../node-gyp-build/optional.js" "$@"
fi

17
node_modules/.bin/node-gyp-build-optional.cmd generated vendored Normal file

@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\optional.js" %*

28
node_modules/.bin/node-gyp-build-optional.ps1 generated vendored Normal file

@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../node-gyp-build/optional.js" $args
} else {
& "$basedir/node$exe" "$basedir/../node-gyp-build/optional.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../node-gyp-build/optional.js" $args
} else {
& "node$exe" "$basedir/../node-gyp-build/optional.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/node-gyp-build-test generated vendored Normal file

@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../node-gyp-build/build-test.js" "$@"
else
exec node "$basedir/../node-gyp-build/build-test.js" "$@"
fi

17
node_modules/.bin/node-gyp-build-test.cmd generated vendored Normal file

@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\build-test.js" %*

28
node_modules/.bin/node-gyp-build-test.ps1 generated vendored Normal file

@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../node-gyp-build/build-test.js" $args
} else {
& "$basedir/node$exe" "$basedir/../node-gyp-build/build-test.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../node-gyp-build/build-test.js" $args
} else {
& "node$exe" "$basedir/../node-gyp-build/build-test.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

17
node_modules/.bin/node-gyp-build.cmd generated vendored Normal file

@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\bin.js" %*

28
node_modules/.bin/node-gyp-build.ps1 generated vendored Normal file

@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../node-gyp-build/bin.js" $args
} else {
& "$basedir/node$exe" "$basedir/../node-gyp-build/bin.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../node-gyp-build/bin.js" $args
} else {
& "node$exe" "$basedir/../node-gyp-build/bin.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/semver generated vendored Normal file

@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
exec node "$basedir/../semver/bin/semver.js" "$@"
fi

17
node_modules/.bin/semver.cmd generated vendored Normal file

@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %*

28
node_modules/.bin/semver.ps1 generated vendored Normal file

@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

1236
node_modules/.package-lock.json generated vendored Normal file

File diff suppressed because it is too large.

250
node_modules/accepts/HISTORY.md generated vendored Normal file

@ -0,0 +1,250 @@
2.0.0 / 2024-08-31
==================
* Drop node <18 support
* deps: mime-types@^3.0.0
* deps: negotiator@^1.0.0
1.3.8 / 2022-02-02
==================
* deps: mime-types@~2.1.34
- deps: mime-db@~1.51.0
* deps: negotiator@0.6.3
1.3.7 / 2019-04-29
==================
* deps: negotiator@0.6.2
- Fix sorting charset, encoding, and language with extra parameters
1.3.6 / 2019-04-28
==================
* deps: mime-types@~2.1.24
- deps: mime-db@~1.40.0
1.3.5 / 2018-02-28
==================
* deps: mime-types@~2.1.18
- deps: mime-db@~1.33.0
1.3.4 / 2017-08-22
==================
* deps: mime-types@~2.1.16
- deps: mime-db@~1.29.0
1.3.3 / 2016-05-02
==================
* deps: mime-types@~2.1.11
- deps: mime-db@~1.23.0
* deps: negotiator@0.6.1
- perf: improve `Accept` parsing speed
- perf: improve `Accept-Charset` parsing speed
- perf: improve `Accept-Encoding` parsing speed
- perf: improve `Accept-Language` parsing speed
1.3.2 / 2016-03-08
==================
* deps: mime-types@~2.1.10
- Fix extension of `application/dash+xml`
- Update primary extension for `audio/mp4`
- deps: mime-db@~1.22.0
1.3.1 / 2016-01-19
==================
* deps: mime-types@~2.1.9
- deps: mime-db@~1.21.0
1.3.0 / 2015-09-29
==================
* deps: mime-types@~2.1.7
- deps: mime-db@~1.19.0
* deps: negotiator@0.6.0
- Fix including type extensions in parameters in `Accept` parsing
- Fix parsing `Accept` parameters with quoted equals
- Fix parsing `Accept` parameters with quoted semicolons
- Lazy-load modules from main entry point
- perf: delay type concatenation until needed
- perf: enable strict mode
- perf: hoist regular expressions
- perf: remove closures getting spec properties
- perf: remove a closure from media type parsing
- perf: remove property delete from media type parsing
1.2.13 / 2015-09-06
===================
* deps: mime-types@~2.1.6
- deps: mime-db@~1.18.0
1.2.12 / 2015-07-30
===================
* deps: mime-types@~2.1.4
- deps: mime-db@~1.16.0
1.2.11 / 2015-07-16
===================
* deps: mime-types@~2.1.3
- deps: mime-db@~1.15.0
1.2.10 / 2015-07-01
===================
* deps: mime-types@~2.1.2
- deps: mime-db@~1.14.0
1.2.9 / 2015-06-08
==================
* deps: mime-types@~2.1.1
- perf: fix deopt during mapping
1.2.8 / 2015-06-07
==================
* deps: mime-types@~2.1.0
- deps: mime-db@~1.13.0
* perf: avoid argument reassignment & argument slice
* perf: avoid negotiator recursive construction
* perf: enable strict mode
* perf: remove unnecessary bitwise operator
1.2.7 / 2015-05-10
==================
* deps: negotiator@0.5.3
- Fix media type parameter matching to be case-insensitive
1.2.6 / 2015-05-07
==================
* deps: mime-types@~2.0.11
- deps: mime-db@~1.9.1
* deps: negotiator@0.5.2
- Fix comparing media types with quoted values
- Fix splitting media types with quoted commas
1.2.5 / 2015-03-13
==================
* deps: mime-types@~2.0.10
- deps: mime-db@~1.8.0
1.2.4 / 2015-02-14
==================
* Support Node.js 0.6
* deps: mime-types@~2.0.9
- deps: mime-db@~1.7.0
* deps: negotiator@0.5.1
- Fix preference sorting to be stable for long acceptable lists
1.2.3 / 2015-01-31
==================
* deps: mime-types@~2.0.8
- deps: mime-db@~1.6.0
1.2.2 / 2014-12-30
==================
* deps: mime-types@~2.0.7
- deps: mime-db@~1.5.0
1.2.1 / 2014-12-30
==================
* deps: mime-types@~2.0.5
- deps: mime-db@~1.3.1
1.2.0 / 2014-12-19
==================
* deps: negotiator@0.5.0
- Fix list return order when large accepted list
- Fix missing identity encoding when q=0 exists
- Remove dynamic building of Negotiator class
1.1.4 / 2014-12-10
==================
* deps: mime-types@~2.0.4
- deps: mime-db@~1.3.0
1.1.3 / 2014-11-09
==================
* deps: mime-types@~2.0.3
- deps: mime-db@~1.2.0
1.1.2 / 2014-10-14
==================
* deps: negotiator@0.4.9
- Fix error when media type has invalid parameter
1.1.1 / 2014-09-28
==================
* deps: mime-types@~2.0.2
- deps: mime-db@~1.1.0
* deps: negotiator@0.4.8
- Fix all negotiations to be case-insensitive
- Stable sort preferences of same quality according to client order
1.1.0 / 2014-09-02
==================
* update `mime-types`
1.0.7 / 2014-07-04
==================
* Fix wrong type returned from `type` when match after unknown extension
1.0.6 / 2014-06-24
==================
* deps: negotiator@0.4.7
1.0.5 / 2014-06-20
==================
* fix crash when unknown extension given
1.0.4 / 2014-06-19
==================
* use `mime-types`
1.0.3 / 2014-06-11
==================
* deps: negotiator@0.4.6
- Order by specificity when quality is the same
1.0.2 / 2014-05-29
==================
* Fix interpretation when header not in request
* deps: pin negotiator@0.4.5
1.0.1 / 2014-01-18
==================
* Identity encoding isn't always acceptable
* deps: negotiator@~0.4.0
1.0.0 / 2013-12-27
==================
* Genesis

23
node_modules/accepts/LICENSE generated vendored Normal file

@ -0,0 +1,23 @@
(The MIT License)
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

140
node_modules/accepts/README.md generated vendored Normal file

@ -0,0 +1,140 @@
# accepts
[![NPM Version][npm-version-image]][npm-url]
[![NPM Downloads][npm-downloads-image]][npm-url]
[![Node.js Version][node-version-image]][node-version-url]
[![Build Status][github-actions-ci-image]][github-actions-ci-url]
[![Test Coverage][coveralls-image]][coveralls-url]
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator).
Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
In addition to negotiator, it allows:
- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])`
as well as `('text/html', 'application/json')`.
- Allows type shorthands such as `json`.
- Returns `false` when no types match
- Treats non-existent headers as `*`
## Installation
This is a [Node.js](https://nodejs.org/en/) module available through the
[npm registry](https://www.npmjs.com/). Installation is done using the
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
```sh
$ npm install accepts
```
## API
```js
var accepts = require('accepts')
```
### accepts(req)
Create a new `Accepts` object for the given `req`.
#### .charset(charsets)
Return the first accepted charset. If nothing in `charsets` is accepted,
then `false` is returned.
#### .charsets()
Return the charsets that the request accepts, in the order of the client's
preference (most preferred first).
#### .encoding(encodings)
Return the first accepted encoding. If nothing in `encodings` is accepted,
then `false` is returned.
#### .encodings()
Return the encodings that the request accepts, in the order of the client's
preference (most preferred first).
#### .language(languages)
Return the first accepted language. If nothing in `languages` is accepted,
then `false` is returned.
#### .languages()
Return the languages that the request accepts, in the order of the client's
preference (most preferred first).
#### .type(types)
Return the first accepted type (and it is returned as the same text as what
appears in the `types` array). If nothing in `types` is accepted, then `false`
is returned.
The `types` array can contain full MIME types or file extensions. Any value
that is not a full MIME type is passed to `require('mime-types').lookup`.
#### .types()
Return the types that the request accepts, in the order of the client's
preference (most preferred first).
## Examples
### Simple type negotiation
This simple example shows how to use `accepts` to return a differently typed
response body based on what the client wants to accept. The server lists its
preferences in order and will get back the best match between the client and
server.
```js
var accepts = require('accepts')
var http = require('http')
function app (req, res) {
var accept = accepts(req)
// the order of this list is significant; should be server preferred order
switch (accept.type(['json', 'html'])) {
case 'json':
res.setHeader('Content-Type', 'application/json')
res.write('{"hello":"world!"}')
break
case 'html':
res.setHeader('Content-Type', 'text/html')
res.write('<b>hello, world!</b>')
break
default:
// the fallback is text/plain, so no need to specify it above
res.setHeader('Content-Type', 'text/plain')
res.write('hello, world!')
break
}
res.end()
}
http.createServer(app).listen(3000)
```
You can test this out with the cURL program:
```sh
curl -I -H'Accept: text/html' http://localhost:3000/
```
## License
[MIT](LICENSE)
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master
[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci
[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml
[node-version-image]: https://badgen.net/npm/node/accepts
[node-version-url]: https://nodejs.org/en/download
[npm-downloads-image]: https://badgen.net/npm/dm/accepts
[npm-url]: https://npmjs.org/package/accepts
[npm-version-image]: https://badgen.net/npm/v/accepts

238
node_modules/accepts/index.js generated vendored Normal file

@ -0,0 +1,238 @@
/*!
* accepts
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var Negotiator = require('negotiator')
var mime = require('mime-types')
/**
* Module exports.
* @public
*/
module.exports = Accepts
/**
* Create a new Accepts object for the given req.
*
* @param {object} req
* @public
*/
function Accepts (req) {
if (!(this instanceof Accepts)) {
return new Accepts(req)
}
this.headers = req.headers
this.negotiator = new Negotiator(req)
}
/**
* Check if the given `type(s)` is acceptable, returning
* the best match when true, otherwise `undefined`, in which
* case you should respond with 406 "Not Acceptable".
*
* The `type` value may be a single mime type string
* such as "application/json", the extension name
* such as "json" or an array `["json", "html", "text/plain"]`. When a list
* or array is given the _best_ match, if any is returned.
*
* Examples:
*
* // Accept: text/html
* this.types('html');
* // => "html"
*
* // Accept: text/*, application/json
* this.types('html');
* // => "html"
* this.types('text/html');
* // => "text/html"
* this.types('json', 'text');
* // => "json"
* this.types('application/json');
* // => "application/json"
*
* // Accept: text/*, application/json
* this.types('image/png');
* this.types('png');
* // => undefined
*
* // Accept: text/*;q=.5, application/json
* this.types(['html', 'json']);
* this.types('html', 'json');
* // => "json"
*
* @param {String|Array} types...
* @return {String|Array|Boolean}
* @public
*/
Accepts.prototype.type =
Accepts.prototype.types = function (types_) {
var types = types_
// support flattened arguments
if (types && !Array.isArray(types)) {
types = new Array(arguments.length)
for (var i = 0; i < types.length; i++) {
types[i] = arguments[i]
}
}
// no types, return all requested types
if (!types || types.length === 0) {
return this.negotiator.mediaTypes()
}
// no accept header, return first given type
if (!this.headers.accept) {
return types[0]
}
var mimes = types.map(extToMime)
var accepts = this.negotiator.mediaTypes(mimes.filter(validMime))
var first = accepts[0]
return first
? types[mimes.indexOf(first)]
: false
}
/**
* Return accepted encodings or best fit based on `encodings`.
*
* Given `Accept-Encoding: gzip, deflate`
* an array sorted by quality is returned:
*
* ['gzip', 'deflate']
*
* @param {String|Array} encodings...
* @return {String|Array}
* @public
*/
Accepts.prototype.encoding =
Accepts.prototype.encodings = function (encodings_) {
var encodings = encodings_
// support flattened arguments
if (encodings && !Array.isArray(encodings)) {
encodings = new Array(arguments.length)
for (var i = 0; i < encodings.length; i++) {
encodings[i] = arguments[i]
}
}
// no encodings, return all requested encodings
if (!encodings || encodings.length === 0) {
return this.negotiator.encodings()
}
return this.negotiator.encodings(encodings)[0] || false
}
/**
* Return accepted charsets or best fit based on `charsets`.
*
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
* an array sorted by quality is returned:
*
* ['utf-8', 'utf-7', 'iso-8859-1']
*
* @param {String|Array} charsets...
* @return {String|Array}
* @public
*/
Accepts.prototype.charset =
Accepts.prototype.charsets = function (charsets_) {
var charsets = charsets_
// support flattened arguments
if (charsets && !Array.isArray(charsets)) {
charsets = new Array(arguments.length)
for (var i = 0; i < charsets.length; i++) {
charsets[i] = arguments[i]
}
}
// no charsets, return all requested charsets
if (!charsets || charsets.length === 0) {
return this.negotiator.charsets()
}
return this.negotiator.charsets(charsets)[0] || false
}
/**
* Return accepted languages or best fit based on `langs`.
*
* Given `Accept-Language: en;q=0.8, es, pt`
* an array sorted by quality is returned:
*
* ['es', 'pt', 'en']
*
* @param {String|Array} langs...
* @return {Array|String}
* @public
*/
Accepts.prototype.lang =
Accepts.prototype.langs =
Accepts.prototype.language =
Accepts.prototype.languages = function (languages_) {
var languages = languages_
// support flattened arguments
if (languages && !Array.isArray(languages)) {
languages = new Array(arguments.length)
for (var i = 0; i < languages.length; i++) {
languages[i] = arguments[i]
}
}
// no languages, return all requested languages
if (!languages || languages.length === 0) {
return this.negotiator.languages()
}
return this.negotiator.languages(languages)[0] || false
}
/**
* Convert extnames to mime.
*
* @param {String} type
* @return {String}
* @private
*/
function extToMime (type) {
return type.indexOf('/') === -1
? mime.lookup(type)
: type
}
/**
* Check if mime is valid.
*
* @param {String} type
* @return {Boolean}
* @private
*/
function validMime (type) {
return typeof type === 'string'
}

47
node_modules/accepts/package.json generated vendored Normal file

@ -0,0 +1,47 @@
{
"name": "accepts",
"description": "Higher-level content negotiation",
"version": "2.0.0",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
],
"license": "MIT",
"repository": "jshttp/accepts",
"dependencies": {
"mime-types": "^3.0.0",
"negotiator": "^1.0.0"
},
"devDependencies": {
"deep-equal": "1.0.1",
"eslint": "7.32.0",
"eslint-config-standard": "14.1.1",
"eslint-plugin-import": "2.25.4",
"eslint-plugin-markdown": "2.2.1",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "4.3.1",
"eslint-plugin-standard": "4.1.0",
"mocha": "9.2.0",
"nyc": "15.1.0"
},
"files": [
"LICENSE",
"HISTORY.md",
"index.js"
],
"engines": {
"node": ">= 0.6"
},
"scripts": {
"lint": "eslint .",
"test": "mocha --reporter spec --check-leaks --bail test/",
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
},
"keywords": [
"content",
"negotiation",
"accept",
"accepts"
]
}

21
node_modules/asynckit/LICENSE generated vendored Normal file

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 Alex Indigo
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

233
node_modules/asynckit/README.md generated vendored Normal file

@ -0,0 +1,233 @@
# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit)
Minimal async jobs utility library, with streams support.
[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit)
[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master)
[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit)
[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit)
<!-- [![Readme](https://img.shields.io/badge/readme-tested-brightgreen.svg?style=flat)](https://www.npmjs.com/package/reamde) -->
AsyncKit provides a harness for `parallel` and `serial` iterators over lists of items represented by arrays or objects.
Optionally it accepts an abort function (which the iterator should return synchronously for each item) and terminates leftover jobs upon an error event. For a specific iteration order, built-in (`ascending` and `descending`) and custom sort helpers are also supported, via the `asynckit.serialOrdered` method.
It forces iterator callbacks to run asynchronously, which keeps behavior stable and prevents `Maximum call stack size exceeded` errors from synchronous iterators.
| compression | size |
| :----------------- | -------: |
| asynckit.js | 12.34 kB |
| asynckit.min.js | 4.11 kB |
| asynckit.min.js.gz | 1.47 kB |
## Install
```sh
$ npm install --save asynckit
```
## Examples
### Parallel Jobs
Runs the iterator over the provided array in parallel. Output is stored in the `result` array
at the matching positions. In the unlikely event of an error from one of the jobs,
the remaining active jobs are terminated (if an abort function is provided)
and the error is returned along with the salvaged data to the main callback function.
#### Input Array
```javascript
var parallel = require('asynckit').parallel
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, target = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// async job accepts one element from the array
// and a callback function
function asyncJob(item, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
#### Input Object
Also it supports named jobs, listed via object.
```javascript
var parallel = require('asynckit/parallel')
, assert = require('assert')
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
, target = []
, keys = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
assert.deepEqual(keys, expectedKeys);
});
// supports full value, key, callback (shortcut) interface
function asyncJob(item, key, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
keys.push(key);
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
### Serial Jobs
Runs the iterator over the provided array sequentially. Output is stored in the `result` array
at the matching positions. In the unlikely event of an error from one of the jobs,
it will not proceed to the rest of the items in the list
and will return the error along with the salvaged data to the main callback function.
#### Input Array
```javascript
var serial = require('asynckit/serial')
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// extended interface (item, key, callback)
// also supported for arrays
function asyncJob(item, key, cb)
{
target.push(key);
// it will be automatically made async
// even it iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
#### Input Object
Also it supports named jobs, listed via object.
```javascript
var serial = require('asynckit').serial
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
, target = []
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// shortcut interface (item, callback)
// works for object as well as for the arrays
function asyncJob(item, cb)
{
target.push(item);
// it will be automatically made async
// even it iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
_Note: Since _object_ is an _unordered_ collection of properties,
it may produce unexpected results with sequential iterations.
Whenever the order of job execution is important, please use the `serialOrdered` method._
### Ordered Serial Iterations
TBD
For example [compare-property](compare-property) package.
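A minimal `serialOrdered` sketch, based on the `ascending`/`descending` sort helpers that `lib/readable_serial_ordered.js` (included further down in this commit) re-exports from the same module:
```javascript
var serialOrdered = require('asynckit').serialOrdered
  , assert = require('assert')
  ;
var source = [ 4, 1, 3, 2 ]
  , visited = []
  ;
// iterate in ascending order of the values, regardless of array position
serialOrdered(source, function(item, key, cb)
{
  visited.push(item);
  cb(null, item * 2);
}, serialOrdered.ascending, function(err, result)
{
  assert.ifError(err);
  assert.deepEqual(visited, [ 1, 2, 3, 4 ]);
  // results keep their original array positions
  assert.deepEqual(result, [ 8, 2, 6, 4 ]);
});
```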
### Streaming interface
TBD
## Want to Know More?
More examples can be found in [test folder](test/).
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
## License
AsyncKit is licensed under the MIT license.

76
node_modules/asynckit/bench.js generated vendored Normal file

@ -0,0 +1,76 @@
/* eslint no-console: "off" */
var asynckit = require('./')
, async = require('async')
, assert = require('assert')
, expected = 0
;
var Benchmark = require('benchmark');
var suite = new Benchmark.Suite;
var source = [];
for (var z = 1; z < 100; z++)
{
source.push(z);
expected += z;
}
suite
// add tests
.add('async.map', function(deferred)
{
var total = 0;
async.map(source,
function(i, cb)
{
setImmediate(function()
{
total += i;
cb(null, total);
});
},
function(err, result)
{
assert.ifError(err);
assert.equal(result[result.length - 1], expected);
deferred.resolve();
});
}, {'defer': true})
.add('asynckit.parallel', function(deferred)
{
var total = 0;
asynckit.parallel(source,
function(i, cb)
{
setImmediate(function()
{
total += i;
cb(null, total);
});
},
function(err, result)
{
assert.ifError(err);
assert.equal(result[result.length - 1], expected);
deferred.resolve();
});
}, {'defer': true})
// add listeners
.on('cycle', function(ev)
{
console.log(String(ev.target));
})
.on('complete', function()
{
console.log('Fastest is ' + this.filter('fastest').map('name'));
})
// run async
.run({ 'async': true });

6
node_modules/asynckit/index.js generated vendored Normal file

@ -0,0 +1,6 @@
module.exports =
{
parallel : require('./parallel.js'),
serial : require('./serial.js'),
serialOrdered : require('./serialOrdered.js')
};

29
node_modules/asynckit/lib/abort.js generated vendored Normal file

@ -0,0 +1,29 @@
// API
module.exports = abort;
/**
* Aborts leftover active jobs
*
* @param {object} state - current state object
*/
function abort(state)
{
Object.keys(state.jobs).forEach(clean.bind(state));
// reset leftover jobs
state.jobs = {};
}
/**
* Cleans up leftover job by invoking abort function for the provided job id
*
* @this state
* @param {string|number} key - job id to abort
*/
function clean(key)
{
if (typeof this.jobs[key] == 'function')
{
this.jobs[key]();
}
}

34
node_modules/asynckit/lib/async.js generated vendored Normal file

@ -0,0 +1,34 @@
var defer = require('./defer.js');
// API
module.exports = async;
/**
* Runs provided callback asynchronously
* even if callback itself is not
*
* @param {function} callback - callback to invoke
* @returns {function} - augmented callback
*/
function async(callback)
{
var isAsync = false;
// check if async happened
defer(function() { isAsync = true; });
return function async_callback(err, result)
{
if (isAsync)
{
callback(err, result);
}
else
{
defer(function nextTick_callback()
{
callback(err, result);
});
}
};
}

26
node_modules/asynckit/lib/defer.js generated vendored Normal file

@ -0,0 +1,26 @@
module.exports = defer;
/**
* Runs provided function on next iteration of the event loop
*
* @param {function} fn - function to run
*/
function defer(fn)
{
var nextTick = typeof setImmediate == 'function'
? setImmediate
: (
typeof process == 'object' && typeof process.nextTick == 'function'
? process.nextTick
: null
);
if (nextTick)
{
nextTick(fn);
}
else
{
setTimeout(fn, 0);
}
}

75
node_modules/asynckit/lib/iterate.js generated vendored Normal file

@ -0,0 +1,75 @@
var async = require('./async.js')
, abort = require('./abort.js')
;
// API
module.exports = iterate;
/**
* Iterates over each job object
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {object} state - current job status
* @param {function} callback - invoked when all elements processed
*/
function iterate(list, iterator, state, callback)
{
// store current index
var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
{
// don't repeat yourself
// skip secondary callbacks
if (!(key in state.jobs))
{
return;
}
// clean up jobs
delete state.jobs[key];
if (error)
{
// don't process rest of the results
// stop still active jobs
// and reset the list
abort(state);
}
else
{
state.results[key] = output;
}
// return salvaged results
callback(error, state.results);
});
}
/**
* Runs iterator over provided job element
*
* @param {function} iterator - iterator to invoke
* @param {string|number} key - key/index of the element in the list of jobs
* @param {mixed} item - job description
* @param {function} callback - invoked after iterator is done with the job
* @returns {function|mixed} - job abort function or something else
*/
function runJob(iterator, key, item, callback)
{
var aborter;
// allow shortcut if iterator expects only two arguments
if (iterator.length == 2)
{
aborter = iterator(item, async(callback));
}
// otherwise go with full three arguments
else
{
aborter = iterator(item, key, async(callback));
}
return aborter;
}

91
node_modules/asynckit/lib/readable_asynckit.js generated vendored Normal file

@ -0,0 +1,91 @@
var streamify = require('./streamify.js')
, defer = require('./defer.js')
;
// API
module.exports = ReadableAsyncKit;
/**
* Base constructor for all streams
* used to hold properties/methods
*/
function ReadableAsyncKit()
{
ReadableAsyncKit.super_.apply(this, arguments);
// list of active jobs
this.jobs = {};
// add stream methods
this.destroy = destroy;
this._start = _start;
this._read = _read;
}
/**
* Destroys readable stream,
* by aborting outstanding jobs
*
* @returns {void}
*/
function destroy()
{
if (this.destroyed)
{
return;
}
this.destroyed = true;
if (typeof this.terminator == 'function')
{
this.terminator();
}
}
/**
* Starts provided jobs in async manner
*
* @private
*/
function _start()
{
// first argument runner function
var runner = arguments[0]
// take away first argument
, args = Array.prototype.slice.call(arguments, 1)
// second argument - input data
, input = args[0]
// last argument - result callback
, endCb = streamify.callback.call(this, args[args.length - 1])
;
args[args.length - 1] = endCb;
// third argument - iterator
args[1] = streamify.iterator.call(this, args[1]);
// allow time for proper setup
defer(function()
{
if (!this.destroyed)
{
this.terminator = runner.apply(null, args);
}
else
{
endCb(null, Array.isArray(input) ? [] : {});
}
}.bind(this));
}
/**
* Implement _read to comply with Readable streams
* Doesn't really make sense for flowing object mode
*
* @private
*/
function _read()
{
}

25
node_modules/asynckit/lib/readable_parallel.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
var parallel = require('../parallel.js');
// API
module.exports = ReadableParallel;
/**
* Streaming wrapper to `asynckit.parallel`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableParallel(list, iterator, callback)
{
if (!(this instanceof ReadableParallel))
{
return new ReadableParallel(list, iterator, callback);
}
// turn on object mode
ReadableParallel.super_.call(this, {objectMode: true});
this._start(parallel, list, iterator, callback);
}

25
node_modules/asynckit/lib/readable_serial.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
var serial = require('../serial.js');
// API
module.exports = ReadableSerial;
/**
* Streaming wrapper to `asynckit.serial`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableSerial(list, iterator, callback)
{
if (!(this instanceof ReadableSerial))
{
return new ReadableSerial(list, iterator, callback);
}
// turn on object mode
ReadableSerial.super_.call(this, {objectMode: true});
this._start(serial, list, iterator, callback);
}

29
node_modules/asynckit/lib/readable_serial_ordered.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
var serialOrdered = require('../serialOrdered.js');
// API
module.exports = ReadableSerialOrdered;
// expose sort helpers
module.exports.ascending = serialOrdered.ascending;
module.exports.descending = serialOrdered.descending;
/**
* Streaming wrapper to `asynckit.serialOrdered`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} sortMethod - custom sort function
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
{
if (!(this instanceof ReadableSerialOrdered))
{
return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
}
// turn on object mode
ReadableSerialOrdered.super_.call(this, {objectMode: true});
this._start(serialOrdered, list, iterator, sortMethod, callback);
}

37
node_modules/asynckit/lib/state.js generated vendored Normal file
View File

@ -0,0 +1,37 @@
// API
module.exports = state;
/**
* Creates initial state object
* for iteration over list
*
* @param {array|object} list - list to iterate over
* @param {function|null} sortMethod - function to use for keys sort,
* or `null` to keep them as is
* @returns {object} - initial state object
*/
function state(list, sortMethod)
{
var isNamedList = !Array.isArray(list)
, initState =
{
index : 0,
keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
jobs : {},
results : isNamedList ? {} : [],
size : isNamedList ? Object.keys(list).length : list.length
}
;
if (sortMethod)
{
    // sort array keys based on its values
// sort object's keys just on own merit
initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
{
return sortMethod(list[a], list[b]);
});
}
return initState;
}

141
node_modules/asynckit/lib/streamify.js generated vendored Normal file
View File

@ -0,0 +1,141 @@
var async = require('./async.js');
// API
module.exports = {
iterator: wrapIterator,
callback: wrapCallback
};
/**
* Wraps iterators with long signature
*
* @this ReadableAsyncKit#
* @param {function} iterator - function to wrap
* @returns {function} - wrapped function
*/
function wrapIterator(iterator)
{
var stream = this;
return function(item, key, cb)
{
var aborter
, wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
;
stream.jobs[key] = wrappedCb;
// it's either shortcut (item, cb)
if (iterator.length == 2)
{
aborter = iterator(item, wrappedCb);
}
// or long format (item, key, cb)
else
{
aborter = iterator(item, key, wrappedCb);
}
return aborter;
};
}
/**
* Wraps provided callback function
* allowing to execute snitch function before
* real callback
*
* @this ReadableAsyncKit#
* @param {function} callback - function to wrap
* @returns {function} - wrapped function
*/
function wrapCallback(callback)
{
var stream = this;
var wrapped = function(error, result)
{
return finisher.call(stream, error, result, callback);
};
return wrapped;
}
/**
* Wraps provided iterator callback function
* makes sure snitch only called once,
* but passes secondary calls to the original callback
*
* @this ReadableAsyncKit#
* @param {function} callback - callback to wrap
* @param {number|string} key - iteration key
* @returns {function} wrapped callback
*/
function wrapIteratorCallback(callback, key)
{
var stream = this;
return function(error, output)
{
// don't repeat yourself
if (!(key in stream.jobs))
{
callback(error, output);
return;
}
// clean up jobs
delete stream.jobs[key];
return streamer.call(stream, error, {key: key, value: output}, callback);
};
}
/**
* Stream wrapper for iterator callback
*
* @this ReadableAsyncKit#
* @param {mixed} error - error response
* @param {mixed} output - iterator output
* @param {function} callback - callback that expects iterator results
*/
function streamer(error, output, callback)
{
if (error && !this.error)
{
this.error = error;
this.pause();
this.emit('error', error);
// send back value only, as expected
callback(error, output && output.value);
return;
}
// stream stuff
this.push(output);
// back to original track
// send back value only, as expected
callback(error, output && output.value);
}
/**
* Stream wrapper for finishing callback
*
* @this ReadableAsyncKit#
* @param {mixed} error - error response
* @param {mixed} output - iterator output
* @param {function} callback - callback that expects final results
*/
function finisher(error, output, callback)
{
// signal end of the stream
// only for successfully finished streams
if (!error)
{
this.push(null);
}
// back to original track
callback(error, output);
}

29
node_modules/asynckit/lib/terminator.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
var abort = require('./abort.js')
, async = require('./async.js')
;
// API
module.exports = terminator;
/**
* Terminates jobs in the attached state context
*
* @this AsyncKitState#
* @param {function} callback - final callback to invoke after termination
*/
function terminator(callback)
{
if (!Object.keys(this.jobs).length)
{
return;
}
// fast forward iteration index
this.index = this.size;
// abort jobs
abort(this);
// send back results we have so far
async(callback)(null, this.results);
}

63
node_modules/asynckit/package.json generated vendored Normal file
View File

@ -0,0 +1,63 @@
{
"name": "asynckit",
"version": "0.4.0",
"description": "Minimal async jobs utility library, with streams support",
"main": "index.js",
"scripts": {
"clean": "rimraf coverage",
"lint": "eslint *.js lib/*.js test/*.js",
"test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
"win-test": "tape test/test-*.js",
"browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
"report": "istanbul report",
"size": "browserify index.js | size-table asynckit",
"debug": "tape test/test-*.js"
},
"pre-commit": [
"clean",
"lint",
"test",
"browser",
"report",
"size"
],
"repository": {
"type": "git",
"url": "git+https://github.com/alexindigo/asynckit.git"
},
"keywords": [
"async",
"jobs",
"parallel",
"serial",
"iterator",
"array",
"object",
"stream",
"destroy",
"terminate",
"abort"
],
"author": "Alex Indigo <iam@alexindigo.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/alexindigo/asynckit/issues"
},
"homepage": "https://github.com/alexindigo/asynckit#readme",
"devDependencies": {
"browserify": "^13.0.0",
"browserify-istanbul": "^2.0.0",
"coveralls": "^2.11.9",
"eslint": "^2.9.0",
"istanbul": "^0.4.3",
"obake": "^0.1.2",
"phantomjs-prebuilt": "^2.1.7",
"pre-commit": "^1.1.3",
"reamde": "^1.1.0",
"rimraf": "^2.5.2",
"size-table": "^0.2.0",
"tap-spec": "^4.1.1",
"tape": "^4.5.1"
},
"dependencies": {}
}

43
node_modules/asynckit/parallel.js generated vendored Normal file
View File

@ -0,0 +1,43 @@
var iterate = require('./lib/iterate.js')
, initState = require('./lib/state.js')
, terminator = require('./lib/terminator.js')
;
// Public API
module.exports = parallel;
/**
* Runs iterator over provided array elements in parallel
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function parallel(list, iterator, callback)
{
var state = initState(list);
while (state.index < (state['keyedList'] || list).length)
{
iterate(list, iterator, state, function(error, result)
{
if (error)
{
callback(error, result);
return;
}
// looks like it's the last one
if (Object.keys(state.jobs).length === 0)
{
callback(null, state.results);
return;
}
});
state.index++;
}
return terminator.bind(state, callback);
}

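A minimal sketch of the public `parallel` API documented above; the sample list, timings and the expected results in the comments are illustrative assumptions, not values from this repository:

```js
// Sketch: run an async iterator over every element in parallel.
const asynckit = require('asynckit');

const terminate = asynckit.parallel(
  [20, 10, 30],
  function (item, cb) {
    // iterator may also take (item, key, cb); errors go to cb(err)
    setTimeout(function () { cb(null, item * 2); }, item);
  },
  function (err, results) {
    // results keep input positions: [40, 20, 60]
    console.log(err, results);
  }
);

// The returned function aborts any still-running jobs if called early.
// terminate();
```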
17
node_modules/asynckit/serial.js generated vendored Normal file
View File

@ -0,0 +1,17 @@
var serialOrdered = require('./serialOrdered.js');
// Public API
module.exports = serial;
/**
* Runs iterator over provided array elements in series
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function serial(list, iterator, callback)
{
return serialOrdered(list, iterator, null, callback);
}

75
node_modules/asynckit/serialOrdered.js generated vendored Normal file
View File

@ -0,0 +1,75 @@
var iterate = require('./lib/iterate.js')
, initState = require('./lib/state.js')
, terminator = require('./lib/terminator.js')
;
// Public API
module.exports = serialOrdered;
// sorting helpers
module.exports.ascending = ascending;
module.exports.descending = descending;
/**
* Runs iterator over provided sorted array elements in series
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} sortMethod - custom sort function
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function serialOrdered(list, iterator, sortMethod, callback)
{
var state = initState(list, sortMethod);
iterate(list, iterator, state, function iteratorHandler(error, result)
{
if (error)
{
callback(error, result);
return;
}
state.index++;
// are we there yet?
if (state.index < (state['keyedList'] || list).length)
{
iterate(list, iterator, state, iteratorHandler);
return;
}
// done here
callback(null, state.results);
});
return terminator.bind(state, callback);
}
/*
* -- Sort methods
*/
/**
* sort helper to sort array elements in ascending order
*
* @param {mixed} a - an item to compare
* @param {mixed} b - an item to compare
* @returns {number} - comparison result
*/
function ascending(a, b)
{
return a < b ? -1 : a > b ? 1 : 0;
}
/**
* sort helper to sort array elements in descending order
*
* @param {mixed} a - an item to compare
* @param {mixed} b - an item to compare
* @returns {number} - comparison result
*/
function descending(a, b)
{
return -1 * ascending(a, b);
}

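A hedged sketch of `serialOrdered` with the bundled `ascending` helper; the sample array, logging, and commented output are assumptions for illustration:

```js
// Sketch: iterate in series, visiting array elements in ascending order of value.
const serialOrdered = require('asynckit/serialOrdered.js');

serialOrdered(
  [3, 1, 2],
  function (item, key, cb) {
    console.log('visiting', item);  // visits 1, then 2, then 3
    cb(null, item * 10);
  },
  serialOrdered.ascending,          // or .descending, or a custom comparator
  function (err, results) {
    console.log(results);           // [30, 10, 20] — results keep input positions
  }
);
```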
21
node_modules/asynckit/stream.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
var inherits = require('util').inherits
, Readable = require('stream').Readable
, ReadableAsyncKit = require('./lib/readable_asynckit.js')
, ReadableParallel = require('./lib/readable_parallel.js')
, ReadableSerial = require('./lib/readable_serial.js')
, ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
;
// API
module.exports =
{
parallel : ReadableParallel,
serial : ReadableSerial,
serialOrdered : ReadableSerialOrdered,
};
inherits(ReadableAsyncKit, Readable);
inherits(ReadableParallel, ReadableAsyncKit);
inherits(ReadableSerial, ReadableAsyncKit);
inherits(ReadableSerialOrdered, ReadableAsyncKit);

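A hedged sketch of the streaming wrappers wired up above, consuming the object-mode Readable; the item values and log output are illustrative:

```js
// Sketch: parallel iteration consumed as an object-mode Readable stream.
const streams = require('asynckit/stream');

const rs = streams.parallel(
  ['a', 'b', 'c'],
  function (item, cb) { cb(null, item.toUpperCase()); },
  function (err, results) { console.log('done', results); } // ['A', 'B', 'C']
);

rs.on('data', function (chunk) {
  // each chunk is { key, value }, as pushed by lib/streamify.js
  console.log(chunk.key, chunk.value);
});
rs.on('error', function (err) { console.error(err); });
```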
19
node_modules/aws-ssl-profiles/LICENSE generated vendored Normal file
View File

@ -0,0 +1,19 @@
Copyright (c) 2024 Andrey Sidorov, Douglas Wilson, Weslley Araújo and contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

146
node_modules/aws-ssl-profiles/README.md generated vendored Normal file
View File

@ -0,0 +1,146 @@
# AWS SSL Profiles
[**AWS RDS**](https://aws.amazon.com/rds/) **SSL** Certificates Bundles.
**Table of Contents**
- [Installation](#installation)
- [Usage](#usage)
- [**mysqljs/mysql**](#mysqljsmysql)
- [**MySQL2**](#mysql2)
- [**node-postgres**](#node-postgres)
- [Custom `ssl` options](#custom-ssl-options)
- [License](#license)
- [Security](#security)
- [Contributing](#contributing)
- [Acknowledgements](#acknowledgements)
---
## Installation
```bash
npm install --save aws-ssl-profiles
```
---
## Usage
### [mysqljs/mysql](https://github.com/mysqljs/mysql)
```js
const mysql = require('mysql');
const awsCaBundle = require('aws-ssl-profiles');
// mysql connection
const connection = mysql.createConnection({
//...
ssl: awsCaBundle,
});
// mysql connection pool
const pool = mysql.createPool({
//...
ssl: awsCaBundle,
});
```
### [MySQL2](https://github.com/sidorares/node-mysql2)
```js
const mysql = require('mysql2');
const awsCaBundle = require('aws-ssl-profiles');
// mysql2 connection
const connection = mysql.createConnection({
//...
ssl: awsCaBundle,
});
// mysql2 connection pool
const pool = mysql.createPool({
//...
ssl: awsCaBundle,
});
```
### [node-postgres](https://github.com/brianc/node-postgres)
```js
const pg = require('pg');
const awsCaBundle = require('aws-ssl-profiles');
// pg connection
const client = new pg.Client({
// ...
ssl: awsCaBundle,
});
// pg connection pool
const pool = new pg.Pool({
// ...
ssl: awsCaBundle,
});
```
### Custom `ssl` options
Using **AWS SSL Profiles** with custom `ssl` options:
```js
{
// ...
ssl: {
...awsCaBundle,
rejectUnauthorized: true,
// ...
}
}
```
```js
{
// ...
ssl: {
ca: awsCaBundle.ca,
rejectUnauthorized: true,
// ...
}
}
```
### Custom bundles
```js
const { proxyBundle } = require('aws-ssl-profiles');
{
// ...
ssl: proxyBundle,
}
```
---
## License
**AWS SSL Profiles** is under the [**MIT License**](./LICENSE).
---
## Security
Please check the [**SECURITY.md**](./SECURITY.md).
---
## Contributing
Please check the [**CONTRIBUTING.md**](./CONTRIBUTING.md) for instructions.
---
## Acknowledgements
[**Contributors**](https://github.com/mysqljs/aws-ssl-profiles/graphs/contributors).

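A hedged sketch combining the bundle with a `mysql2/promise` pool, in the spirit of the README's MySQL2 example; the host, user, password and database names are placeholders, not values from this repository:

```js
// Hedged sketch: aws-ssl-profiles with a mysql2/promise pool (placeholder credentials).
const mysql = require('mysql2/promise');
const awsCaBundle = require('aws-ssl-profiles');

const pool = mysql.createPool({
  host: 'example.cluster-xxxxxxxx.eu-west-1.rds.amazonaws.com', // placeholder RDS endpoint
  user: 'app_user',
  password: process.env.DB_PASSWORD,
  database: 'app_db',
  ssl: {
    ...awsCaBundle,            // injects the bundled RDS CA certificates
    rejectUnauthorized: true,  // reject certificates not signed by the bundle
  },
});

module.exports = pool;
```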
4
node_modules/aws-ssl-profiles/lib/@types/profiles.d.ts generated vendored Normal file
View File

@ -0,0 +1,4 @@
export type CA = string[];
export type Profiles = {
ca: CA;
};

2
node_modules/aws-ssl-profiles/lib/@types/profiles.js generated vendored Normal file
View File

@ -0,0 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

8
node_modules/aws-ssl-profiles/lib/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,8 @@
import type { Profiles } from "./@types/profiles.js";
export declare const proxyBundle: Profiles;
declare const profiles: Profiles;
declare module "aws-ssl-profiles" {
const profiles: Profiles & { proxyBundle: Profiles };
export = profiles;
}
export default profiles;

13
node_modules/aws-ssl-profiles/lib/index.js generated vendored Normal file
View File

@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const defaults_js_1 = require("./profiles/ca/defaults.js");
const proxies_js_1 = require("./profiles/ca/proxies.js");
const proxyBundle = {
ca: proxies_js_1.proxies,
};
const profiles = {
ca: [...defaults_js_1.defaults, ...proxies_js_1.proxies],
};
module.exports = profiles;
module.exports.proxyBundle = proxyBundle;
module.exports.default = profiles;

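A brief sketch of the export shape this entry point produces; the logged assertions are illustrative, assuming the package is installed as `aws-ssl-profiles`:

```js
// Sketch: what require('aws-ssl-profiles') resolves to, per lib/index.js above.
const awsCaBundle = require('aws-ssl-profiles');

console.log(Array.isArray(awsCaBundle.ca));          // true — array of PEM strings
console.log(awsCaBundle.proxyBundle.ca.length > 0);  // true — RDS Proxy roots only
console.log(awsCaBundle.ca[0].startsWith('-----BEGIN CERTIFICATE-----')); // true
```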
9
node_modules/aws-ssl-profiles/lib/profiles/ca/defaults.d.ts generated vendored Normal file
View File

@ -0,0 +1,9 @@
import type { CA } from '../../@types/profiles.js';
/**
* CA Certificates for **Amazon RDS** (2024)
*
* - https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html
* - https://docs.amazonaws.cn/en_us/AmazonRDS/latest/AuroraUserGuide/UsingWithRDS.SSL.html
* - https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless.html#aurora-serverless.tls
*/
export declare const defaults: CA;

2888
node_modules/aws-ssl-profiles/lib/profiles/ca/defaults.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

8
node_modules/aws-ssl-profiles/lib/profiles/ca/proxies.d.ts generated vendored Normal file
View File

@ -0,0 +1,8 @@
import type { CA } from '../../@types/profiles.js';
/**
* CA Certificates for **Amazon RDS Proxy** (2024)
*
* - https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy.howitworks.html#rds-proxy-security.tls
* - https://www.amazontrust.com/repository/
*/
export declare const proxies: CA;

111
node_modules/aws-ssl-profiles/lib/profiles/ca/proxies.js generated vendored Normal file
View File

@ -0,0 +1,111 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.proxies = void 0;
/**
* CA Certificates for **Amazon RDS Proxy** (2024)
*
* - https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy.howitworks.html#rds-proxy-security.tls
* - https://www.amazontrust.com/repository/
*/
exports.proxies = [
'-----BEGIN CERTIFICATE-----\n' +
'MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF\n' +
'ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6\n' +
'b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL\n' +
'MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv\n' +
'b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj\n' +
'ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM\n' +
'9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw\n' +
'IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6\n' +
'VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L\n' +
'93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm\n' +
'jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC\n' +
'AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA\n' +
'A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI\n' +
'U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs\n' +
'N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv\n' +
'o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU\n' +
'5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy\n' +
'rqXRfboQnoZsG4q5WTP468SQvvG5\n' +
'-----END CERTIFICATE-----\n',
'-----BEGIN CERTIFICATE-----\n' +
'MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF\n' +
'ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6\n' +
'b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL\n' +
'MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv\n' +
'b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK\n' +
'gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ\n' +
'W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg\n' +
'1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K\n' +
'8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r\n' +
'2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me\n' +
'z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR\n' +
'8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj\n' +
'mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz\n' +
'7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6\n' +
'+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI\n' +
'0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB\n' +
'Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm\n' +
'UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2\n' +
'LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY\n' +
'+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS\n' +
'k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl\n' +
'7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm\n' +
'btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl\n' +
'urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+\n' +
'fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63\n' +
'n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE\n' +
'76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H\n' +
'9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT\n' +
'4PsJYGw=\n' +
'-----END CERTIFICATE-----\n',
'-----BEGIN CERTIFICATE-----\n' +
'MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5\n' +
'MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g\n' +
'Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG\n' +
'A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg\n' +
'Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl\n' +
'ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j\n' +
'QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr\n' +
'ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr\n' +
'BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM\n' +
'YyRIHN8wfdVoOw==\n' +
'-----END CERTIFICATE-----\n',
'-----BEGIN CERTIFICATE-----\n' +
'MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5\n' +
'MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g\n' +
'Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG\n' +
'A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg\n' +
'Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi\n' +
'9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk\n' +
'M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB\n' +
'/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB\n' +
'MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw\n' +
'CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW\n' +
'1KyLa2tJElMzrdfkviT8tQp21KW8EA==\n' +
'-----END CERTIFICATE-----\n',
'-----BEGIN CERTIFICATE-----\n' +
'MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx\n' +
'EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT\n' +
'HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs\n' +
'ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5\n' +
'MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD\n' +
'VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy\n' +
'ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy\n' +
'dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI\n' +
'hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p\n' +
'OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2\n' +
'8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K\n' +
'Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe\n' +
'hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk\n' +
'6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw\n' +
'DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q\n' +
'AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI\n' +
'bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB\n' +
've6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z\n' +
'qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd\n' +
'iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn\n' +
'0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN\n' +
'sSi6\n' +
'-----END CERTIFICATE-----\n',
];

52
node_modules/aws-ssl-profiles/package.json generated vendored Normal file
View File

@ -0,0 +1,52 @@
{
"name": "aws-ssl-profiles",
"version": "1.1.2",
"main": "lib/index.js",
"author": "https://github.com/wellwelwel",
"description": "AWS RDS SSL certificates bundles.",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/mysqljs/aws-ssl-profiles"
},
"bugs": {
"url": "https://github.com/mysqljs/aws-ssl-profiles/issues"
},
"devDependencies": {
"@biomejs/biome": "^1.8.3",
"@types/node": "^22.5.1",
"@types/x509.js": "^1.0.3",
"poku": "^2.5.0",
"prettier": "^3.3.3",
"tsx": "^4.19.0",
"typescript": "^5.5.4",
"x509.js": "^1.0.0"
},
"files": [
"lib"
],
"engines": {
"node": ">= 6.0.0"
},
"keywords": [
"mysql",
"mysql2",
"pg",
"postgres",
"aws",
"rds",
"ssl",
"certificates",
"ca",
"bundle"
],
"scripts": {
"build": "npx tsc",
"postbuild": "cp src/index.d.ts lib/index.d.ts",
"lint": "npx @biomejs/biome lint && prettier --check .",
"lint:fix": "npx @biomejs/biome lint --write . && prettier --write .",
"pretest": "npm run build",
"test": "poku --parallel ./test",
"test:ci": "npm run lint && npm run test"
}
}

1196
node_modules/axios/CHANGELOG.md generated vendored Normal file

File diff suppressed because it is too large Load Diff

7
node_modules/axios/LICENSE generated vendored Normal file
View File

@ -0,0 +1,7 @@
# Copyright (c) 2014-present Matt Zabriskie & Collaborators
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

3
node_modules/axios/MIGRATION_GUIDE.md generated vendored Normal file
View File

@ -0,0 +1,3 @@
# Migration Guide
## 0.x.x -> 1.1.0

1679
node_modules/axios/README.md generated vendored Normal file

File diff suppressed because it is too large Load Diff

4293
node_modules/axios/dist/axios.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

1
node_modules/axios/dist/axios.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

3
node_modules/axios/dist/axios.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

1
node_modules/axios/dist/axios.min.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

3726
node_modules/axios/dist/browser/axios.cjs generated vendored Normal file

File diff suppressed because it is too large Load Diff

1
node_modules/axios/dist/browser/axios.cjs.map generated vendored Normal file

File diff suppressed because one or more lines are too long

3749
node_modules/axios/dist/esm/axios.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

1
node_modules/axios/dist/esm/axios.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

3
node_modules/axios/dist/esm/axios.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

1
node_modules/axios/dist/esm/axios.min.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

4783
node_modules/axios/dist/node/axios.cjs generated vendored Normal file

File diff suppressed because it is too large Load Diff

1
node_modules/axios/dist/node/axios.cjs.map generated vendored Normal file

File diff suppressed because one or more lines are too long

550
node_modules/axios/index.d.cts generated vendored Normal file
View File

@ -0,0 +1,550 @@
interface RawAxiosHeaders {
[key: string]: axios.AxiosHeaderValue;
}
type MethodsHeaders = Partial<{
[Key in axios.Method as Lowercase<Key>]: AxiosHeaders;
} & {common: AxiosHeaders}>;
type AxiosHeaderMatcher = (this: AxiosHeaders, value: string, name: string, headers: RawAxiosHeaders) => boolean;
type AxiosHeaderParser = (this: AxiosHeaders, value: axios.AxiosHeaderValue, header: string) => any;
type CommonRequestHeadersList = 'Accept' | 'Content-Length' | 'User-Agent'| 'Content-Encoding' | 'Authorization';
type ContentType = axios.AxiosHeaderValue | 'text/html' | 'text/plain' | 'multipart/form-data' | 'application/json' | 'application/x-www-form-urlencoded' | 'application/octet-stream';
type CommonResponseHeadersList = 'Server' | 'Content-Type' | 'Content-Length' | 'Cache-Control'| 'Content-Encoding';
declare class AxiosHeaders {
constructor(
headers?: RawAxiosHeaders | AxiosHeaders | string
);
[key: string]: any;
set(headerName?: string, value?: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
set(headers?: RawAxiosHeaders | AxiosHeaders | string, rewrite?: boolean): AxiosHeaders;
get(headerName: string, parser: RegExp): RegExpExecArray | null;
get(headerName: string, matcher?: true | AxiosHeaderParser): axios.AxiosHeaderValue;
has(header: string, matcher?: AxiosHeaderMatcher): boolean;
delete(header: string | string[], matcher?: AxiosHeaderMatcher): boolean;
clear(matcher?: AxiosHeaderMatcher): boolean;
normalize(format: boolean): AxiosHeaders;
concat(...targets: Array<AxiosHeaders | RawAxiosHeaders | string | undefined | null>): AxiosHeaders;
toJSON(asStrings?: boolean): RawAxiosHeaders;
static from(thing?: AxiosHeaders | RawAxiosHeaders | string): AxiosHeaders;
static accessor(header: string | string[]): AxiosHeaders;
static concat(...targets: Array<AxiosHeaders | RawAxiosHeaders | string | undefined | null>): AxiosHeaders;
setContentType(value: ContentType, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getContentType(parser?: RegExp): RegExpExecArray | null;
getContentType(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
hasContentType(matcher?: AxiosHeaderMatcher): boolean;
setContentLength(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getContentLength(parser?: RegExp): RegExpExecArray | null;
getContentLength(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
hasContentLength(matcher?: AxiosHeaderMatcher): boolean;
setAccept(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getAccept(parser?: RegExp): RegExpExecArray | null;
getAccept(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
hasAccept(matcher?: AxiosHeaderMatcher): boolean;
setUserAgent(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getUserAgent(parser?: RegExp): RegExpExecArray | null;
getUserAgent(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
hasUserAgent(matcher?: AxiosHeaderMatcher): boolean;
setContentEncoding(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getContentEncoding(parser?: RegExp): RegExpExecArray | null;
getContentEncoding(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
hasContentEncoding(matcher?: AxiosHeaderMatcher): boolean;
setAuthorization(value: axios.AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getAuthorization(parser?: RegExp): RegExpExecArray | null;
getAuthorization(matcher?: AxiosHeaderMatcher): axios.AxiosHeaderValue;
hasAuthorization(matcher?: AxiosHeaderMatcher): boolean;
[Symbol.iterator](): IterableIterator<[string, axios.AxiosHeaderValue]>;
}
declare class AxiosError<T = unknown, D = any> extends Error {
constructor(
message?: string,
code?: string,
config?: axios.InternalAxiosRequestConfig<D>,
request?: any,
response?: axios.AxiosResponse<T, D>
);
config?: axios.InternalAxiosRequestConfig<D>;
code?: string;
request?: any;
response?: axios.AxiosResponse<T, D>;
isAxiosError: boolean;
status?: number;
toJSON: () => object;
cause?: Error;
static readonly ERR_FR_TOO_MANY_REDIRECTS = "ERR_FR_TOO_MANY_REDIRECTS";
static readonly ERR_BAD_OPTION_VALUE = "ERR_BAD_OPTION_VALUE";
static readonly ERR_BAD_OPTION = "ERR_BAD_OPTION";
static readonly ERR_NETWORK = "ERR_NETWORK";
static readonly ERR_DEPRECATED = "ERR_DEPRECATED";
static readonly ERR_BAD_RESPONSE = "ERR_BAD_RESPONSE";
static readonly ERR_BAD_REQUEST = "ERR_BAD_REQUEST";
static readonly ERR_NOT_SUPPORT = "ERR_NOT_SUPPORT";
static readonly ERR_INVALID_URL = "ERR_INVALID_URL";
static readonly ERR_CANCELED = "ERR_CANCELED";
static readonly ECONNABORTED = "ECONNABORTED";
static readonly ETIMEDOUT = "ETIMEDOUT";
}
declare class CanceledError<T> extends AxiosError<T> {
}
declare class Axios {
constructor(config?: axios.AxiosRequestConfig);
defaults: axios.AxiosDefaults;
interceptors: {
request: axios.AxiosInterceptorManager<axios.InternalAxiosRequestConfig>;
response: axios.AxiosInterceptorManager<axios.AxiosResponse>;
};
getUri(config?: axios.AxiosRequestConfig): string;
request<T = any, R = axios.AxiosResponse<T>, D = any>(config: axios.AxiosRequestConfig<D>): Promise<R>;
get<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
delete<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
head<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
options<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, config?: axios.AxiosRequestConfig<D>): Promise<R>;
post<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
put<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
patch<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
postForm<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
putForm<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
patchForm<T = any, R = axios.AxiosResponse<T>, D = any>(url: string, data?: D, config?: axios.AxiosRequestConfig<D>): Promise<R>;
}
declare enum HttpStatusCode {
Continue = 100,
SwitchingProtocols = 101,
Processing = 102,
EarlyHints = 103,
Ok = 200,
Created = 201,
Accepted = 202,
NonAuthoritativeInformation = 203,
NoContent = 204,
ResetContent = 205,
PartialContent = 206,
MultiStatus = 207,
AlreadyReported = 208,
ImUsed = 226,
MultipleChoices = 300,
MovedPermanently = 301,
Found = 302,
SeeOther = 303,
NotModified = 304,
UseProxy = 305,
Unused = 306,
TemporaryRedirect = 307,
PermanentRedirect = 308,
BadRequest = 400,
Unauthorized = 401,
PaymentRequired = 402,
Forbidden = 403,
NotFound = 404,
MethodNotAllowed = 405,
NotAcceptable = 406,
ProxyAuthenticationRequired = 407,
RequestTimeout = 408,
Conflict = 409,
Gone = 410,
LengthRequired = 411,
PreconditionFailed = 412,
PayloadTooLarge = 413,
UriTooLong = 414,
UnsupportedMediaType = 415,
RangeNotSatisfiable = 416,
ExpectationFailed = 417,
ImATeapot = 418,
MisdirectedRequest = 421,
UnprocessableEntity = 422,
Locked = 423,
FailedDependency = 424,
TooEarly = 425,
UpgradeRequired = 426,
PreconditionRequired = 428,
TooManyRequests = 429,
RequestHeaderFieldsTooLarge = 431,
UnavailableForLegalReasons = 451,
InternalServerError = 500,
NotImplemented = 501,
BadGateway = 502,
ServiceUnavailable = 503,
GatewayTimeout = 504,
HttpVersionNotSupported = 505,
VariantAlsoNegotiates = 506,
InsufficientStorage = 507,
LoopDetected = 508,
NotExtended = 510,
NetworkAuthenticationRequired = 511,
}
type InternalAxiosError<T = unknown, D = any> = AxiosError<T, D>;
declare namespace axios {
type AxiosError<T = unknown, D = any> = InternalAxiosError<T, D>;
type RawAxiosRequestHeaders = Partial<RawAxiosHeaders & {
[Key in CommonRequestHeadersList]: AxiosHeaderValue;
} & {
'Content-Type': ContentType
}>;
type AxiosRequestHeaders = RawAxiosRequestHeaders & AxiosHeaders;
type AxiosHeaderValue = AxiosHeaders | string | string[] | number | boolean | null;
type RawCommonResponseHeaders = {
[Key in CommonResponseHeadersList]: AxiosHeaderValue;
} & {
"set-cookie": string[];
};
type RawAxiosResponseHeaders = Partial<RawAxiosHeaders & RawCommonResponseHeaders>;
type AxiosResponseHeaders = RawAxiosResponseHeaders & AxiosHeaders;
interface AxiosRequestTransformer {
(this: InternalAxiosRequestConfig, data: any, headers: AxiosRequestHeaders): any;
}
interface AxiosResponseTransformer {
(this: InternalAxiosRequestConfig, data: any, headers: AxiosResponseHeaders, status?: number): any;
}
interface AxiosAdapter {
(config: InternalAxiosRequestConfig): AxiosPromise;
}
interface AxiosBasicCredentials {
username: string;
password: string;
}
interface AxiosProxyConfig {
host: string;
port: number;
auth?: AxiosBasicCredentials;
protocol?: string;
}
type Method =
| 'get' | 'GET'
| 'delete' | 'DELETE'
| 'head' | 'HEAD'
| 'options' | 'OPTIONS'
| 'post' | 'POST'
| 'put' | 'PUT'
| 'patch' | 'PATCH'
| 'purge' | 'PURGE'
| 'link' | 'LINK'
| 'unlink' | 'UNLINK';
type ResponseType =
| 'arraybuffer'
| 'blob'
| 'document'
| 'json'
| 'text'
| 'stream'
| 'formdata';
type responseEncoding =
| 'ascii' | 'ASCII'
| 'ansi' | 'ANSI'
| 'binary' | 'BINARY'
| 'base64' | 'BASE64'
| 'base64url' | 'BASE64URL'
| 'hex' | 'HEX'
| 'latin1' | 'LATIN1'
| 'ucs-2' | 'UCS-2'
| 'ucs2' | 'UCS2'
| 'utf-8' | 'UTF-8'
| 'utf8' | 'UTF8'
| 'utf16le' | 'UTF16LE';
interface TransitionalOptions {
silentJSONParsing?: boolean;
forcedJSONParsing?: boolean;
clarifyTimeoutError?: boolean;
}
interface GenericAbortSignal {
readonly aborted: boolean;
onabort?: ((...args: any) => any) | null;
addEventListener?: (...args: any) => any;
removeEventListener?: (...args: any) => any;
}
interface FormDataVisitorHelpers {
defaultVisitor: SerializerVisitor;
convertValue: (value: any) => any;
isVisitable: (value: any) => boolean;
}
interface SerializerVisitor {
(
this: GenericFormData,
value: any,
key: string | number,
path: null | Array<string | number>,
helpers: FormDataVisitorHelpers
): boolean;
}
interface SerializerOptions {
visitor?: SerializerVisitor;
dots?: boolean;
metaTokens?: boolean;
indexes?: boolean | null;
}
// tslint:disable-next-line
interface FormSerializerOptions extends SerializerOptions {
}
interface ParamEncoder {
(value: any, defaultEncoder: (value: any) => any): any;
}
interface CustomParamsSerializer {
(params: Record<string, any>, options?: ParamsSerializerOptions): string;
}
interface ParamsSerializerOptions extends SerializerOptions {
encode?: ParamEncoder;
serialize?: CustomParamsSerializer;
}
type MaxUploadRate = number;
type MaxDownloadRate = number;
type BrowserProgressEvent = any;
interface AxiosProgressEvent {
loaded: number;
total?: number;
progress?: number;
bytes: number;
rate?: number;
estimated?: number;
upload?: boolean;
download?: boolean;
event?: BrowserProgressEvent;
lengthComputable: boolean;
}
type Milliseconds = number;
type AxiosAdapterName = 'fetch' | 'xhr' | 'http' | (string & {});
type AxiosAdapterConfig = AxiosAdapter | AxiosAdapterName;
type AddressFamily = 4 | 6 | undefined;
interface LookupAddressEntry {
address: string;
family?: AddressFamily;
}
type LookupAddress = string | LookupAddressEntry;
interface AxiosRequestConfig<D = any> {
url?: string;
method?: Method | string;
baseURL?: string;
allowAbsoluteUrls?: boolean;
transformRequest?: AxiosRequestTransformer | AxiosRequestTransformer[];
transformResponse?: AxiosResponseTransformer | AxiosResponseTransformer[];
headers?: (RawAxiosRequestHeaders & MethodsHeaders) | AxiosHeaders;
params?: any;
paramsSerializer?: ParamsSerializerOptions | CustomParamsSerializer;
data?: D;
timeout?: Milliseconds;
timeoutErrorMessage?: string;
withCredentials?: boolean;
adapter?: AxiosAdapterConfig | AxiosAdapterConfig[];
auth?: AxiosBasicCredentials;
responseType?: ResponseType;
responseEncoding?: responseEncoding | string;
xsrfCookieName?: string;
xsrfHeaderName?: string;
onUploadProgress?: (progressEvent: AxiosProgressEvent) => void;
onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void;
maxContentLength?: number;
validateStatus?: ((status: number) => boolean) | null;
maxBodyLength?: number;
maxRedirects?: number;
maxRate?: number | [MaxUploadRate, MaxDownloadRate];
beforeRedirect?: (options: Record<string, any>, responseDetails: {headers: Record<string, string>, statusCode: HttpStatusCode}) => void;
socketPath?: string | null;
transport?: any;
httpAgent?: any;
httpsAgent?: any;
proxy?: AxiosProxyConfig | false;
cancelToken?: CancelToken;
decompress?: boolean;
transitional?: TransitionalOptions;
signal?: GenericAbortSignal;
insecureHTTPParser?: boolean;
env?: {
FormData?: new (...args: any[]) => object;
};
formSerializer?: FormSerializerOptions;
family?: AddressFamily;
lookup?: ((hostname: string, options: object, cb: (err: Error | null, address: LookupAddress | LookupAddress[], family?: AddressFamily) => void) => void) |
((hostname: string, options: object) => Promise<[address: LookupAddressEntry | LookupAddressEntry[], family?: AddressFamily] | LookupAddress>);
withXSRFToken?: boolean | ((config: InternalAxiosRequestConfig) => boolean | undefined);
fetchOptions?: Record<string, any>;
}
// Alias
type RawAxiosRequestConfig<D = any> = AxiosRequestConfig<D>;
interface InternalAxiosRequestConfig<D = any> extends AxiosRequestConfig {
headers: AxiosRequestHeaders;
}
interface HeadersDefaults {
common: RawAxiosRequestHeaders;
delete: RawAxiosRequestHeaders;
get: RawAxiosRequestHeaders;
head: RawAxiosRequestHeaders;
post: RawAxiosRequestHeaders;
put: RawAxiosRequestHeaders;
patch: RawAxiosRequestHeaders;
options?: RawAxiosRequestHeaders;
purge?: RawAxiosRequestHeaders;
link?: RawAxiosRequestHeaders;
unlink?: RawAxiosRequestHeaders;
}
interface AxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
headers: HeadersDefaults;
}
interface CreateAxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
headers?: RawAxiosRequestHeaders | AxiosHeaders | Partial<HeadersDefaults>;
}
interface AxiosResponse<T = any, D = any> {
data: T;
status: number;
statusText: string;
headers: RawAxiosResponseHeaders | AxiosResponseHeaders;
config: InternalAxiosRequestConfig<D>;
request?: any;
}
type AxiosPromise<T = any> = Promise<AxiosResponse<T>>;
interface CancelStatic {
new (message?: string): Cancel;
}
interface Cancel {
message: string | undefined;
}
interface Canceler {
(message?: string, config?: AxiosRequestConfig, request?: any): void;
}
interface CancelTokenStatic {
new (executor: (cancel: Canceler) => void): CancelToken;
source(): CancelTokenSource;
}
interface CancelToken {
promise: Promise<Cancel>;
reason?: Cancel;
throwIfRequested(): void;
}
interface CancelTokenSource {
token: CancelToken;
cancel: Canceler;
}
interface AxiosInterceptorOptions {
synchronous?: boolean;
runWhen?: (config: InternalAxiosRequestConfig) => boolean;
}
type AxiosRequestInterceptorUse<T> = (onFulfilled?: ((value: T) => T | Promise<T>) | null, onRejected?: ((error: any) => any) | null, options?: AxiosInterceptorOptions) => number;
type AxiosResponseInterceptorUse<T> = (onFulfilled?: ((value: T) => T | Promise<T>) | null, onRejected?: ((error: any) => any) | null) => number;
interface AxiosInterceptorManager<V> {
use: V extends AxiosResponse ? AxiosResponseInterceptorUse<V> : AxiosRequestInterceptorUse<V>;
eject(id: number): void;
clear(): void;
}
interface AxiosInstance extends Axios {
<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R>;
<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
create(config?: CreateAxiosDefaults): AxiosInstance;
defaults: Omit<AxiosDefaults, 'headers'> & {
headers: HeadersDefaults & {
[key: string]: AxiosHeaderValue
}
};
}
interface GenericFormData {
append(name: string, value: any, options?: any): any;
}
interface GenericHTMLFormElement {
name: string;
method: string;
submit(): void;
}
interface AxiosStatic extends AxiosInstance {
Cancel: CancelStatic;
CancelToken: CancelTokenStatic;
Axios: typeof Axios;
AxiosError: typeof AxiosError;
CanceledError: typeof CanceledError;
HttpStatusCode: typeof HttpStatusCode;
readonly VERSION: string;
isCancel(value: any): value is Cancel;
all<T>(values: Array<T | Promise<T>>): Promise<T[]>;
spread<T, R>(callback: (...args: T[]) => R): (array: T[]) => R;
isAxiosError<T = any, D = any>(payload: any): payload is AxiosError<T, D>;
toFormData(sourceObj: object, targetFormData?: GenericFormData, options?: FormSerializerOptions): GenericFormData;
formToJSON(form: GenericFormData|GenericHTMLFormElement): object;
getAdapter(adapters: AxiosAdapterConfig | AxiosAdapterConfig[] | undefined): AxiosAdapter;
AxiosHeaders: typeof AxiosHeaders;
}
}
declare const axios: axios.AxiosStatic;
export = axios;

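A hedged CommonJS sketch of the error-handling surface these declarations describe (`isAxiosError`, `HttpStatusCode`, `AxiosResponse.data`); the URL and the `fetchUser` function are hypothetical:

```js
// Sketch: typical axios error handling from plain CommonJS (hypothetical endpoint).
const axios = require('axios');

async function fetchUser(id) {
  try {
    const response = await axios.get(`https://api.example.com/users/${id}`, {
      timeout: 5000,
    });
    return response.data; // AxiosResponse<T>.data
  } catch (error) {
    if (axios.isAxiosError(error)) {
      // error.response is undefined for network errors (code ERR_NETWORK)
      if (error.response && error.response.status === axios.HttpStatusCode.NotFound) {
        return null; // treat 404 as "no such user"
      }
    }
    throw error;
  }
}
```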
572
node_modules/axios/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,572 @@
// TypeScript Version: 4.7
export type AxiosHeaderValue = AxiosHeaders | string | string[] | number | boolean | null;
interface RawAxiosHeaders {
[key: string]: AxiosHeaderValue;
}
type MethodsHeaders = Partial<{
[Key in Method as Lowercase<Key>]: AxiosHeaders;
} & {common: AxiosHeaders}>;
type AxiosHeaderMatcher = string | RegExp | ((this: AxiosHeaders, value: string, name: string) => boolean);
type AxiosHeaderParser = (this: AxiosHeaders, value: AxiosHeaderValue, header: string) => any;
export class AxiosHeaders {
constructor(
headers?: RawAxiosHeaders | AxiosHeaders | string
);
[key: string]: any;
set(headerName?: string, value?: AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
set(headers?: RawAxiosHeaders | AxiosHeaders | string, rewrite?: boolean): AxiosHeaders;
get(headerName: string, parser: RegExp): RegExpExecArray | null;
get(headerName: string, matcher?: true | AxiosHeaderParser): AxiosHeaderValue;
has(header: string, matcher?: AxiosHeaderMatcher): boolean;
delete(header: string | string[], matcher?: AxiosHeaderMatcher): boolean;
clear(matcher?: AxiosHeaderMatcher): boolean;
normalize(format: boolean): AxiosHeaders;
concat(...targets: Array<AxiosHeaders | RawAxiosHeaders | string | undefined | null>): AxiosHeaders;
toJSON(asStrings?: boolean): RawAxiosHeaders;
static from(thing?: AxiosHeaders | RawAxiosHeaders | string): AxiosHeaders;
static accessor(header: string | string[]): AxiosHeaders;
static concat(...targets: Array<AxiosHeaders | RawAxiosHeaders | string | undefined | null>): AxiosHeaders;
setContentType(value: ContentType, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getContentType(parser?: RegExp): RegExpExecArray | null;
getContentType(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
hasContentType(matcher?: AxiosHeaderMatcher): boolean;
setContentLength(value: AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getContentLength(parser?: RegExp): RegExpExecArray | null;
getContentLength(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
hasContentLength(matcher?: AxiosHeaderMatcher): boolean;
setAccept(value: AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getAccept(parser?: RegExp): RegExpExecArray | null;
getAccept(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
hasAccept(matcher?: AxiosHeaderMatcher): boolean;
setUserAgent(value: AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getUserAgent(parser?: RegExp): RegExpExecArray | null;
getUserAgent(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
hasUserAgent(matcher?: AxiosHeaderMatcher): boolean;
setContentEncoding(value: AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getContentEncoding(parser?: RegExp): RegExpExecArray | null;
getContentEncoding(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
hasContentEncoding(matcher?: AxiosHeaderMatcher): boolean;
setAuthorization(value: AxiosHeaderValue, rewrite?: boolean | AxiosHeaderMatcher): AxiosHeaders;
getAuthorization(parser?: RegExp): RegExpExecArray | null;
getAuthorization(matcher?: AxiosHeaderMatcher): AxiosHeaderValue;
hasAuthorization(matcher?: AxiosHeaderMatcher): boolean;
getSetCookie(): string[];
[Symbol.iterator](): IterableIterator<[string, AxiosHeaderValue]>;
}
type CommonRequestHeadersList = 'Accept' | 'Content-Length' | 'User-Agent' | 'Content-Encoding' | 'Authorization';
type ContentType = AxiosHeaderValue | 'text/html' | 'text/plain' | 'multipart/form-data' | 'application/json' | 'application/x-www-form-urlencoded' | 'application/octet-stream';
export type RawAxiosRequestHeaders = Partial<RawAxiosHeaders & {
[Key in CommonRequestHeadersList]: AxiosHeaderValue;
} & {
'Content-Type': ContentType
}>;
export type AxiosRequestHeaders = RawAxiosRequestHeaders & AxiosHeaders;
type CommonResponseHeadersList = 'Server' | 'Content-Type' | 'Content-Length' | 'Cache-Control'| 'Content-Encoding';
type RawCommonResponseHeaders = {
[Key in CommonResponseHeadersList]: AxiosHeaderValue;
} & {
"set-cookie": string[];
};
export type RawAxiosResponseHeaders = Partial<RawAxiosHeaders & RawCommonResponseHeaders>;
export type AxiosResponseHeaders = RawAxiosResponseHeaders & AxiosHeaders;
export interface AxiosRequestTransformer {
(this: InternalAxiosRequestConfig, data: any, headers: AxiosRequestHeaders): any;
}
export interface AxiosResponseTransformer {
(this: InternalAxiosRequestConfig, data: any, headers: AxiosResponseHeaders, status?: number): any;
}
export interface AxiosAdapter {
(config: InternalAxiosRequestConfig): AxiosPromise;
}
export interface AxiosBasicCredentials {
username: string;
password: string;
}
export interface AxiosProxyConfig {
host: string;
port: number;
auth?: AxiosBasicCredentials;
protocol?: string;
}
export enum HttpStatusCode {
Continue = 100,
SwitchingProtocols = 101,
Processing = 102,
EarlyHints = 103,
Ok = 200,
Created = 201,
Accepted = 202,
NonAuthoritativeInformation = 203,
NoContent = 204,
ResetContent = 205,
PartialContent = 206,
MultiStatus = 207,
AlreadyReported = 208,
ImUsed = 226,
MultipleChoices = 300,
MovedPermanently = 301,
Found = 302,
SeeOther = 303,
NotModified = 304,
UseProxy = 305,
Unused = 306,
TemporaryRedirect = 307,
PermanentRedirect = 308,
BadRequest = 400,
Unauthorized = 401,
PaymentRequired = 402,
Forbidden = 403,
NotFound = 404,
MethodNotAllowed = 405,
NotAcceptable = 406,
ProxyAuthenticationRequired = 407,
RequestTimeout = 408,
Conflict = 409,
Gone = 410,
LengthRequired = 411,
PreconditionFailed = 412,
PayloadTooLarge = 413,
UriTooLong = 414,
UnsupportedMediaType = 415,
RangeNotSatisfiable = 416,
ExpectationFailed = 417,
ImATeapot = 418,
MisdirectedRequest = 421,
UnprocessableEntity = 422,
Locked = 423,
FailedDependency = 424,
TooEarly = 425,
UpgradeRequired = 426,
PreconditionRequired = 428,
TooManyRequests = 429,
RequestHeaderFieldsTooLarge = 431,
UnavailableForLegalReasons = 451,
InternalServerError = 500,
NotImplemented = 501,
BadGateway = 502,
ServiceUnavailable = 503,
GatewayTimeout = 504,
HttpVersionNotSupported = 505,
VariantAlsoNegotiates = 506,
InsufficientStorage = 507,
LoopDetected = 508,
NotExtended = 510,
NetworkAuthenticationRequired = 511,
}
export type Method =
| 'get' | 'GET'
| 'delete' | 'DELETE'
| 'head' | 'HEAD'
| 'options' | 'OPTIONS'
| 'post' | 'POST'
| 'put' | 'PUT'
| 'patch' | 'PATCH'
| 'purge' | 'PURGE'
| 'link' | 'LINK'
| 'unlink' | 'UNLINK';
export type ResponseType =
| 'arraybuffer'
| 'blob'
| 'document'
| 'json'
| 'text'
| 'stream'
| 'formdata';
export type responseEncoding =
| 'ascii' | 'ASCII'
| 'ansi' | 'ANSI'
| 'binary' | 'BINARY'
| 'base64' | 'BASE64'
| 'base64url' | 'BASE64URL'
| 'hex' | 'HEX'
| 'latin1' | 'LATIN1'
| 'ucs-2' | 'UCS-2'
| 'ucs2' | 'UCS2'
| 'utf-8' | 'UTF-8'
| 'utf8' | 'UTF8'
| 'utf16le' | 'UTF16LE';
export interface TransitionalOptions {
silentJSONParsing?: boolean;
forcedJSONParsing?: boolean;
clarifyTimeoutError?: boolean;
}
export interface GenericAbortSignal {
readonly aborted: boolean;
onabort?: ((...args: any) => any) | null;
addEventListener?: (...args: any) => any;
removeEventListener?: (...args: any) => any;
}
export interface FormDataVisitorHelpers {
defaultVisitor: SerializerVisitor;
convertValue: (value: any) => any;
isVisitable: (value: any) => boolean;
}
export interface SerializerVisitor {
(
this: GenericFormData,
value: any,
key: string | number,
path: null | Array<string | number>,
helpers: FormDataVisitorHelpers
): boolean;
}
export interface SerializerOptions {
visitor?: SerializerVisitor;
dots?: boolean;
metaTokens?: boolean;
indexes?: boolean | null;
}
// tslint:disable-next-line
export interface FormSerializerOptions extends SerializerOptions {
}
export interface ParamEncoder {
(value: any, defaultEncoder: (value: any) => any): any;
}
export interface CustomParamsSerializer {
(params: Record<string, any>, options?: ParamsSerializerOptions): string;
}
export interface ParamsSerializerOptions extends SerializerOptions {
encode?: ParamEncoder;
serialize?: CustomParamsSerializer;
}
type MaxUploadRate = number;
type MaxDownloadRate = number;
type BrowserProgressEvent = any;
export interface AxiosProgressEvent {
loaded: number;
total?: number;
progress?: number;
bytes: number;
rate?: number;
estimated?: number;
upload?: boolean;
download?: boolean;
event?: BrowserProgressEvent;
lengthComputable: boolean;
}
type Milliseconds = number;
type AxiosAdapterName = 'fetch' | 'xhr' | 'http' | (string & {});
type AxiosAdapterConfig = AxiosAdapter | AxiosAdapterName;
export type AddressFamily = 4 | 6 | undefined;
export interface LookupAddressEntry {
address: string;
family?: AddressFamily;
}
export type LookupAddress = string | LookupAddressEntry;
export interface AxiosRequestConfig<D = any> {
url?: string;
method?: Method | string;
baseURL?: string;
allowAbsoluteUrls?: boolean;
transformRequest?: AxiosRequestTransformer | AxiosRequestTransformer[];
transformResponse?: AxiosResponseTransformer | AxiosResponseTransformer[];
headers?: (RawAxiosRequestHeaders & MethodsHeaders) | AxiosHeaders;
params?: any;
paramsSerializer?: ParamsSerializerOptions | CustomParamsSerializer;
data?: D;
timeout?: Milliseconds;
timeoutErrorMessage?: string;
withCredentials?: boolean;
adapter?: AxiosAdapterConfig | AxiosAdapterConfig[];
auth?: AxiosBasicCredentials;
responseType?: ResponseType;
responseEncoding?: responseEncoding | string;
xsrfCookieName?: string;
xsrfHeaderName?: string;
onUploadProgress?: (progressEvent: AxiosProgressEvent) => void;
onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void;
maxContentLength?: number;
validateStatus?: ((status: number) => boolean) | null;
maxBodyLength?: number;
maxRedirects?: number;
maxRate?: number | [MaxUploadRate, MaxDownloadRate];
beforeRedirect?: (options: Record<string, any>, responseDetails: {headers: Record<string, string>, statusCode: HttpStatusCode}) => void;
socketPath?: string | null;
transport?: any;
httpAgent?: any;
httpsAgent?: any;
proxy?: AxiosProxyConfig | false;
cancelToken?: CancelToken;
decompress?: boolean;
transitional?: TransitionalOptions;
signal?: GenericAbortSignal;
insecureHTTPParser?: boolean;
env?: {
FormData?: new (...args: any[]) => object;
};
formSerializer?: FormSerializerOptions;
family?: AddressFamily;
lookup?: ((hostname: string, options: object, cb: (err: Error | null, address: LookupAddress | LookupAddress[], family?: AddressFamily) => void) => void) |
((hostname: string, options: object) => Promise<[address: LookupAddressEntry | LookupAddressEntry[], family?: AddressFamily] | LookupAddress>);
withXSRFToken?: boolean | ((config: InternalAxiosRequestConfig) => boolean | undefined);
fetchOptions?: Record<string, any>;
}
// Alias
export type RawAxiosRequestConfig<D = any> = AxiosRequestConfig<D>;
export interface InternalAxiosRequestConfig<D = any> extends AxiosRequestConfig<D> {
headers: AxiosRequestHeaders;
}
export interface HeadersDefaults {
common: RawAxiosRequestHeaders;
delete: RawAxiosRequestHeaders;
get: RawAxiosRequestHeaders;
head: RawAxiosRequestHeaders;
post: RawAxiosRequestHeaders;
put: RawAxiosRequestHeaders;
patch: RawAxiosRequestHeaders;
options?: RawAxiosRequestHeaders;
purge?: RawAxiosRequestHeaders;
link?: RawAxiosRequestHeaders;
unlink?: RawAxiosRequestHeaders;
}
export interface AxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
headers: HeadersDefaults;
}
export interface CreateAxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
headers?: RawAxiosRequestHeaders | AxiosHeaders | Partial<HeadersDefaults>;
}
export interface AxiosResponse<T = any, D = any> {
data: T;
status: number;
statusText: string;
headers: RawAxiosResponseHeaders | AxiosResponseHeaders;
config: InternalAxiosRequestConfig<D>;
request?: any;
}
export class AxiosError<T = unknown, D = any> extends Error {
constructor(
message?: string,
code?: string,
config?: InternalAxiosRequestConfig<D>,
request?: any,
response?: AxiosResponse<T, D>
);
config?: InternalAxiosRequestConfig<D>;
code?: string;
request?: any;
response?: AxiosResponse<T, D>;
isAxiosError: boolean;
status?: number;
toJSON: () => object;
cause?: Error;
static from<T = unknown, D = any>(
error: Error | unknown,
code?: string,
config?: InternalAxiosRequestConfig<D>,
request?: any,
response?: AxiosResponse<T, D>,
customProps?: object,
): AxiosError<T, D>;
static readonly ERR_FR_TOO_MANY_REDIRECTS = "ERR_FR_TOO_MANY_REDIRECTS";
static readonly ERR_BAD_OPTION_VALUE = "ERR_BAD_OPTION_VALUE";
static readonly ERR_BAD_OPTION = "ERR_BAD_OPTION";
static readonly ERR_NETWORK = "ERR_NETWORK";
static readonly ERR_DEPRECATED = "ERR_DEPRECATED";
static readonly ERR_BAD_RESPONSE = "ERR_BAD_RESPONSE";
static readonly ERR_BAD_REQUEST = "ERR_BAD_REQUEST";
static readonly ERR_NOT_SUPPORT = "ERR_NOT_SUPPORT";
static readonly ERR_INVALID_URL = "ERR_INVALID_URL";
static readonly ERR_CANCELED = "ERR_CANCELED";
static readonly ECONNABORTED = "ECONNABORTED";
static readonly ETIMEDOUT = "ETIMEDOUT";
}
export class CanceledError<T> extends AxiosError<T> {
}
export type AxiosPromise<T = any> = Promise<AxiosResponse<T>>;
export interface CancelStatic {
new (message?: string): Cancel;
}
export interface Cancel {
message: string | undefined;
}
export interface Canceler {
(message?: string, config?: AxiosRequestConfig, request?: any): void;
}
export interface CancelTokenStatic {
new (executor: (cancel: Canceler) => void): CancelToken;
source(): CancelTokenSource;
}
export interface CancelToken {
promise: Promise<Cancel>;
reason?: Cancel;
throwIfRequested(): void;
}
export interface CancelTokenSource {
token: CancelToken;
cancel: Canceler;
}
export interface AxiosInterceptorOptions {
synchronous?: boolean;
runWhen?: (config: InternalAxiosRequestConfig) => boolean;
}
type AxiosRequestInterceptorUse<T> = (onFulfilled?: ((value: T) => T | Promise<T>) | null, onRejected?: ((error: any) => any) | null, options?: AxiosInterceptorOptions) => number;
type AxiosResponseInterceptorUse<T> = (onFulfilled?: ((value: T) => T | Promise<T>) | null, onRejected?: ((error: any) => any) | null) => number;
export interface AxiosInterceptorManager<V> {
use: V extends AxiosResponse ? AxiosResponseInterceptorUse<V> : AxiosRequestInterceptorUse<V>;
eject(id: number): void;
clear(): void;
}
export class Axios {
constructor(config?: AxiosRequestConfig);
defaults: AxiosDefaults;
interceptors: {
request: AxiosInterceptorManager<InternalAxiosRequestConfig>;
response: AxiosInterceptorManager<AxiosResponse>;
};
getUri(config?: AxiosRequestConfig): string;
request<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R>;
get<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
delete<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
head<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
options<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
post<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
put<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
patch<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
postForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
putForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
patchForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
}
export interface AxiosInstance extends Axios {
<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R>;
<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
create(config?: CreateAxiosDefaults): AxiosInstance;
defaults: Omit<AxiosDefaults, 'headers'> & {
headers: HeadersDefaults & {
[key: string]: AxiosHeaderValue
}
};
}
export interface GenericFormData {
append(name: string, value: any, options?: any): any;
}
export interface GenericHTMLFormElement {
name: string;
method: string;
submit(): void;
}
export function getAdapter(adapters: AxiosAdapterConfig | AxiosAdapterConfig[] | undefined): AxiosAdapter;
export function toFormData(sourceObj: object, targetFormData?: GenericFormData, options?: FormSerializerOptions): GenericFormData;
export function formToJSON(form: GenericFormData|GenericHTMLFormElement): object;
export function isAxiosError<T = any, D = any>(payload: any): payload is AxiosError<T, D>;
export function spread<T, R>(callback: (...args: T[]) => R): (array: T[]) => R;
export function isCancel(value: any): value is Cancel;
export function all<T>(values: Array<T | Promise<T>>): Promise<T[]>;
export function mergeConfig<D = any>(config1: AxiosRequestConfig<D>, config2: AxiosRequestConfig<D>): AxiosRequestConfig<D>;
export interface AxiosStatic extends AxiosInstance {
Cancel: CancelStatic;
CancelToken: CancelTokenStatic;
Axios: typeof Axios;
AxiosError: typeof AxiosError;
HttpStatusCode: typeof HttpStatusCode;
readonly VERSION: string;
isCancel: typeof isCancel;
all: typeof all;
spread: typeof spread;
isAxiosError: typeof isAxiosError;
toFormData: typeof toFormData;
formToJSON: typeof formToJSON;
getAdapter: typeof getAdapter;
CanceledError: typeof CanceledError;
AxiosHeaders: typeof AxiosHeaders;
mergeConfig: typeof mergeConfig;
}
declare const axios: AxiosStatic;
export default axios;
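For orientation, a minimal usage sketch of the config and response shapes declared above; the endpoint, query parameter and timeout values are illustrative assumptions, not anything defined by these typings.

```js
const axios = require('axios');

// A small consumer of the config surface declared above. The endpoint,
// query parameter and timeout are illustrative placeholders.
async function fetchItems() {
  const response = await axios.get('https://example.com/api/items', {
    params: { page: 1 },            // serialized according to paramsSerializer
    timeout: 5000,                  // Milliseconds
    responseType: 'json',           // one of the ResponseType values
    headers: { Accept: 'application/json' }
  });
  // response has the AxiosResponse<T> shape: data, status, statusText, headers, config
  console.log(response.status, response.data);
}

fetchItems().catch(console.error);
```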

43
node_modules/axios/index.js generated vendored Normal file

@ -0,0 +1,43 @@
import axios from './lib/axios.js';
// This module is intended to unwrap Axios default export as named.
// Keep top-level export same with static properties
// so that it can keep same with es module or cjs
const {
Axios,
AxiosError,
CanceledError,
isCancel,
CancelToken,
VERSION,
all,
Cancel,
isAxiosError,
spread,
toFormData,
AxiosHeaders,
HttpStatusCode,
formToJSON,
getAdapter,
mergeConfig
} = axios;
export {
axios as default,
Axios,
AxiosError,
CanceledError,
isCancel,
CancelToken,
VERSION,
all,
Cancel,
isAxiosError,
spread,
toFormData,
AxiosHeaders,
HttpStatusCode,
formToJSON,
getAdapter,
mergeConfig
}

37
node_modules/axios/lib/adapters/README.md generated vendored Normal file

@ -0,0 +1,37 @@
# axios // adapters
The modules under `adapters/` are modules that handle dispatching a request and settling a returned `Promise` once a response is received.
## Example
```js
var settle = require('./../core/settle');
module.exports = function myAdapter(config) {
// At this point:
// - config has been merged with defaults
// - request transformers have already run
// - request interceptors have already run
// Make the request using config provided
// Upon response settle the Promise
return new Promise(function(resolve, reject) {
// responseData, request and responseHeaders below are placeholders for
// whatever the underlying transport produced; a real adapter fills them in
var response = {
data: responseData,
status: request.status,
statusText: request.statusText,
headers: responseHeaders,
config: config,
request: request
};
settle(resolve, reject, response);
// From here:
// - response transformers will run
// - response interceptors will run
});
}
```
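As a complement to the skeleton above, a small sketch of wiring a custom adapter in per request via the `adapter` config option; the canned payload and URL are assumptions made for illustration only.

```js
const axios = require('axios');

// A toy adapter plugged in through the `adapter` config option. It never
// touches the network and resolves every request with a canned payload;
// a production adapter would normally go through core/settle as in the
// skeleton above so that validateStatus is honoured.
function mockAdapter(config) {
  return Promise.resolve({
    data: { ok: true },
    status: 200,
    statusText: 'OK',
    headers: {},
    config,
    request: null
  });
}

axios.get('/anything', { adapter: mockAdapter })
  .then((res) => console.log(res.status, res.data));
```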

79
node_modules/axios/lib/adapters/adapters.js generated vendored Normal file

@ -0,0 +1,79 @@
import utils from '../utils.js';
import httpAdapter from './http.js';
import xhrAdapter from './xhr.js';
import fetchAdapter from './fetch.js';
import AxiosError from "../core/AxiosError.js";
const knownAdapters = {
http: httpAdapter,
xhr: xhrAdapter,
fetch: fetchAdapter
}
utils.forEach(knownAdapters, (fn, value) => {
if (fn) {
try {
Object.defineProperty(fn, 'name', {value});
} catch (e) {
// eslint-disable-next-line no-empty
}
Object.defineProperty(fn, 'adapterName', {value});
}
});
const renderReason = (reason) => `- ${reason}`;
const isResolvedHandle = (adapter) => utils.isFunction(adapter) || adapter === null || adapter === false;
export default {
getAdapter: (adapters) => {
adapters = utils.isArray(adapters) ? adapters : [adapters];
const {length} = adapters;
let nameOrAdapter;
let adapter;
const rejectedReasons = {};
for (let i = 0; i < length; i++) {
nameOrAdapter = adapters[i];
let id;
adapter = nameOrAdapter;
if (!isResolvedHandle(nameOrAdapter)) {
adapter = knownAdapters[(id = String(nameOrAdapter)).toLowerCase()];
if (adapter === undefined) {
throw new AxiosError(`Unknown adapter '${id}'`);
}
}
if (adapter) {
break;
}
rejectedReasons[id || '#' + i] = adapter;
}
if (!adapter) {
const reasons = Object.entries(rejectedReasons)
.map(([id, state]) => `adapter ${id} ` +
(state === false ? 'is not supported by the environment' : 'is not available in the build')
);
let s = length ?
(reasons.length > 1 ? 'since :\n' + reasons.map(renderReason).join('\n') : ' ' + renderReason(reasons[0])) :
'as no adapter specified';
throw new AxiosError(
`There is no suitable adapter to dispatch the request ` + s,
'ERR_NOT_SUPPORT'
);
}
return adapter;
},
adapters: knownAdapters
}
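A short sketch of the fallback behaviour implemented by `getAdapter` above, assuming an illustrative URL; entries the current environment cannot support are skipped and the first usable adapter handles the request.

```js
const axios = require('axios');

// getAdapter() above walks the list in order: a known adapter whose module
// exported `false` (unsupported in this environment, e.g. 'xhr' under Node)
// is skipped, and the first usable entry wins. The URL is a placeholder.
axios.get('https://example.com/health', { adapter: ['xhr', 'fetch', 'http'] })
  .then((res) => console.log('status', res.status))
  .catch((err) => console.error(err.message));
```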

229
node_modules/axios/lib/adapters/fetch.js generated vendored Normal file

@ -0,0 +1,229 @@
import platform from "../platform/index.js";
import utils from "../utils.js";
import AxiosError from "../core/AxiosError.js";
import composeSignals from "../helpers/composeSignals.js";
import {trackStream} from "../helpers/trackStream.js";
import AxiosHeaders from "../core/AxiosHeaders.js";
import {progressEventReducer, progressEventDecorator, asyncDecorator} from "../helpers/progressEventReducer.js";
import resolveConfig from "../helpers/resolveConfig.js";
import settle from "../core/settle.js";
const isFetchSupported = typeof fetch === 'function' && typeof Request === 'function' && typeof Response === 'function';
const isReadableStreamSupported = isFetchSupported && typeof ReadableStream === 'function';
// used only inside the fetch adapter
const encodeText = isFetchSupported && (typeof TextEncoder === 'function' ?
((encoder) => (str) => encoder.encode(str))(new TextEncoder()) :
async (str) => new Uint8Array(await new Response(str).arrayBuffer())
);
const test = (fn, ...args) => {
try {
return !!fn(...args);
} catch (e) {
return false
}
}
const supportsRequestStream = isReadableStreamSupported && test(() => {
let duplexAccessed = false;
const hasContentType = new Request(platform.origin, {
body: new ReadableStream(),
method: 'POST',
get duplex() {
duplexAccessed = true;
return 'half';
},
}).headers.has('Content-Type');
return duplexAccessed && !hasContentType;
});
const DEFAULT_CHUNK_SIZE = 64 * 1024;
const supportsResponseStream = isReadableStreamSupported &&
test(() => utils.isReadableStream(new Response('').body));
const resolvers = {
stream: supportsResponseStream && ((res) => res.body)
};
isFetchSupported && (((res) => {
['text', 'arrayBuffer', 'blob', 'formData', 'stream'].forEach(type => {
!resolvers[type] && (resolvers[type] = utils.isFunction(res[type]) ? (res) => res[type]() :
(_, config) => {
throw new AxiosError(`Response type '${type}' is not supported`, AxiosError.ERR_NOT_SUPPORT, config);
})
});
})(new Response));
const getBodyLength = async (body) => {
if (body == null) {
return 0;
}
if(utils.isBlob(body)) {
return body.size;
}
if(utils.isSpecCompliantForm(body)) {
const _request = new Request(platform.origin, {
method: 'POST',
body,
});
return (await _request.arrayBuffer()).byteLength;
}
if(utils.isArrayBufferView(body) || utils.isArrayBuffer(body)) {
return body.byteLength;
}
if(utils.isURLSearchParams(body)) {
body = body + '';
}
if(utils.isString(body)) {
return (await encodeText(body)).byteLength;
}
}
const resolveBodyLength = async (headers, body) => {
const length = utils.toFiniteNumber(headers.getContentLength());
return length == null ? getBodyLength(body) : length;
}
export default isFetchSupported && (async (config) => {
let {
url,
method,
data,
signal,
cancelToken,
timeout,
onDownloadProgress,
onUploadProgress,
responseType,
headers,
withCredentials = 'same-origin',
fetchOptions
} = resolveConfig(config);
responseType = responseType ? (responseType + '').toLowerCase() : 'text';
let composedSignal = composeSignals([signal, cancelToken && cancelToken.toAbortSignal()], timeout);
let request;
const unsubscribe = composedSignal && composedSignal.unsubscribe && (() => {
composedSignal.unsubscribe();
});
let requestContentLength;
try {
if (
onUploadProgress && supportsRequestStream && method !== 'get' && method !== 'head' &&
(requestContentLength = await resolveBodyLength(headers, data)) !== 0
) {
let _request = new Request(url, {
method: 'POST',
body: data,
duplex: "half"
});
let contentTypeHeader;
if (utils.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) {
headers.setContentType(contentTypeHeader)
}
if (_request.body) {
const [onProgress, flush] = progressEventDecorator(
requestContentLength,
progressEventReducer(asyncDecorator(onUploadProgress))
);
data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush);
}
}
if (!utils.isString(withCredentials)) {
withCredentials = withCredentials ? 'include' : 'omit';
}
// Cloudflare Workers throws when credentials are defined
// see https://github.com/cloudflare/workerd/issues/902
const isCredentialsSupported = "credentials" in Request.prototype;
request = new Request(url, {
...fetchOptions,
signal: composedSignal,
method: method.toUpperCase(),
headers: headers.normalize().toJSON(),
body: data,
duplex: "half",
credentials: isCredentialsSupported ? withCredentials : undefined
});
let response = await fetch(request);
const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response');
if (supportsResponseStream && (onDownloadProgress || (isStreamResponse && unsubscribe))) {
const options = {};
['status', 'statusText', 'headers'].forEach(prop => {
options[prop] = response[prop];
});
const responseContentLength = utils.toFiniteNumber(response.headers.get('content-length'));
const [onProgress, flush] = onDownloadProgress && progressEventDecorator(
responseContentLength,
progressEventReducer(asyncDecorator(onDownloadProgress), true)
) || [];
response = new Response(
trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => {
flush && flush();
unsubscribe && unsubscribe();
}),
options
);
}
responseType = responseType || 'text';
let responseData = await resolvers[utils.findKey(resolvers, responseType) || 'text'](response, config);
!isStreamResponse && unsubscribe && unsubscribe();
return await new Promise((resolve, reject) => {
settle(resolve, reject, {
data: responseData,
headers: AxiosHeaders.from(response.headers),
status: response.status,
statusText: response.statusText,
config,
request
})
})
} catch (err) {
unsubscribe && unsubscribe();
if (err && err.name === 'TypeError' && /Load failed|fetch/i.test(err.message)) {
throw Object.assign(
new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request),
{
cause: err.cause || err
}
)
}
throw AxiosError.from(err, err && err.code, config, request);
}
});
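A hedged sketch of driving this fetch adapter explicitly, assuming a fetch-capable runtime and a placeholder URL; it combines the `adapter`, `responseType: 'stream'` and `onDownloadProgress` options handled above.

```js
const axios = require('axios');

// Forcing the fetch adapter (requires a fetch-capable runtime) and reading
// the body as a stream with throttled download-progress events.
// The URL is a placeholder.
async function download(url) {
  const res = await axios.get(url, {
    adapter: 'fetch',
    responseType: 'stream',           // resolved through the resolvers table above
    onDownloadProgress: (e) => {
      // AxiosProgressEvent: loaded, total and progress when the length is computable
      if (e.total) console.log(`${Math.round(e.progress * 100)}%`);
    }
  });
  return res.data;                    // a web ReadableStream
}
```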

695
node_modules/axios/lib/adapters/http.js generated vendored Normal file

@ -0,0 +1,695 @@
'use strict';
import utils from './../utils.js';
import settle from './../core/settle.js';
import buildFullPath from '../core/buildFullPath.js';
import buildURL from './../helpers/buildURL.js';
import proxyFromEnv from 'proxy-from-env';
import http from 'http';
import https from 'https';
import util from 'util';
import followRedirects from 'follow-redirects';
import zlib from 'zlib';
import {VERSION} from '../env/data.js';
import transitionalDefaults from '../defaults/transitional.js';
import AxiosError from '../core/AxiosError.js';
import CanceledError from '../cancel/CanceledError.js';
import platform from '../platform/index.js';
import fromDataURI from '../helpers/fromDataURI.js';
import stream from 'stream';
import AxiosHeaders from '../core/AxiosHeaders.js';
import AxiosTransformStream from '../helpers/AxiosTransformStream.js';
import {EventEmitter} from 'events';
import formDataToStream from "../helpers/formDataToStream.js";
import readBlob from "../helpers/readBlob.js";
import ZlibHeaderTransformStream from '../helpers/ZlibHeaderTransformStream.js';
import callbackify from "../helpers/callbackify.js";
import {progressEventReducer, progressEventDecorator, asyncDecorator} from "../helpers/progressEventReducer.js";
const zlibOptions = {
flush: zlib.constants.Z_SYNC_FLUSH,
finishFlush: zlib.constants.Z_SYNC_FLUSH
};
const brotliOptions = {
flush: zlib.constants.BROTLI_OPERATION_FLUSH,
finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH
}
const isBrotliSupported = utils.isFunction(zlib.createBrotliDecompress);
const {http: httpFollow, https: httpsFollow} = followRedirects;
const isHttps = /https:?/;
const supportedProtocols = platform.protocols.map(protocol => {
return protocol + ':';
});
const flushOnFinish = (stream, [throttled, flush]) => {
stream
.on('end', flush)
.on('error', flush);
return throttled;
}
/**
* If the proxy or config beforeRedirects functions are defined, call them with the options
* object.
*
* @param {Object<string, any>} options - The options object that was passed to the request.
*
* @returns {Object<string, any>}
*/
function dispatchBeforeRedirect(options, responseDetails) {
if (options.beforeRedirects.proxy) {
options.beforeRedirects.proxy(options);
}
if (options.beforeRedirects.config) {
options.beforeRedirects.config(options, responseDetails);
}
}
/**
* If the proxy or config afterRedirects functions are defined, call them with the options
*
* @param {http.ClientRequestArgs} options
* @param {AxiosProxyConfig} configProxy configuration from Axios options object
* @param {string} location
*
* @returns {http.ClientRequestArgs}
*/
function setProxy(options, configProxy, location) {
let proxy = configProxy;
if (!proxy && proxy !== false) {
const proxyUrl = proxyFromEnv.getProxyForUrl(location);
if (proxyUrl) {
proxy = new URL(proxyUrl);
}
}
if (proxy) {
// Basic proxy authorization
if (proxy.username) {
proxy.auth = (proxy.username || '') + ':' + (proxy.password || '');
}
if (proxy.auth) {
// Support proxy auth object form
if (proxy.auth.username || proxy.auth.password) {
proxy.auth = (proxy.auth.username || '') + ':' + (proxy.auth.password || '');
}
const base64 = Buffer
.from(proxy.auth, 'utf8')
.toString('base64');
options.headers['Proxy-Authorization'] = 'Basic ' + base64;
}
options.headers.host = options.hostname + (options.port ? ':' + options.port : '');
const proxyHost = proxy.hostname || proxy.host;
options.hostname = proxyHost;
// Replace 'host' since options is not a URL object
options.host = proxyHost;
options.port = proxy.port;
options.path = location;
if (proxy.protocol) {
options.protocol = proxy.protocol.includes(':') ? proxy.protocol : `${proxy.protocol}:`;
}
}
options.beforeRedirects.proxy = function beforeRedirect(redirectOptions) {
// Configure proxy for redirected request, passing the original config proxy to apply
// the exact same logic as if the redirected request was performed by axios directly.
setProxy(redirectOptions, configProxy, redirectOptions.href);
};
}
const isHttpAdapterSupported = typeof process !== 'undefined' && utils.kindOf(process) === 'process';
// temporary hotfix
const wrapAsync = (asyncExecutor) => {
return new Promise((resolve, reject) => {
let onDone;
let isDone;
const done = (value, isRejected) => {
if (isDone) return;
isDone = true;
onDone && onDone(value, isRejected);
}
const _resolve = (value) => {
done(value);
resolve(value);
};
const _reject = (reason) => {
done(reason, true);
reject(reason);
}
asyncExecutor(_resolve, _reject, (onDoneHandler) => (onDone = onDoneHandler)).catch(_reject);
})
};
const resolveFamily = ({address, family}) => {
if (!utils.isString(address)) {
throw TypeError('address must be a string');
}
return ({
address,
family: family || (address.indexOf('.') < 0 ? 6 : 4)
});
}
const buildAddressEntry = (address, family) => resolveFamily(utils.isObject(address) ? address : {address, family});
/*eslint consistent-return:0*/
export default isHttpAdapterSupported && function httpAdapter(config) {
return wrapAsync(async function dispatchHttpRequest(resolve, reject, onDone) {
let {data, lookup, family} = config;
const {responseType, responseEncoding} = config;
const method = config.method.toUpperCase();
let isDone;
let rejected = false;
let req;
if (lookup) {
const _lookup = callbackify(lookup, (value) => utils.isArray(value) ? value : [value]);
// hotfix to support opt.all option which is required for node 20.x
lookup = (hostname, opt, cb) => {
_lookup(hostname, opt, (err, arg0, arg1) => {
if (err) {
return cb(err);
}
const addresses = utils.isArray(arg0) ? arg0.map(addr => buildAddressEntry(addr)) : [buildAddressEntry(arg0, arg1)];
opt.all ? cb(err, addresses) : cb(err, addresses[0].address, addresses[0].family);
});
}
}
// temporary internal emitter until the AxiosRequest class is implemented
const emitter = new EventEmitter();
const onFinished = () => {
if (config.cancelToken) {
config.cancelToken.unsubscribe(abort);
}
if (config.signal) {
config.signal.removeEventListener('abort', abort);
}
emitter.removeAllListeners();
}
onDone((value, isRejected) => {
isDone = true;
if (isRejected) {
rejected = true;
onFinished();
}
});
function abort(reason) {
emitter.emit('abort', !reason || reason.type ? new CanceledError(null, config, req) : reason);
}
emitter.once('abort', reject);
if (config.cancelToken || config.signal) {
config.cancelToken && config.cancelToken.subscribe(abort);
if (config.signal) {
config.signal.aborted ? abort() : config.signal.addEventListener('abort', abort);
}
}
// Parse url
const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
const parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined);
const protocol = parsed.protocol || supportedProtocols[0];
if (protocol === 'data:') {
let convertedData;
if (method !== 'GET') {
return settle(resolve, reject, {
status: 405,
statusText: 'method not allowed',
headers: {},
config
});
}
try {
convertedData = fromDataURI(config.url, responseType === 'blob', {
Blob: config.env && config.env.Blob
});
} catch (err) {
throw AxiosError.from(err, AxiosError.ERR_BAD_REQUEST, config);
}
if (responseType === 'text') {
convertedData = convertedData.toString(responseEncoding);
if (!responseEncoding || responseEncoding === 'utf8') {
convertedData = utils.stripBOM(convertedData);
}
} else if (responseType === 'stream') {
convertedData = stream.Readable.from(convertedData);
}
return settle(resolve, reject, {
data: convertedData,
status: 200,
statusText: 'OK',
headers: new AxiosHeaders(),
config
});
}
if (supportedProtocols.indexOf(protocol) === -1) {
return reject(new AxiosError(
'Unsupported protocol ' + protocol,
AxiosError.ERR_BAD_REQUEST,
config
));
}
const headers = AxiosHeaders.from(config.headers).normalize();
// Set User-Agent (required by some servers)
// See https://github.com/axios/axios/issues/69
// User-Agent is specified; handle case where no UA header is desired
// Only set header if it hasn't been set in config
headers.set('User-Agent', 'axios/' + VERSION, false);
const {onUploadProgress, onDownloadProgress} = config;
const maxRate = config.maxRate;
let maxUploadRate = undefined;
let maxDownloadRate = undefined;
// support for spec compliant FormData objects
if (utils.isSpecCompliantForm(data)) {
const userBoundary = headers.getContentType(/boundary=([-_\w\d]{10,70})/i);
data = formDataToStream(data, (formHeaders) => {
headers.set(formHeaders);
}, {
tag: `axios-${VERSION}-boundary`,
boundary: userBoundary && userBoundary[1] || undefined
});
// support for https://www.npmjs.com/package/form-data api
} else if (utils.isFormData(data) && utils.isFunction(data.getHeaders)) {
headers.set(data.getHeaders());
if (!headers.hasContentLength()) {
try {
const knownLength = await util.promisify(data.getLength).call(data);
Number.isFinite(knownLength) && knownLength >= 0 && headers.setContentLength(knownLength);
/*eslint no-empty:0*/
} catch (e) {
}
}
} else if (utils.isBlob(data) || utils.isFile(data)) {
data.size && headers.setContentType(data.type || 'application/octet-stream');
headers.setContentLength(data.size || 0);
data = stream.Readable.from(readBlob(data));
} else if (data && !utils.isStream(data)) {
if (Buffer.isBuffer(data)) {
// Nothing to do...
} else if (utils.isArrayBuffer(data)) {
data = Buffer.from(new Uint8Array(data));
} else if (utils.isString(data)) {
data = Buffer.from(data, 'utf-8');
} else {
return reject(new AxiosError(
'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream',
AxiosError.ERR_BAD_REQUEST,
config
));
}
// Add Content-Length header if data exists
headers.setContentLength(data.length, false);
if (config.maxBodyLength > -1 && data.length > config.maxBodyLength) {
return reject(new AxiosError(
'Request body larger than maxBodyLength limit',
AxiosError.ERR_BAD_REQUEST,
config
));
}
}
const contentLength = utils.toFiniteNumber(headers.getContentLength());
if (utils.isArray(maxRate)) {
maxUploadRate = maxRate[0];
maxDownloadRate = maxRate[1];
} else {
maxUploadRate = maxDownloadRate = maxRate;
}
if (data && (onUploadProgress || maxUploadRate)) {
if (!utils.isStream(data)) {
data = stream.Readable.from(data, {objectMode: false});
}
data = stream.pipeline([data, new AxiosTransformStream({
maxRate: utils.toFiniteNumber(maxUploadRate)
})], utils.noop);
onUploadProgress && data.on('progress', flushOnFinish(
data,
progressEventDecorator(
contentLength,
progressEventReducer(asyncDecorator(onUploadProgress), false, 3)
)
));
}
// HTTP basic authentication
let auth = undefined;
if (config.auth) {
const username = config.auth.username || '';
const password = config.auth.password || '';
auth = username + ':' + password;
}
if (!auth && parsed.username) {
const urlUsername = parsed.username;
const urlPassword = parsed.password;
auth = urlUsername + ':' + urlPassword;
}
auth && headers.delete('authorization');
let path;
try {
path = buildURL(
parsed.pathname + parsed.search,
config.params,
config.paramsSerializer
).replace(/^\?/, '');
} catch (err) {
const customErr = new Error(err.message);
customErr.config = config;
customErr.url = config.url;
customErr.exists = true;
return reject(customErr);
}
headers.set(
'Accept-Encoding',
'gzip, compress, deflate' + (isBrotliSupported ? ', br' : ''), false
);
const options = {
path,
method: method,
headers: headers.toJSON(),
agents: { http: config.httpAgent, https: config.httpsAgent },
auth,
protocol,
family,
beforeRedirect: dispatchBeforeRedirect,
beforeRedirects: {}
};
// cacheable-lookup integration hotfix
!utils.isUndefined(lookup) && (options.lookup = lookup);
if (config.socketPath) {
options.socketPath = config.socketPath;
} else {
options.hostname = parsed.hostname.startsWith("[") ? parsed.hostname.slice(1, -1) : parsed.hostname;
options.port = parsed.port;
setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
}
let transport;
const isHttpsRequest = isHttps.test(options.protocol);
options.agent = isHttpsRequest ? config.httpsAgent : config.httpAgent;
if (config.transport) {
transport = config.transport;
} else if (config.maxRedirects === 0) {
transport = isHttpsRequest ? https : http;
} else {
if (config.maxRedirects) {
options.maxRedirects = config.maxRedirects;
}
if (config.beforeRedirect) {
options.beforeRedirects.config = config.beforeRedirect;
}
transport = isHttpsRequest ? httpsFollow : httpFollow;
}
if (config.maxBodyLength > -1) {
options.maxBodyLength = config.maxBodyLength;
} else {
// follow-redirects does not skip this check, so use Infinity to make it always pass when axios' -1 (unlimited) is configured
options.maxBodyLength = Infinity;
}
if (config.insecureHTTPParser) {
options.insecureHTTPParser = config.insecureHTTPParser;
}
// Create the request
req = transport.request(options, function handleResponse(res) {
if (req.destroyed) return;
const streams = [res];
const responseLength = +res.headers['content-length'];
if (onDownloadProgress || maxDownloadRate) {
const transformStream = new AxiosTransformStream({
maxRate: utils.toFiniteNumber(maxDownloadRate)
});
onDownloadProgress && transformStream.on('progress', flushOnFinish(
transformStream,
progressEventDecorator(
responseLength,
progressEventReducer(asyncDecorator(onDownloadProgress), true, 3)
)
));
streams.push(transformStream);
}
// decompress the response body transparently if required
let responseStream = res;
// return the last request in case of redirects
const lastRequest = res.req || req;
// if decompress disabled we should not decompress
if (config.decompress !== false && res.headers['content-encoding']) {
// if there is no content but the headers still say it is encoded,
// remove the header so it does not confuse downstream operations
if (method === 'HEAD' || res.statusCode === 204) {
delete res.headers['content-encoding'];
}
switch ((res.headers['content-encoding'] || '').toLowerCase()) {
/*eslint default-case:0*/
case 'gzip':
case 'x-gzip':
case 'compress':
case 'x-compress':
// add the unzipper to the body stream processing pipeline
streams.push(zlib.createUnzip(zlibOptions));
// remove the content-encoding in order to not confuse downstream operations
delete res.headers['content-encoding'];
break;
case 'deflate':
streams.push(new ZlibHeaderTransformStream());
// add the unzipper to the body stream processing pipeline
streams.push(zlib.createUnzip(zlibOptions));
// remove the content-encoding in order to not confuse downstream operations
delete res.headers['content-encoding'];
break;
case 'br':
if (isBrotliSupported) {
streams.push(zlib.createBrotliDecompress(brotliOptions));
delete res.headers['content-encoding'];
}
}
}
responseStream = streams.length > 1 ? stream.pipeline(streams, utils.noop) : streams[0];
const offListeners = stream.finished(responseStream, () => {
offListeners();
onFinished();
});
const response = {
status: res.statusCode,
statusText: res.statusMessage,
headers: new AxiosHeaders(res.headers),
config,
request: lastRequest
};
if (responseType === 'stream') {
response.data = responseStream;
settle(resolve, reject, response);
} else {
const responseBuffer = [];
let totalResponseBytes = 0;
responseStream.on('data', function handleStreamData(chunk) {
responseBuffer.push(chunk);
totalResponseBytes += chunk.length;
// make sure the content length is not over the maxContentLength if specified
if (config.maxContentLength > -1 && totalResponseBytes > config.maxContentLength) {
// stream.destroy() emits the 'aborted' event before reject() is called on Node.js v16
rejected = true;
responseStream.destroy();
reject(new AxiosError('maxContentLength size of ' + config.maxContentLength + ' exceeded',
AxiosError.ERR_BAD_RESPONSE, config, lastRequest));
}
});
responseStream.on('aborted', function handlerStreamAborted() {
if (rejected) {
return;
}
const err = new AxiosError(
'stream has been aborted',
AxiosError.ERR_BAD_RESPONSE,
config,
lastRequest
);
responseStream.destroy(err);
reject(err);
});
responseStream.on('error', function handleStreamError(err) {
if (req.destroyed) return;
reject(AxiosError.from(err, null, config, lastRequest));
});
responseStream.on('end', function handleStreamEnd() {
try {
let responseData = responseBuffer.length === 1 ? responseBuffer[0] : Buffer.concat(responseBuffer);
if (responseType !== 'arraybuffer') {
responseData = responseData.toString(responseEncoding);
if (!responseEncoding || responseEncoding === 'utf8') {
responseData = utils.stripBOM(responseData);
}
}
response.data = responseData;
} catch (err) {
return reject(AxiosError.from(err, null, config, response.request, response));
}
settle(resolve, reject, response);
});
}
emitter.once('abort', err => {
if (!responseStream.destroyed) {
responseStream.emit('error', err);
responseStream.destroy();
}
});
});
emitter.once('abort', err => {
reject(err);
req.destroy(err);
});
// Handle errors
req.on('error', function handleRequestError(err) {
// @todo remove
// if (req.aborted && err.code !== AxiosError.ERR_FR_TOO_MANY_REDIRECTS) return;
reject(AxiosError.from(err, null, config, req));
});
// set TCP keep-alive to prevent the peer from dropping the connection
req.on('socket', function handleRequestSocket(socket) {
// default interval of sending ack packet is 1 minute
socket.setKeepAlive(true, 1000 * 60);
});
// Handle request timeout
if (config.timeout) {
// Force an integer timeout to avoid problems if the `req` interface doesn't handle other types.
const timeout = parseInt(config.timeout, 10);
if (Number.isNaN(timeout)) {
reject(new AxiosError(
'error trying to parse `config.timeout` to int',
AxiosError.ERR_BAD_OPTION_VALUE,
config,
req
));
return;
}
// Sometimes the response is very slow or never arrives, and the 'connect' event is blocked by the event loop.
// A plain timer would then fire and invoke abort() before the connection is made, producing "socket hang up" with code ECONNRESET.
// Under a large number of requests this leaves Node.js with more and more sockets hanging in the background,
// and those hung sockets slowly devour CPU.
// ClientRequest.setTimeout fires after the specified milliseconds and ensures abort() runs only after the connection is established.
req.setTimeout(timeout, function handleRequestTimeout() {
if (isDone) return;
let timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded';
const transitional = config.transitional || transitionalDefaults;
if (config.timeoutErrorMessage) {
timeoutErrorMessage = config.timeoutErrorMessage;
}
reject(new AxiosError(
timeoutErrorMessage,
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
config,
req
));
abort();
});
}
// Send the request
if (utils.isStream(data)) {
let ended = false;
let errored = false;
data.on('end', () => {
ended = true;
});
data.once('error', err => {
errored = true;
req.destroy(err);
});
data.on('close', () => {
if (!ended && !errored) {
abort(new CanceledError('Request stream has been aborted', config, req));
}
});
data.pipe(req);
} else {
req.end(data);
}
});
}
export const __setProxy = setProxy;
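A minimal sketch of the Node-specific options this http adapter consumes (streams, redirects, decompression, agents); the target URL, agent settings and output path are assumptions.

```js
const axios = require('axios');
const https = require('https');
const fs = require('fs');

// Node-only options handled by the http adapter above. The URL, agent
// settings and output path are placeholders.
async function mirror(url) {
  const res = await axios.get(url, {
    responseType: 'stream',                        // res.data is a Node readable stream
    maxRedirects: 5,                               // delegated to follow-redirects
    decompress: true,                              // gzip/deflate/br handled by the pipeline above
    httpsAgent: new https.Agent({ keepAlive: true })
    // proxy: { protocol: 'http', host: '127.0.0.1', port: 8080 }  // optional forward proxy
  });
  res.data.pipe(fs.createWriteStream('./mirror.html'));
}

mirror('https://example.com').catch(console.error);
```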

197
node_modules/axios/lib/adapters/xhr.js generated vendored Normal file

@ -0,0 +1,197 @@
import utils from './../utils.js';
import settle from './../core/settle.js';
import transitionalDefaults from '../defaults/transitional.js';
import AxiosError from '../core/AxiosError.js';
import CanceledError from '../cancel/CanceledError.js';
import parseProtocol from '../helpers/parseProtocol.js';
import platform from '../platform/index.js';
import AxiosHeaders from '../core/AxiosHeaders.js';
import {progressEventReducer} from '../helpers/progressEventReducer.js';
import resolveConfig from "../helpers/resolveConfig.js";
const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined';
export default isXHRAdapterSupported && function (config) {
return new Promise(function dispatchXhrRequest(resolve, reject) {
const _config = resolveConfig(config);
let requestData = _config.data;
const requestHeaders = AxiosHeaders.from(_config.headers).normalize();
let {responseType, onUploadProgress, onDownloadProgress} = _config;
let onCanceled;
let uploadThrottled, downloadThrottled;
let flushUpload, flushDownload;
function done() {
flushUpload && flushUpload(); // flush events
flushDownload && flushDownload(); // flush events
_config.cancelToken && _config.cancelToken.unsubscribe(onCanceled);
_config.signal && _config.signal.removeEventListener('abort', onCanceled);
}
let request = new XMLHttpRequest();
request.open(_config.method.toUpperCase(), _config.url, true);
// Set the request timeout in MS
request.timeout = _config.timeout;
function onloadend() {
if (!request) {
return;
}
// Prepare the response
const responseHeaders = AxiosHeaders.from(
'getAllResponseHeaders' in request && request.getAllResponseHeaders()
);
const responseData = !responseType || responseType === 'text' || responseType === 'json' ?
request.responseText : request.response;
const response = {
data: responseData,
status: request.status,
statusText: request.statusText,
headers: responseHeaders,
config,
request
};
settle(function _resolve(value) {
resolve(value);
done();
}, function _reject(err) {
reject(err);
done();
}, response);
// Clean up request
request = null;
}
if ('onloadend' in request) {
// Use onloadend if available
request.onloadend = onloadend;
} else {
// Listen for ready state to emulate onloadend
request.onreadystatechange = function handleLoad() {
if (!request || request.readyState !== 4) {
return;
}
// The request errored out and we didn't get a response; this will be
// handled by onerror instead.
// With one exception: for requests using the file: protocol, most browsers
// return a status of 0 even though the request succeeded.
if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
return;
}
// the readystate handler is called before the onerror or ontimeout handlers,
// so we should call onloadend on the next 'tick'
setTimeout(onloadend);
};
}
// Handle browser request cancellation (as opposed to a manual cancellation)
request.onabort = function handleAbort() {
if (!request) {
return;
}
reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, config, request));
// Clean up request
request = null;
};
// Handle low level network errors
request.onerror = function handleError() {
// Real errors are hidden from us by the browser
// onerror should only fire if it's a network error
reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request));
// Clean up request
request = null;
};
// Handle timeout
request.ontimeout = function handleTimeout() {
let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded';
const transitional = _config.transitional || transitionalDefaults;
if (_config.timeoutErrorMessage) {
timeoutErrorMessage = _config.timeoutErrorMessage;
}
reject(new AxiosError(
timeoutErrorMessage,
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
config,
request));
// Clean up request
request = null;
};
// Remove Content-Type if data is undefined
requestData === undefined && requestHeaders.setContentType(null);
// Add headers to the request
if ('setRequestHeader' in request) {
utils.forEach(requestHeaders.toJSON(), function setRequestHeader(val, key) {
request.setRequestHeader(key, val);
});
}
// Add withCredentials to request if needed
if (!utils.isUndefined(_config.withCredentials)) {
request.withCredentials = !!_config.withCredentials;
}
// Add responseType to request if needed
if (responseType && responseType !== 'json') {
request.responseType = _config.responseType;
}
// Handle progress if needed
if (onDownloadProgress) {
([downloadThrottled, flushDownload] = progressEventReducer(onDownloadProgress, true));
request.addEventListener('progress', downloadThrottled);
}
// Not all browsers support upload events
if (onUploadProgress && request.upload) {
([uploadThrottled, flushUpload] = progressEventReducer(onUploadProgress));
request.upload.addEventListener('progress', uploadThrottled);
request.upload.addEventListener('loadend', flushUpload);
}
if (_config.cancelToken || _config.signal) {
// Handle cancellation
// eslint-disable-next-line func-names
onCanceled = cancel => {
if (!request) {
return;
}
reject(!cancel || cancel.type ? new CanceledError(null, config, request) : cancel);
request.abort();
request = null;
};
_config.cancelToken && _config.cancelToken.subscribe(onCanceled);
if (_config.signal) {
_config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled);
}
}
const protocol = parseProtocol(_config.url);
if (protocol && platform.protocols.indexOf(protocol) === -1) {
reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config));
return;
}
// Send the request
request.send(requestData || null);
});
}
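A browser-oriented sketch of the upload-progress path of this XHR adapter; the endpoint is an assumption and the percentages come from the throttled progress events wired up above.

```js
import axios from 'axios';

// Browser-side sketch for the XHR adapter above: post a FormData payload and
// track upload progress. The endpoint is a placeholder.
async function uploadCover(file) {
  const form = new FormData();
  form.append('cover', file);

  const res = await axios.post('/api/upload', form, {
    onUploadProgress: (e) => {
      // events are throttled by progressEventReducer before reaching this callback
      if (e.total) console.log(`upload ${Math.round((e.loaded / e.total) * 100)}%`);
    }
  });
  return res.data;
}
```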

89
node_modules/axios/lib/axios.js generated vendored Normal file

@ -0,0 +1,89 @@
'use strict';
import utils from './utils.js';
import bind from './helpers/bind.js';
import Axios from './core/Axios.js';
import mergeConfig from './core/mergeConfig.js';
import defaults from './defaults/index.js';
import formDataToJSON from './helpers/formDataToJSON.js';
import CanceledError from './cancel/CanceledError.js';
import CancelToken from './cancel/CancelToken.js';
import isCancel from './cancel/isCancel.js';
import {VERSION} from './env/data.js';
import toFormData from './helpers/toFormData.js';
import AxiosError from './core/AxiosError.js';
import spread from './helpers/spread.js';
import isAxiosError from './helpers/isAxiosError.js';
import AxiosHeaders from "./core/AxiosHeaders.js";
import adapters from './adapters/adapters.js';
import HttpStatusCode from './helpers/HttpStatusCode.js';
/**
* Create an instance of Axios
*
* @param {Object} defaultConfig The default config for the instance
*
* @returns {Axios} A new instance of Axios
*/
function createInstance(defaultConfig) {
const context = new Axios(defaultConfig);
const instance = bind(Axios.prototype.request, context);
// Copy axios.prototype to instance
utils.extend(instance, Axios.prototype, context, {allOwnKeys: true});
// Copy context to instance
utils.extend(instance, context, null, {allOwnKeys: true});
// Factory for creating new instances
instance.create = function create(instanceConfig) {
return createInstance(mergeConfig(defaultConfig, instanceConfig));
};
return instance;
}
// Create the default instance to be exported
const axios = createInstance(defaults);
// Expose Axios class to allow class inheritance
axios.Axios = Axios;
// Expose Cancel & CancelToken
axios.CanceledError = CanceledError;
axios.CancelToken = CancelToken;
axios.isCancel = isCancel;
axios.VERSION = VERSION;
axios.toFormData = toFormData;
// Expose AxiosError class
axios.AxiosError = AxiosError;
// alias for CanceledError for backward compatibility
axios.Cancel = axios.CanceledError;
// Expose all/spread
axios.all = function all(promises) {
return Promise.all(promises);
};
axios.spread = spread;
// Expose isAxiosError
axios.isAxiosError = isAxiosError;
// Expose mergeConfig
axios.mergeConfig = mergeConfig;
axios.AxiosHeaders = AxiosHeaders;
axios.formToJSON = thing => formDataToJSON(utils.isHTMLForm(thing) ? new FormData(thing) : thing);
axios.getAdapter = adapters.getAdapter;
axios.HttpStatusCode = HttpStatusCode;
axios.default = axios;
// this module should only have a default export
export default axios
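A minimal sketch of the instance factory this module exposes as `axios.create`; the base URL, timeout and headers are placeholder defaults.

```js
const axios = require('axios');

// createInstance() above is what backs axios.create(): the returned callable
// is Axios.prototype.request bound to a context whose defaults are merged
// into every call. baseURL and headers here are placeholders.
const api = axios.create({
  baseURL: 'http://localhost:3000/api',
  timeout: 10000,
  headers: { Accept: 'application/json' }
});

api.get('/users/1').then(({ data }) => console.log(data));
```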

135
node_modules/axios/lib/cancel/CancelToken.js generated vendored Normal file

@ -0,0 +1,135 @@
'use strict';
import CanceledError from './CanceledError.js';
/**
* A `CancelToken` is an object that can be used to request cancellation of an operation.
*
* @param {Function} executor The executor function.
*
* @returns {CancelToken}
*/
class CancelToken {
constructor(executor) {
if (typeof executor !== 'function') {
throw new TypeError('executor must be a function.');
}
let resolvePromise;
this.promise = new Promise(function promiseExecutor(resolve) {
resolvePromise = resolve;
});
const token = this;
// eslint-disable-next-line func-names
this.promise.then(cancel => {
if (!token._listeners) return;
let i = token._listeners.length;
while (i-- > 0) {
token._listeners[i](cancel);
}
token._listeners = null;
});
// eslint-disable-next-line func-names
this.promise.then = onfulfilled => {
let _resolve;
// eslint-disable-next-line func-names
const promise = new Promise(resolve => {
token.subscribe(resolve);
_resolve = resolve;
}).then(onfulfilled);
promise.cancel = function reject() {
token.unsubscribe(_resolve);
};
return promise;
};
executor(function cancel(message, config, request) {
if (token.reason) {
// Cancellation has already been requested
return;
}
token.reason = new CanceledError(message, config, request);
resolvePromise(token.reason);
});
}
/**
* Throws a `CanceledError` if cancellation has been requested.
*/
throwIfRequested() {
if (this.reason) {
throw this.reason;
}
}
/**
* Subscribe to the cancel signal
*/
subscribe(listener) {
if (this.reason) {
listener(this.reason);
return;
}
if (this._listeners) {
this._listeners.push(listener);
} else {
this._listeners = [listener];
}
}
/**
* Unsubscribe from the cancel signal
*/
unsubscribe(listener) {
if (!this._listeners) {
return;
}
const index = this._listeners.indexOf(listener);
if (index !== -1) {
this._listeners.splice(index, 1);
}
}
toAbortSignal() {
const controller = new AbortController();
const abort = (err) => {
controller.abort(err);
};
this.subscribe(abort);
controller.signal.unsubscribe = () => this.unsubscribe(abort);
return controller.signal;
}
/**
* Returns an object that contains a new `CancelToken` and a function that, when called,
* cancels the `CancelToken`.
*/
static source() {
let cancel;
const token = new CancelToken(function executor(c) {
cancel = c;
});
return {
token,
cancel
};
}
}
export default CancelToken;
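A short sketch of cancelling a request with the `CancelToken` implemented above; the endpoint and the cancellation message are assumptions.

```js
const axios = require('axios');

// Cancelling an in-flight request with the CancelToken class above.
// The URL is a placeholder.
const source = axios.CancelToken.source();

axios.get('/api/slow-endpoint', { cancelToken: source.token })
  .catch((err) => {
    if (axios.isCancel(err)) {
      console.log('request canceled:', err.message);
    } else {
      throw err;
    }
  });

// Later, e.g. when the user navigates away:
source.cancel('operation canceled by the user');
```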

25
node_modules/axios/lib/cancel/CanceledError.js generated vendored Normal file

@ -0,0 +1,25 @@
'use strict';
import AxiosError from '../core/AxiosError.js';
import utils from '../utils.js';
/**
* A `CanceledError` is an object that is thrown when an operation is canceled.
*
* @param {string=} message The message.
* @param {Object=} config The config.
* @param {Object=} request The request.
*
* @returns {CanceledError} The created error.
*/
function CanceledError(message, config, request) {
// eslint-disable-next-line no-eq-null,eqeqeq
AxiosError.call(this, message == null ? 'canceled' : message, AxiosError.ERR_CANCELED, config, request);
this.name = 'CanceledError';
}
utils.inherits(CanceledError, AxiosError, {
__CANCEL__: true
});
export default CanceledError;

5
node_modules/axios/lib/cancel/isCancel.js generated vendored Normal file

@ -0,0 +1,5 @@
'use strict';
export default function isCancel(value) {
return !!(value && value.__CANCEL__);
}

242
node_modules/axios/lib/core/Axios.js generated vendored Normal file

@ -0,0 +1,242 @@
'use strict';
import utils from './../utils.js';
import buildURL from '../helpers/buildURL.js';
import InterceptorManager from './InterceptorManager.js';
import dispatchRequest from './dispatchRequest.js';
import mergeConfig from './mergeConfig.js';
import buildFullPath from './buildFullPath.js';
import validator from '../helpers/validator.js';
import AxiosHeaders from './AxiosHeaders.js';
const validators = validator.validators;
/**
* Create a new instance of Axios
*
* @param {Object} instanceConfig The default config for the instance
*
* @return {Axios} A new instance of Axios
*/
class Axios {
constructor(instanceConfig) {
this.defaults = instanceConfig || {};
this.interceptors = {
request: new InterceptorManager(),
response: new InterceptorManager()
};
}
/**
* Dispatch a request
*
* @param {String|Object} configOrUrl The config specific for this request (merged with this.defaults)
* @param {?Object} config
*
* @returns {Promise} The Promise to be fulfilled
*/
async request(configOrUrl, config) {
try {
return await this._request(configOrUrl, config);
} catch (err) {
if (err instanceof Error) {
let dummy = {};
Error.captureStackTrace ? Error.captureStackTrace(dummy) : (dummy = new Error());
// slice off the Error: ... line
const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : '';
try {
if (!err.stack) {
err.stack = stack;
// match without the 2 top stack lines
} else if (stack && !String(err.stack).endsWith(stack.replace(/^.+\n.+\n/, ''))) {
err.stack += '\n' + stack
}
} catch (e) {
// ignore the case where "stack" is an un-writable property
}
}
throw err;
}
}
_request(configOrUrl, config) {
/*eslint no-param-reassign:0*/
// Allow for axios('example/url'[, config]) a la fetch API
if (typeof configOrUrl === 'string') {
config = config || {};
config.url = configOrUrl;
} else {
config = configOrUrl || {};
}
config = mergeConfig(this.defaults, config);
const {transitional, paramsSerializer, headers} = config;
if (transitional !== undefined) {
validator.assertOptions(transitional, {
silentJSONParsing: validators.transitional(validators.boolean),
forcedJSONParsing: validators.transitional(validators.boolean),
clarifyTimeoutError: validators.transitional(validators.boolean)
}, false);
}
if (paramsSerializer != null) {
if (utils.isFunction(paramsSerializer)) {
config.paramsSerializer = {
serialize: paramsSerializer
}
} else {
validator.assertOptions(paramsSerializer, {
encode: validators.function,
serialize: validators.function
}, true);
}
}
// Set config.allowAbsoluteUrls
if (config.allowAbsoluteUrls !== undefined) {
// do nothing
} else if (this.defaults.allowAbsoluteUrls !== undefined) {
config.allowAbsoluteUrls = this.defaults.allowAbsoluteUrls;
} else {
config.allowAbsoluteUrls = true;
}
validator.assertOptions(config, {
baseUrl: validators.spelling('baseURL'),
withXsrfToken: validators.spelling('withXSRFToken')
}, true);
// Set config.method
config.method = (config.method || this.defaults.method || 'get').toLowerCase();
// Flatten headers
let contextHeaders = headers && utils.merge(
headers.common,
headers[config.method]
);
headers && utils.forEach(
['delete', 'get', 'head', 'post', 'put', 'patch', 'common'],
(method) => {
delete headers[method];
}
);
config.headers = AxiosHeaders.concat(contextHeaders, headers);
// filter out skipped interceptors
const requestInterceptorChain = [];
let synchronousRequestInterceptors = true;
this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) {
if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) {
return;
}
synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous;
requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected);
});
const responseInterceptorChain = [];
this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) {
responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected);
});
let promise;
let i = 0;
let len;
if (!synchronousRequestInterceptors) {
const chain = [dispatchRequest.bind(this), undefined];
chain.unshift.apply(chain, requestInterceptorChain);
chain.push.apply(chain, responseInterceptorChain);
len = chain.length;
promise = Promise.resolve(config);
while (i < len) {
promise = promise.then(chain[i++], chain[i++]);
}
return promise;
}
len = requestInterceptorChain.length;
let newConfig = config;
i = 0;
while (i < len) {
const onFulfilled = requestInterceptorChain[i++];
const onRejected = requestInterceptorChain[i++];
try {
newConfig = onFulfilled(newConfig);
} catch (error) {
onRejected.call(this, error);
break;
}
}
try {
promise = dispatchRequest.call(this, newConfig);
} catch (error) {
return Promise.reject(error);
}
i = 0;
len = responseInterceptorChain.length;
while (i < len) {
promise = promise.then(responseInterceptorChain[i++], responseInterceptorChain[i++]);
}
return promise;
}
getUri(config) {
config = mergeConfig(this.defaults, config);
const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
return buildURL(fullPath, config.params, config.paramsSerializer);
}
}
// Provide aliases for supported request methods
utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) {
/*eslint func-names:0*/
Axios.prototype[method] = function(url, config) {
return this.request(mergeConfig(config || {}, {
method,
url,
data: (config || {}).data
}));
};
});
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
/*eslint func-names:0*/
function generateHTTPMethod(isForm) {
return function httpMethod(url, data, config) {
return this.request(mergeConfig(config || {}, {
method,
headers: isForm ? {
'Content-Type': 'multipart/form-data'
} : {},
url,
data
}));
};
}
Axios.prototype[method] = generateHTTPMethod();
Axios.prototype[method + 'Form'] = generateHTTPMethod(true);
});
export default Axios;
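A minimal sketch of registering interceptors against the chains built in `_request` above, including the `synchronous` and `runWhen` options; the base URL and token value are assumptions.

```js
const axios = require('axios');

// Interceptors feed the request/response chains assembled in _request above.
// The base URL and the token value are placeholders.
const client = axios.create({ baseURL: 'http://localhost:3000/api' });

client.interceptors.request.use(
  (config) => {
    config.headers.set('Authorization', 'Bearer <token>');
    return config;
  },
  null,
  {
    synchronous: true,                              // keeps the synchronous chain path
    runWhen: (config) => config.method !== 'get'    // filtered out for GET requests
  }
);

client.interceptors.response.use(
  (response) => response,
  (error) => {
    console.error('request failed:', error.message);
    return Promise.reject(error);
  }
);
```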

103
node_modules/axios/lib/core/AxiosError.js generated vendored Normal file

@ -0,0 +1,103 @@
'use strict';
import utils from '../utils.js';
/**
* Create an Error with the specified message, config, error code, request and response.
*
* @param {string} message The error message.
* @param {string} [code] The error code (for example, 'ECONNABORTED').
* @param {Object} [config] The config.
* @param {Object} [request] The request.
* @param {Object} [response] The response.
*
* @returns {Error} The created error.
*/
function AxiosError(message, code, config, request, response) {
Error.call(this);
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
} else {
this.stack = (new Error()).stack;
}
this.message = message;
this.name = 'AxiosError';
code && (this.code = code);
config && (this.config = config);
request && (this.request = request);
if (response) {
this.response = response;
this.status = response.status ? response.status : null;
}
}
utils.inherits(AxiosError, Error, {
toJSON: function toJSON() {
return {
// Standard
message: this.message,
name: this.name,
// Microsoft
description: this.description,
number: this.number,
// Mozilla
fileName: this.fileName,
lineNumber: this.lineNumber,
columnNumber: this.columnNumber,
stack: this.stack,
// Axios
config: utils.toJSONObject(this.config),
code: this.code,
status: this.status
};
}
});
const prototype = AxiosError.prototype;
const descriptors = {};
[
'ERR_BAD_OPTION_VALUE',
'ERR_BAD_OPTION',
'ECONNABORTED',
'ETIMEDOUT',
'ERR_NETWORK',
'ERR_FR_TOO_MANY_REDIRECTS',
'ERR_DEPRECATED',
'ERR_BAD_RESPONSE',
'ERR_BAD_REQUEST',
'ERR_CANCELED',
'ERR_NOT_SUPPORT',
'ERR_INVALID_URL'
// eslint-disable-next-line func-names
].forEach(code => {
descriptors[code] = {value: code};
});
Object.defineProperties(AxiosError, descriptors);
Object.defineProperty(prototype, 'isAxiosError', {value: true});
// eslint-disable-next-line func-names
AxiosError.from = (error, code, config, request, response, customProps) => {
const axiosError = Object.create(prototype);
utils.toFlatObject(error, axiosError, function filter(obj) {
return obj !== Error.prototype;
}, prop => {
return prop !== 'isAxiosError';
});
AxiosError.call(axiosError, error.message, code, config, request, response);
axiosError.cause = error;
axiosError.name = error.name;
customProps && Object.assign(axiosError, customProps);
return axiosError;
};
export default AxiosError;
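A hedged sketch of distinguishing error cases with the codes attached to `AxiosError` above; the URL and timeout are assumptions.

```js
const axios = require('axios');

// Telling failure modes apart with the static codes defined above.
// The URL is a placeholder.
async function safeGet(url) {
  try {
    return (await axios.get(url, { timeout: 3000 })).data;
  } catch (err) {
    if (axios.isAxiosError(err)) {
      if (err.code === axios.AxiosError.ECONNABORTED || err.code === axios.AxiosError.ETIMEDOUT) {
        console.warn('request timed out');
      } else if (err.response) {
        console.warn('HTTP error', err.response.status);  // server replied with a non-2xx status
      } else {
        console.warn('network error', err.code);
      }
    }
    throw err;
  }
}
```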

314
node_modules/axios/lib/core/AxiosHeaders.js generated vendored Normal file

@ -0,0 +1,314 @@
'use strict';
import utils from '../utils.js';
import parseHeaders from '../helpers/parseHeaders.js';
const $internals = Symbol('internals');
function normalizeHeader(header) {
return header && String(header).trim().toLowerCase();
}
function normalizeValue(value) {
if (value === false || value == null) {
return value;
}
return utils.isArray(value) ? value.map(normalizeValue) : String(value);
}
function parseTokens(str) {
const tokens = Object.create(null);
const tokensRE = /([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;
let match;
while ((match = tokensRE.exec(str))) {
tokens[match[1]] = match[2];
}
return tokens;
}
const isValidHeaderName = (str) => /^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(str.trim());
function matchHeaderValue(context, value, header, filter, isHeaderNameFilter) {
if (utils.isFunction(filter)) {
return filter.call(this, value, header);
}
if (isHeaderNameFilter) {
value = header;
}
if (!utils.isString(value)) return;
if (utils.isString(filter)) {
return value.indexOf(filter) !== -1;
}
if (utils.isRegExp(filter)) {
return filter.test(value);
}
}
function formatHeader(header) {
return header.trim()
.toLowerCase().replace(/([a-z\d])(\w*)/g, (w, char, str) => {
return char.toUpperCase() + str;
});
}
function buildAccessors(obj, header) {
const accessorName = utils.toCamelCase(' ' + header);
['get', 'set', 'has'].forEach(methodName => {
Object.defineProperty(obj, methodName + accessorName, {
value: function(arg1, arg2, arg3) {
return this[methodName].call(this, header, arg1, arg2, arg3);
},
configurable: true
});
});
}
class AxiosHeaders {
constructor(headers) {
headers && this.set(headers);
}
set(header, valueOrRewrite, rewrite) {
const self = this;
function setHeader(_value, _header, _rewrite) {
const lHeader = normalizeHeader(_header);
if (!lHeader) {
throw new Error('header name must be a non-empty string');
}
const key = utils.findKey(self, lHeader);
if(!key || self[key] === undefined || _rewrite === true || (_rewrite === undefined && self[key] !== false)) {
self[key || _header] = normalizeValue(_value);
}
}
const setHeaders = (headers, _rewrite) =>
utils.forEach(headers, (_value, _header) => setHeader(_value, _header, _rewrite));
if (utils.isPlainObject(header) || header instanceof this.constructor) {
setHeaders(header, valueOrRewrite)
} else if(utils.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) {
setHeaders(parseHeaders(header), valueOrRewrite);
} else if (utils.isObject(header) && utils.isIterable(header)) {
let obj = {}, dest, key;
for (const entry of header) {
if (!utils.isArray(entry)) {
throw TypeError('Object iterator must return a key-value pair');
}
obj[key = entry[0]] = (dest = obj[key]) ?
(utils.isArray(dest) ? [...dest, entry[1]] : [dest, entry[1]]) : entry[1];
}
setHeaders(obj, valueOrRewrite)
} else {
header != null && setHeader(valueOrRewrite, header, rewrite);
}
return this;
}
get(header, parser) {
header = normalizeHeader(header);
if (header) {
const key = utils.findKey(this, header);
if (key) {
const value = this[key];
if (!parser) {
return value;
}
if (parser === true) {
return parseTokens(value);
}
if (utils.isFunction(parser)) {
return parser.call(this, value, key);
}
if (utils.isRegExp(parser)) {
return parser.exec(value);
}
throw new TypeError('parser must be boolean|regexp|function');
}
}
}
has(header, matcher) {
header = normalizeHeader(header);
if (header) {
const key = utils.findKey(this, header);
return !!(key && this[key] !== undefined && (!matcher || matchHeaderValue(this, this[key], key, matcher)));
}
return false;
}
delete(header, matcher) {
const self = this;
let deleted = false;
function deleteHeader(_header) {
_header = normalizeHeader(_header);
if (_header) {
const key = utils.findKey(self, _header);
if (key && (!matcher || matchHeaderValue(self, self[key], key, matcher))) {
delete self[key];
deleted = true;
}
}
}
if (utils.isArray(header)) {
header.forEach(deleteHeader);
} else {
deleteHeader(header);
}
return deleted;
}
clear(matcher) {
const keys = Object.keys(this);
let i = keys.length;
let deleted = false;
while (i--) {
const key = keys[i];
if(!matcher || matchHeaderValue(this, this[key], key, matcher, true)) {
delete this[key];
deleted = true;
}
}
return deleted;
}
normalize(format) {
const self = this;
const headers = {};
utils.forEach(this, (value, header) => {
const key = utils.findKey(headers, header);
if (key) {
self[key] = normalizeValue(value);
delete self[header];
return;
}
const normalized = format ? formatHeader(header) : String(header).trim();
if (normalized !== header) {
delete self[header];
}
self[normalized] = normalizeValue(value);
headers[normalized] = true;
});
return this;
}
concat(...targets) {
return this.constructor.concat(this, ...targets);
}
toJSON(asStrings) {
const obj = Object.create(null);
utils.forEach(this, (value, header) => {
value != null && value !== false && (obj[header] = asStrings && utils.isArray(value) ? value.join(', ') : value);
});
return obj;
}
[Symbol.iterator]() {
return Object.entries(this.toJSON())[Symbol.iterator]();
}
toString() {
return Object.entries(this.toJSON()).map(([header, value]) => header + ': ' + value).join('\n');
}
getSetCookie() {
return this.get("set-cookie") || [];
}
get [Symbol.toStringTag]() {
return 'AxiosHeaders';
}
static from(thing) {
return thing instanceof this ? thing : new this(thing);
}
static concat(first, ...targets) {
const computed = new this(first);
targets.forEach((target) => computed.set(target));
return computed;
}
static accessor(header) {
const internals = this[$internals] = (this[$internals] = {
accessors: {}
});
const accessors = internals.accessors;
const prototype = this.prototype;
function defineAccessor(_header) {
const lHeader = normalizeHeader(_header);
if (!accessors[lHeader]) {
buildAccessors(prototype, _header);
accessors[lHeader] = true;
}
}
utils.isArray(header) ? header.forEach(defineAccessor) : defineAccessor(header);
return this;
}
}
AxiosHeaders.accessor(['Content-Type', 'Content-Length', 'Accept', 'Accept-Encoding', 'User-Agent', 'Authorization']);
// reserved names hotfix
utils.reduceDescriptors(AxiosHeaders.prototype, ({value}, key) => {
let mapped = key[0].toUpperCase() + key.slice(1); // map `set` => `Set`
return {
get: () => value,
set(headerValue) {
this[mapped] = headerValue;
}
}
});
utils.freezeMethods(AxiosHeaders);
export default AxiosHeaders;
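A minimal sketch of the header API defined above, using the AxiosHeaders class re-exported on the axios package (header values are illustrative).
import axios from 'axios';

const headers = new axios.AxiosHeaders();
headers.set('Content-Type', 'application/json');
headers.setAuthorization('Bearer abc123');        // accessor generated by AxiosHeaders.accessor()

headers.has('content-type');                      // true – lookups are case-insensitive
headers.get('authorization');                     // 'Bearer abc123'

const merged = axios.AxiosHeaders.concat(headers, { Accept: 'application/json' });
console.log(merged.toJSON());                     // plain object of normalized headers
console.log(String(merged));                      // "Content-Type: application/json\n..."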

71
node_modules/axios/lib/core/InterceptorManager.js generated vendored Normal file
View File

@ -0,0 +1,71 @@
'use strict';
import utils from './../utils.js';
class InterceptorManager {
constructor() {
this.handlers = [];
}
/**
* Add a new interceptor to the stack
*
* @param {Function} fulfilled The function to handle `then` for a `Promise`
* @param {Function} rejected The function to handle `reject` for a `Promise`
*
* @return {Number} An ID used to remove interceptor later
*/
use(fulfilled, rejected, options) {
this.handlers.push({
fulfilled,
rejected,
synchronous: options ? options.synchronous : false,
runWhen: options ? options.runWhen : null
});
return this.handlers.length - 1;
}
/**
* Remove an interceptor from the stack
*
* @param {Number} id The ID that was returned by `use`
*
* @returns {Boolean} `true` if the interceptor was removed, `false` otherwise
*/
eject(id) {
if (this.handlers[id]) {
this.handlers[id] = null;
}
}
/**
* Clear all interceptors from the stack
*
* @returns {void}
*/
clear() {
if (this.handlers) {
this.handlers = [];
}
}
/**
* Iterate over all the registered interceptors
*
* This method is particularly useful for skipping over any
* interceptors that may have become `null` after calling `eject`.

*
* @param {Function} fn The function to call for each interceptor
*
* @returns {void}
*/
forEach(fn) {
utils.forEach(this.handlers, function forEachHandler(h) {
if (h !== null) {
fn(h);
}
});
}
}
export default InterceptorManager;
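A minimal sketch of the manager in use; in practice it is reached through instance.interceptors.request and instance.interceptors.response (the URL and header name are illustrative).
import axios from 'axios';

const api = axios.create({ baseURL: 'http://localhost:3000' });

// `use` returns the handler's index in the stack, which `eject` accepts later
const id = api.interceptors.request.use(
  config => {
    config.headers.set('X-Request-Start', String(Date.now()));
    return config;
  },
  error => Promise.reject(error)
);

// Ejected handlers are set to null and skipped by forEach when dispatching
api.interceptors.request.eject(id);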

8
node_modules/axios/lib/core/README.md generated vendored Normal file
View File

@ -0,0 +1,8 @@
# axios // core
The modules found in `core/` should be modules that are specific to the domain logic of axios. These modules would most likely not make sense to be consumed outside of the axios module, as their logic is too specific. Some examples of core modules are:
- Dispatching requests
- Requests sent via `adapters/` (see lib/adapters/README.md)
- Managing interceptors
- Handling config

22
node_modules/axios/lib/core/buildFullPath.js generated vendored Normal file
View File

@ -0,0 +1,22 @@
'use strict';
import isAbsoluteURL from '../helpers/isAbsoluteURL.js';
import combineURLs from '../helpers/combineURLs.js';
/**
* Creates a new URL by combining the baseURL with the requestedURL,
* only when the requestedURL is not already an absolute URL.
* If the requestedURL is absolute, this function returns the requestedURL untouched.
*
* @param {string} baseURL The base URL
* @param {string} requestedURL Absolute or relative URL to combine
*
* @returns {string} The combined full path
*/
export default function buildFullPath(baseURL, requestedURL, allowAbsoluteUrls) {
let isRelativeUrl = !isAbsoluteURL(requestedURL);
if (baseURL && (isRelativeUrl || allowAbsoluteUrls == false)) {
return combineURLs(baseURL, requestedURL);
}
return requestedURL;
}
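Illustrative calls (values are hypothetical); only relative URLs are combined with the base unless absolute URLs are explicitly disallowed.
buildFullPath('http://localhost:3000/api', '/users');
// -> 'http://localhost:3000/api/users'

buildFullPath('http://localhost:3000/api', 'https://other.example/health');
// -> 'https://other.example/health' (absolute URL returned untouched)

buildFullPath('http://localhost:3000/api', 'https://other.example/health', false);
// -> the base is still prepended, because allowAbsoluteUrls === false forces combineURLs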

81
node_modules/axios/lib/core/dispatchRequest.js generated vendored Normal file
View File

@ -0,0 +1,81 @@
'use strict';
import transformData from './transformData.js';
import isCancel from '../cancel/isCancel.js';
import defaults from '../defaults/index.js';
import CanceledError from '../cancel/CanceledError.js';
import AxiosHeaders from '../core/AxiosHeaders.js';
import adapters from "../adapters/adapters.js";
/**
* Throws a `CanceledError` if cancellation has been requested.
*
* @param {Object} config The config that is to be used for the request
*
* @returns {void}
*/
function throwIfCancellationRequested(config) {
if (config.cancelToken) {
config.cancelToken.throwIfRequested();
}
if (config.signal && config.signal.aborted) {
throw new CanceledError(null, config);
}
}
/**
* Dispatch a request to the server using the configured adapter.
*
* @param {object} config The config that is to be used for the request
*
* @returns {Promise} The Promise to be fulfilled
*/
export default function dispatchRequest(config) {
throwIfCancellationRequested(config);
config.headers = AxiosHeaders.from(config.headers);
// Transform request data
config.data = transformData.call(
config,
config.transformRequest
);
if (['post', 'put', 'patch'].indexOf(config.method) !== -1) {
config.headers.setContentType('application/x-www-form-urlencoded', false);
}
const adapter = adapters.getAdapter(config.adapter || defaults.adapter);
return adapter(config).then(function onAdapterResolution(response) {
throwIfCancellationRequested(config);
// Transform response data
response.data = transformData.call(
config,
config.transformResponse,
response
);
response.headers = AxiosHeaders.from(response.headers);
return response;
}, function onAdapterRejection(reason) {
if (!isCancel(reason)) {
throwIfCancellationRequested(config);
// Transform response data
if (reason && reason.response) {
reason.response.data = transformData.call(
config,
config.transformResponse,
reason.response
);
reason.response.headers = AxiosHeaders.from(reason.response.headers);
}
}
return Promise.reject(reason);
});
}
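A minimal sketch of the cancellation path above: an already-aborted signal makes the request reject with a CanceledError before the adapter runs (the URL is illustrative).
import axios from 'axios';

const controller = new AbortController();
controller.abort();

axios.get('http://localhost:3000/users', { signal: controller.signal })
  .catch(err => {
    console.log(axios.isCancel(err)); // true – throwIfCancellationRequested fired first
  });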

106
node_modules/axios/lib/core/mergeConfig.js generated vendored Normal file
View File

@ -0,0 +1,106 @@
'use strict';
import utils from '../utils.js';
import AxiosHeaders from "./AxiosHeaders.js";
const headersToObject = (thing) => thing instanceof AxiosHeaders ? { ...thing } : thing;
/**
* Config-specific merge-function which creates a new config-object
* by merging two configuration objects together.
*
* @param {Object} config1
* @param {Object} config2
*
* @returns {Object} New object resulting from merging config2 to config1
*/
export default function mergeConfig(config1, config2) {
// eslint-disable-next-line no-param-reassign
config2 = config2 || {};
const config = {};
function getMergedValue(target, source, prop, caseless) {
if (utils.isPlainObject(target) && utils.isPlainObject(source)) {
return utils.merge.call({caseless}, target, source);
} else if (utils.isPlainObject(source)) {
return utils.merge({}, source);
} else if (utils.isArray(source)) {
return source.slice();
}
return source;
}
// eslint-disable-next-line consistent-return
function mergeDeepProperties(a, b, prop , caseless) {
if (!utils.isUndefined(b)) {
return getMergedValue(a, b, prop , caseless);
} else if (!utils.isUndefined(a)) {
return getMergedValue(undefined, a, prop , caseless);
}
}
// eslint-disable-next-line consistent-return
function valueFromConfig2(a, b) {
if (!utils.isUndefined(b)) {
return getMergedValue(undefined, b);
}
}
// eslint-disable-next-line consistent-return
function defaultToConfig2(a, b) {
if (!utils.isUndefined(b)) {
return getMergedValue(undefined, b);
} else if (!utils.isUndefined(a)) {
return getMergedValue(undefined, a);
}
}
// eslint-disable-next-line consistent-return
function mergeDirectKeys(a, b, prop) {
if (prop in config2) {
return getMergedValue(a, b);
} else if (prop in config1) {
return getMergedValue(undefined, a);
}
}
const mergeMap = {
url: valueFromConfig2,
method: valueFromConfig2,
data: valueFromConfig2,
baseURL: defaultToConfig2,
transformRequest: defaultToConfig2,
transformResponse: defaultToConfig2,
paramsSerializer: defaultToConfig2,
timeout: defaultToConfig2,
timeoutMessage: defaultToConfig2,
withCredentials: defaultToConfig2,
withXSRFToken: defaultToConfig2,
adapter: defaultToConfig2,
responseType: defaultToConfig2,
xsrfCookieName: defaultToConfig2,
xsrfHeaderName: defaultToConfig2,
onUploadProgress: defaultToConfig2,
onDownloadProgress: defaultToConfig2,
decompress: defaultToConfig2,
maxContentLength: defaultToConfig2,
maxBodyLength: defaultToConfig2,
beforeRedirect: defaultToConfig2,
transport: defaultToConfig2,
httpAgent: defaultToConfig2,
httpsAgent: defaultToConfig2,
cancelToken: defaultToConfig2,
socketPath: defaultToConfig2,
responseEncoding: defaultToConfig2,
validateStatus: mergeDirectKeys,
headers: (a, b , prop) => mergeDeepProperties(headersToObject(a), headersToObject(b),prop, true)
};
utils.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) {
const merge = mergeMap[prop] || mergeDeepProperties;
const configValue = merge(config1[prop], config2[prop], prop);
(utils.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue);
});
return config;
}
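A minimal sketch of these merge rules as seen from an instance request, where the instance defaults act as config1 and the per-call options as config2 (values are illustrative).
import axios from 'axios';

const api = axios.create({
  baseURL: 'http://localhost:3000',
  timeout: 5000,
  headers: { Accept: 'application/json' }
});

// url/method/data come only from the per-call config (valueFromConfig2),
// timeout follows defaultToConfig2 (the per-call value wins),
// headers are merged deeply and case-insensitively, so both headers are sent.
api.get('/users', {
  timeout: 10000,
  headers: { Authorization: 'Bearer abc123' }
});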

27
node_modules/axios/lib/core/settle.js generated vendored Normal file
View File

@ -0,0 +1,27 @@
'use strict';
import AxiosError from './AxiosError.js';
/**
* Resolve or reject a Promise based on response status.
*
* @param {Function} resolve A function that resolves the promise.
* @param {Function} reject A function that rejects the promise.
* @param {object} response The response.
*
* @returns {object} The response.
*/
export default function settle(resolve, reject, response) {
const validateStatus = response.config.validateStatus;
if (!response.status || !validateStatus || validateStatus(response.status)) {
resolve(response);
} else {
reject(new AxiosError(
'Request failed with status code ' + response.status,
[AxiosError.ERR_BAD_REQUEST, AxiosError.ERR_BAD_RESPONSE][Math.floor(response.status / 100) - 4],
response.config,
response.request,
response
));
}
}
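A minimal sketch of settle's contract with a hand-built response object (the import is relative to this file; status and validator are illustrative).
import settle from './settle.js';

const response = {
  status: 404,
  config: { validateStatus: status => status >= 200 && status < 300 },
  request: {}
};

new Promise((resolve, reject) => settle(resolve, reject, response))
  .catch(err => console.log(err.code)); // 'ERR_BAD_REQUEST' – Math.floor(404 / 100) - 4 === 0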

28
node_modules/axios/lib/core/transformData.js generated vendored Normal file
View File

@ -0,0 +1,28 @@
'use strict';
import utils from './../utils.js';
import defaults from '../defaults/index.js';
import AxiosHeaders from '../core/AxiosHeaders.js';
/**
* Transform the data for a request or a response
*
* @param {Array|Function} fns A single function or Array of functions
* @param {?Object} response The response object
*
* @returns {*} The resulting transformed data
*/
export default function transformData(fns, response) {
const config = this || defaults;
const context = response || config;
const headers = AxiosHeaders.from(context.headers);
let data = context.data;
utils.forEach(fns, function transform(fn) {
data = fn.call(config, data, headers.normalize(), response ? response.status : undefined);
});
headers.normalize();
return data;
}
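A minimal sketch of a request-side transform chain run through transformData (imports are relative to this file; payload and header names are illustrative).
import transformData from './transformData.js';
import AxiosHeaders from './AxiosHeaders.js';

const config = {
  headers: new AxiosHeaders({ 'Content-Type': 'application/json' }),
  data: { userId: 1, mediaTitle: 'One Piece' },
  transformRequest: [(data, headers) => {
    headers.set('X-Payload-Kind', 'activity'); // transforms may adjust headers in place
    return JSON.stringify(data);
  }]
};

const body = transformData.call(config, config.transformRequest);
console.log(body); // '{"userId":1,"mediaTitle":"One Piece"}'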

161
node_modules/axios/lib/defaults/index.js generated vendored Normal file
View File

@ -0,0 +1,161 @@
'use strict';
import utils from '../utils.js';
import AxiosError from '../core/AxiosError.js';
import transitionalDefaults from './transitional.js';
import toFormData from '../helpers/toFormData.js';
import toURLEncodedForm from '../helpers/toURLEncodedForm.js';
import platform from '../platform/index.js';
import formDataToJSON from '../helpers/formDataToJSON.js';
/**
* It takes a string, tries to parse it, and if it fails, it returns the stringified version
* of the input
*
* @param {any} rawValue - The value to be stringified.
* @param {Function} parser - A function that parses a string into a JavaScript object.
* @param {Function} encoder - A function that takes a value and returns a string.
*
* @returns {string} A stringified version of the rawValue.
*/
function stringifySafely(rawValue, parser, encoder) {
if (utils.isString(rawValue)) {
try {
(parser || JSON.parse)(rawValue);
return utils.trim(rawValue);
} catch (e) {
if (e.name !== 'SyntaxError') {
throw e;
}
}
}
return (encoder || JSON.stringify)(rawValue);
}
const defaults = {
transitional: transitionalDefaults,
adapter: ['xhr', 'http', 'fetch'],
transformRequest: [function transformRequest(data, headers) {
const contentType = headers.getContentType() || '';
const hasJSONContentType = contentType.indexOf('application/json') > -1;
const isObjectPayload = utils.isObject(data);
if (isObjectPayload && utils.isHTMLForm(data)) {
data = new FormData(data);
}
const isFormData = utils.isFormData(data);
if (isFormData) {
return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data;
}
if (utils.isArrayBuffer(data) ||
utils.isBuffer(data) ||
utils.isStream(data) ||
utils.isFile(data) ||
utils.isBlob(data) ||
utils.isReadableStream(data)
) {
return data;
}
if (utils.isArrayBufferView(data)) {
return data.buffer;
}
if (utils.isURLSearchParams(data)) {
headers.setContentType('application/x-www-form-urlencoded;charset=utf-8', false);
return data.toString();
}
let isFileList;
if (isObjectPayload) {
if (contentType.indexOf('application/x-www-form-urlencoded') > -1) {
return toURLEncodedForm(data, this.formSerializer).toString();
}
if ((isFileList = utils.isFileList(data)) || contentType.indexOf('multipart/form-data') > -1) {
const _FormData = this.env && this.env.FormData;
return toFormData(
isFileList ? {'files[]': data} : data,
_FormData && new _FormData(),
this.formSerializer
);
}
}
if (isObjectPayload || hasJSONContentType ) {
headers.setContentType('application/json', false);
return stringifySafely(data);
}
return data;
}],
transformResponse: [function transformResponse(data) {
const transitional = this.transitional || defaults.transitional;
const forcedJSONParsing = transitional && transitional.forcedJSONParsing;
const JSONRequested = this.responseType === 'json';
if (utils.isResponse(data) || utils.isReadableStream(data)) {
return data;
}
if (data && utils.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) {
const silentJSONParsing = transitional && transitional.silentJSONParsing;
const strictJSONParsing = !silentJSONParsing && JSONRequested;
try {
return JSON.parse(data);
} catch (e) {
if (strictJSONParsing) {
if (e.name === 'SyntaxError') {
throw AxiosError.from(e, AxiosError.ERR_BAD_RESPONSE, this, null, this.response);
}
throw e;
}
}
}
return data;
}],
/**
* A timeout in milliseconds to abort a request. If set to 0 (default) a
* timeout is not created.
*/
timeout: 0,
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
maxBodyLength: -1,
env: {
FormData: platform.classes.FormData,
Blob: platform.classes.Blob
},
validateStatus: function validateStatus(status) {
return status >= 200 && status < 300;
},
headers: {
common: {
'Accept': 'application/json, text/plain, */*',
'Content-Type': undefined
}
}
};
utils.forEach(['delete', 'get', 'head', 'post', 'put', 'patch'], (method) => {
defaults.headers[method] = {};
});
export default defaults;
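A minimal sketch of overriding these defaults per instance (base URL and endpoint are illustrative); plain-object payloads go through the transformRequest pipeline above and are stringified as JSON.
import axios from 'axios';

const api = axios.create({
  baseURL: 'http://localhost:3000',
  validateStatus: status => status < 500   // only 5xx responses reject now
});

// Content-Type is set to application/json and the body is stringified safely
api.post('/activity', { userId: 1, action: 'read', mediaTitle: 'One Piece' });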

7
node_modules/axios/lib/defaults/transitional.js generated vendored Normal file
View File

@ -0,0 +1,7 @@
'use strict';
export default {
silentJSONParsing: true,
forcedJSONParsing: true,
clarifyTimeoutError: false
};
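A minimal sketch of overriding these flags per request (the URL is illustrative).
import axios from 'axios';

axios.get('http://localhost:3000/users', {
  responseType: 'json',
  transitional: {
    silentJSONParsing: false,  // with responseType 'json', an unparsable body now rejects
    forcedJSONParsing: true,
    clarifyTimeoutError: true  // timeouts reject with ETIMEDOUT instead of ECONNABORTED
  }
});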

Some files were not shown because too many files have changed in this diff.