/******/ (() => { // webpackBootstrap
/******/ 	var __webpack_modules__ = ({

/***/ 2605:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.create = void 0;
const artifact_client_1 = __nccwpck_require__(8802);
/**
 * Constructs an ArtifactClient
 */
function create() {
    return artifact_client_1.DefaultArtifactClient.create();
}
exports.create = create;
//# sourceMappingURL=artifact-client.js.map

/***/ }),
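// Illustrative usage sketch (comment only, not part of the bundled module; the file paths are made up):
// a consumer of this entry point obtains a client via create() and then calls its async methods, e.g.
//   const { create } = __nccwpck_require__(2605);
//   const client = create();
//   const uploadResponse = await client.uploadArtifact('my-artifact', ['/tmp/out/report.txt'], '/tmp/out');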

/***/ 8802:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DefaultArtifactClient = void 0;
const core = __importStar(__nccwpck_require__(2186));
const upload_specification_1 = __nccwpck_require__(183);
const upload_http_client_1 = __nccwpck_require__(4354);
const utils_1 = __nccwpck_require__(6327);
const path_and_artifact_name_validation_1 = __nccwpck_require__(7398);
const download_http_client_1 = __nccwpck_require__(8538);
const download_specification_1 = __nccwpck_require__(5686);
const config_variables_1 = __nccwpck_require__(2222);
const path_1 = __nccwpck_require__(1017);
class DefaultArtifactClient {
    /**
     * Constructs a DefaultArtifactClient
     */
    static create() {
        return new DefaultArtifactClient();
    }
    /**
     * Uploads an artifact
     */
    uploadArtifact(name, files, rootDirectory, options) {
        return __awaiter(this, void 0, void 0, function* () {
            core.info(`Starting artifact upload
For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`);
            (0, path_and_artifact_name_validation_1.checkArtifactName)(name);
            // Get specification for the files being uploaded
            const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files);
            const uploadResponse = {
                artifactName: name,
                artifactItems: [],
                size: 0,
                failedItems: []
            };
            const uploadHttpClient = new upload_http_client_1.UploadHttpClient();
            if (uploadSpecification.length === 0) {
                core.warning(`No files found that can be uploaded`);
            }
            else {
                // Create an entry for the artifact in the file container
                const response = yield uploadHttpClient.createArtifactInFileContainer(name, options);
                if (!response.fileContainerResourceUrl) {
                    core.debug(response.toString());
                    throw new Error('No URL provided by the Artifact Service to upload an artifact to');
                }
                core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`);
                core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`);
                // Upload each of the files that were found concurrently
                const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options);
                // Update the size of the artifact to indicate we are done uploading
                // The uncompressed size is used for display when downloading a zip of the artifact from the UI
                core.info(`File upload process has finished. Finalizing the artifact upload`);
                yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name);
                if (uploadResult.failedItems.length > 0) {
                    core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`);
                }
                else {
                    core.info(`Artifact has been finalized. All files have been successfully uploaded!`);
                }
                core.info(`
The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`);
                uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath);
                uploadResponse.size = uploadResult.uploadSize;
                uploadResponse.failedItems = uploadResult.failedItems;
            }
            return uploadResponse;
        });
    }
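    // Observational note (not in the original bundle): the object returned by uploadArtifact above
    // mirrors the response shape built at the top of the method: artifactName, artifactItems (the
    // absolute paths from the upload specification), size (the uploaded, possibly gzip-compressed
    // byte count from uploadResult.uploadSize) and failedItems. When no files match, the response is
    // returned with empty arrays and size 0 after only a warning is logged.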
    downloadArtifact(name, path, options) {
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const artifacts = yield downloadHttpClient.listArtifacts();
            if (artifacts.count === 0) {
                throw new Error(`Unable to find any artifacts for the associated workflow`);
            }
            const artifactToDownload = artifacts.value.find(artifact => {
                return artifact.name === name;
            });
            if (!artifactToDownload) {
                throw new Error(`Unable to find an artifact with the name: ${name}`);
            }
            const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl);
            if (!path) {
                path = (0, config_variables_1.getWorkSpaceDirectory)();
            }
            path = (0, path_1.normalize)(path);
            path = (0, path_1.resolve)(path);
            // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
            const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false);
            if (downloadSpecification.filesToDownload.length === 0) {
                core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
            }
            else {
                // Create all necessary directories recursively before starting any download
                yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure);
                core.info('Directory structure has been set up for the artifact');
                yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate);
                yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
            }
            return {
                artifactName: name,
                downloadPath: downloadSpecification.rootDownloadLocation
            };
        });
    }
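    // Observational note (not in the original bundle): when no download path is passed to
    // downloadArtifact above, it falls back to getWorkSpaceDirectory() (the GITHUB_WORKSPACE
    // directory) and then normalizes and resolves it; the createArtifactFolder option decides
    // whether an extra folder named after the artifact is created under that path.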
    downloadAllArtifacts(path) {
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const response = [];
            const artifacts = yield downloadHttpClient.listArtifacts();
            if (artifacts.count === 0) {
                core.info('Unable to find any artifacts for the associated workflow');
                return response;
            }
            if (!path) {
                path = (0, config_variables_1.getWorkSpaceDirectory)();
            }
            path = (0, path_1.normalize)(path);
            path = (0, path_1.resolve)(path);
            let downloadedArtifacts = 0;
            while (downloadedArtifacts < artifacts.count) {
                const currentArtifactToDownload = artifacts.value[downloadedArtifacts];
                downloadedArtifacts += 1;
                core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`);
                // Get container entries for the specific artifact
                const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl);
                const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path, true);
                if (downloadSpecification.filesToDownload.length === 0) {
                    core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`);
                }
                else {
                    yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure);
                    yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate);
                    yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
                }
                response.push({
                    artifactName: currentArtifactToDownload.name,
                    downloadPath: downloadSpecification.rootDownloadLocation
                });
            }
            return response;
        });
    }
}
exports.DefaultArtifactClient = DefaultArtifactClient;
//# sourceMappingURL=artifact-client.js.map

/***/ }),
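// Observational note (not in the original bundle): unlike downloadArtifact, downloadAllArtifacts
// always passes includeRootDirectory = true to getDownloadSpecification, so each artifact is nested
// under a folder named after it, which keeps artifacts with overlapping file names from colliding
// in the shared download path.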

/***/ 2222:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isGhes = exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0;
// The number of concurrent uploads that happens at the same time
function getUploadFileConcurrency() {
    return 2;
}
exports.getUploadFileConcurrency = getUploadFileConcurrency;
// When uploading large files that can't be uploaded with a single http call, this controls
// the chunk size that is used during upload
function getUploadChunkSize() {
    return 8 * 1024 * 1024; // 8 MB Chunks
}
exports.getUploadChunkSize = getUploadChunkSize;
// The maximum number of retries that can be attempted before an upload or download fails
function getRetryLimit() {
    return 5;
}
exports.getRetryLimit = getRetryLimit;
// With exponential backoff, the larger the retry count, the larger the wait time before another attempt
// The retry multiplier controls by how much the backOff time increases depending on the number of retries
function getRetryMultiplier() {
    return 1.5;
}
exports.getRetryMultiplier = getRetryMultiplier;
// The initial wait time if an upload or download fails and a retry is being attempted for the first time
function getInitialRetryIntervalInMilliseconds() {
    return 3000;
}
exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds;
// The number of concurrent downloads that happens at the same time
function getDownloadFileConcurrency() {
    return 2;
}
exports.getDownloadFileConcurrency = getDownloadFileConcurrency;
function getRuntimeToken() {
    const token = process.env['ACTIONS_RUNTIME_TOKEN'];
    if (!token) {
        throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable');
    }
    return token;
}
exports.getRuntimeToken = getRuntimeToken;
function getRuntimeUrl() {
    const runtimeUrl = process.env['ACTIONS_RUNTIME_URL'];
    if (!runtimeUrl) {
        throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable');
    }
    return runtimeUrl;
}
exports.getRuntimeUrl = getRuntimeUrl;
function getWorkFlowRunId() {
    const workFlowRunId = process.env['GITHUB_RUN_ID'];
    if (!workFlowRunId) {
        throw new Error('Unable to get GITHUB_RUN_ID env variable');
    }
    return workFlowRunId;
}
exports.getWorkFlowRunId = getWorkFlowRunId;
function getWorkSpaceDirectory() {
    const workspaceDirectory = process.env['GITHUB_WORKSPACE'];
    if (!workspaceDirectory) {
        throw new Error('Unable to get GITHUB_WORKSPACE env variable');
    }
    return workspaceDirectory;
}
exports.getWorkSpaceDirectory = getWorkSpaceDirectory;
function getRetentionDays() {
    return process.env['GITHUB_RETENTION_DAYS'];
}
exports.getRetentionDays = getRetentionDays;
function isGhes() {
    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
exports.isGhes = isGhes;
//# sourceMappingURL=config-variables.js.map

/***/ }),
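// Observational note on the config getters above (comment only, not part of the original bundle):
// isGhes() treats an unset GITHUB_SERVER_URL, or any URL whose host is github.com, as non-GHES,
// e.g. 'https://github.com' -> false, 'https://github.example.com' -> true. getRetentionDays()
// returns the raw GITHUB_RETENTION_DAYS string and may be undefined, unlike the other env getters,
// which throw when their variable is missing.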

/***/ 3549:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

/**
 * CRC64: cyclic redundancy check, 64-bits
 *
 * In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to
 * validate that there was no corruption during transmission. The implementation here is based on Go's hash/crc64 pkg,
 * but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go
 *
 * This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that
 * is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27
 */
Object.defineProperty(exports, "__esModule", ({ value: true }));
// when transpile target is >= ES2020 (after dropping node 12) these can be changed to bigint literals - ts(2737)
const PREGEN_POLY_TABLE = [
BigInt ( '0x0000000000000000' ) ,
BigInt ( '0x7F6EF0C830358979' ) ,
BigInt ( '0xFEDDE190606B12F2' ) ,
BigInt ( '0x81B31158505E9B8B' ) ,
BigInt ( '0xC962E5739841B68F' ) ,
BigInt ( '0xB60C15BBA8743FF6' ) ,
BigInt ( '0x37BF04E3F82AA47D' ) ,
BigInt ( '0x48D1F42BC81F2D04' ) ,
BigInt ( '0xA61CECB46814FE75' ) ,
BigInt ( '0xD9721C7C5821770C' ) ,
BigInt ( '0x58C10D24087FEC87' ) ,
BigInt ( '0x27AFFDEC384A65FE' ) ,
BigInt ( '0x6F7E09C7F05548FA' ) ,
BigInt ( '0x1010F90FC060C183' ) ,
BigInt ( '0x91A3E857903E5A08' ) ,
BigInt ( '0xEECD189FA00BD371' ) ,
BigInt ( '0x78E0FF3B88BE6F81' ) ,
BigInt ( '0x078E0FF3B88BE6F8' ) ,
BigInt ( '0x863D1EABE8D57D73' ) ,
BigInt ( '0xF953EE63D8E0F40A' ) ,
BigInt ( '0xB1821A4810FFD90E' ) ,
BigInt ( '0xCEECEA8020CA5077' ) ,
BigInt ( '0x4F5FFBD87094CBFC' ) ,
BigInt ( '0x30310B1040A14285' ) ,
BigInt ( '0xDEFC138FE0AA91F4' ) ,
BigInt ( '0xA192E347D09F188D' ) ,
BigInt ( '0x2021F21F80C18306' ) ,
BigInt ( '0x5F4F02D7B0F40A7F' ) ,
BigInt ( '0x179EF6FC78EB277B' ) ,
BigInt ( '0x68F0063448DEAE02' ) ,
BigInt ( '0xE943176C18803589' ) ,
BigInt ( '0x962DE7A428B5BCF0' ) ,
BigInt ( '0xF1C1FE77117CDF02' ) ,
BigInt ( '0x8EAF0EBF2149567B' ) ,
BigInt ( '0x0F1C1FE77117CDF0' ) ,
BigInt ( '0x7072EF2F41224489' ) ,
BigInt ( '0x38A31B04893D698D' ) ,
BigInt ( '0x47CDEBCCB908E0F4' ) ,
BigInt ( '0xC67EFA94E9567B7F' ) ,
BigInt ( '0xB9100A5CD963F206' ) ,
BigInt ( '0x57DD12C379682177' ) ,
BigInt ( '0x28B3E20B495DA80E' ) ,
BigInt ( '0xA900F35319033385' ) ,
BigInt ( '0xD66E039B2936BAFC' ) ,
BigInt ( '0x9EBFF7B0E12997F8' ) ,
BigInt ( '0xE1D10778D11C1E81' ) ,
BigInt ( '0x606216208142850A' ) ,
BigInt ( '0x1F0CE6E8B1770C73' ) ,
BigInt ( '0x8921014C99C2B083' ) ,
BigInt ( '0xF64FF184A9F739FA' ) ,
BigInt ( '0x77FCE0DCF9A9A271' ) ,
BigInt ( '0x08921014C99C2B08' ) ,
BigInt ( '0x4043E43F0183060C' ) ,
BigInt ( '0x3F2D14F731B68F75' ) ,
BigInt ( '0xBE9E05AF61E814FE' ) ,
BigInt ( '0xC1F0F56751DD9D87' ) ,
BigInt ( '0x2F3DEDF8F1D64EF6' ) ,
BigInt ( '0x50531D30C1E3C78F' ) ,
BigInt ( '0xD1E00C6891BD5C04' ) ,
BigInt ( '0xAE8EFCA0A188D57D' ) ,
BigInt ( '0xE65F088B6997F879' ) ,
BigInt ( '0x9931F84359A27100' ) ,
BigInt ( '0x1882E91B09FCEA8B' ) ,
BigInt ( '0x67EC19D339C963F2' ) ,
BigInt ( '0xD75ADABD7A6E2D6F' ) ,
BigInt ( '0xA8342A754A5BA416' ) ,
BigInt ( '0x29873B2D1A053F9D' ) ,
BigInt ( '0x56E9CBE52A30B6E4' ) ,
BigInt ( '0x1E383FCEE22F9BE0' ) ,
BigInt ( '0x6156CF06D21A1299' ) ,
BigInt ( '0xE0E5DE5E82448912' ) ,
BigInt ( '0x9F8B2E96B271006B' ) ,
BigInt ( '0x71463609127AD31A' ) ,
BigInt ( '0x0E28C6C1224F5A63' ) ,
BigInt ( '0x8F9BD7997211C1E8' ) ,
BigInt ( '0xF0F5275142244891' ) ,
BigInt ( '0xB824D37A8A3B6595' ) ,
BigInt ( '0xC74A23B2BA0EECEC' ) ,
BigInt ( '0x46F932EAEA507767' ) ,
BigInt ( '0x3997C222DA65FE1E' ) ,
BigInt ( '0xAFBA2586F2D042EE' ) ,
BigInt ( '0xD0D4D54EC2E5CB97' ) ,
BigInt ( '0x5167C41692BB501C' ) ,
BigInt ( '0x2E0934DEA28ED965' ) ,
BigInt ( '0x66D8C0F56A91F461' ) ,
BigInt ( '0x19B6303D5AA47D18' ) ,
BigInt ( '0x980521650AFAE693' ) ,
BigInt ( '0xE76BD1AD3ACF6FEA' ) ,
BigInt ( '0x09A6C9329AC4BC9B' ) ,
BigInt ( '0x76C839FAAAF135E2' ) ,
BigInt ( '0xF77B28A2FAAFAE69' ) ,
BigInt ( '0x8815D86ACA9A2710' ) ,
BigInt ( '0xC0C42C4102850A14' ) ,
BigInt ( '0xBFAADC8932B0836D' ) ,
BigInt ( '0x3E19CDD162EE18E6' ) ,
BigInt ( '0x41773D1952DB919F' ) ,
BigInt ( '0x269B24CA6B12F26D' ) ,
BigInt ( '0x59F5D4025B277B14' ) ,
BigInt ( '0xD846C55A0B79E09F' ) ,
BigInt ( '0xA72835923B4C69E6' ) ,
BigInt ( '0xEFF9C1B9F35344E2' ) ,
BigInt ( '0x90973171C366CD9B' ) ,
BigInt ( '0x1124202993385610' ) ,
BigInt ( '0x6E4AD0E1A30DDF69' ) ,
BigInt ( '0x8087C87E03060C18' ) ,
BigInt ( '0xFFE938B633338561' ) ,
BigInt ( '0x7E5A29EE636D1EEA' ) ,
BigInt ( '0x0134D92653589793' ) ,
BigInt ( '0x49E52D0D9B47BA97' ) ,
BigInt ( '0x368BDDC5AB7233EE' ) ,
BigInt ( '0xB738CC9DFB2CA865' ) ,
BigInt ( '0xC8563C55CB19211C' ) ,
BigInt ( '0x5E7BDBF1E3AC9DEC' ) ,
BigInt ( '0x21152B39D3991495' ) ,
BigInt ( '0xA0A63A6183C78F1E' ) ,
BigInt ( '0xDFC8CAA9B3F20667' ) ,
BigInt ( '0x97193E827BED2B63' ) ,
BigInt ( '0xE877CE4A4BD8A21A' ) ,
BigInt ( '0x69C4DF121B863991' ) ,
BigInt ( '0x16AA2FDA2BB3B0E8' ) ,
BigInt ( '0xF86737458BB86399' ) ,
BigInt ( '0x8709C78DBB8DEAE0' ) ,
BigInt ( '0x06BAD6D5EBD3716B' ) ,
BigInt ( '0x79D4261DDBE6F812' ) ,
BigInt ( '0x3105D23613F9D516' ) ,
BigInt ( '0x4E6B22FE23CC5C6F' ) ,
BigInt ( '0xCFD833A67392C7E4' ) ,
BigInt ( '0xB0B6C36E43A74E9D' ) ,
BigInt ( '0x9A6C9329AC4BC9B5' ) ,
BigInt ( '0xE50263E19C7E40CC' ) ,
BigInt ( '0x64B172B9CC20DB47' ) ,
BigInt ( '0x1BDF8271FC15523E' ) ,
BigInt ( '0x530E765A340A7F3A' ) ,
BigInt ( '0x2C608692043FF643' ) ,
BigInt ( '0xADD397CA54616DC8' ) ,
BigInt ( '0xD2BD67026454E4B1' ) ,
BigInt ( '0x3C707F9DC45F37C0' ) ,
BigInt ( '0x431E8F55F46ABEB9' ) ,
BigInt ( '0xC2AD9E0DA4342532' ) ,
BigInt ( '0xBDC36EC59401AC4B' ) ,
BigInt ( '0xF5129AEE5C1E814F' ) ,
BigInt ( '0x8A7C6A266C2B0836' ) ,
BigInt ( '0x0BCF7B7E3C7593BD' ) ,
BigInt ( '0x74A18BB60C401AC4' ) ,
BigInt ( '0xE28C6C1224F5A634' ) ,
BigInt ( '0x9DE29CDA14C02F4D' ) ,
BigInt ( '0x1C518D82449EB4C6' ) ,
BigInt ( '0x633F7D4A74AB3DBF' ) ,
BigInt ( '0x2BEE8961BCB410BB' ) ,
BigInt ( '0x548079A98C8199C2' ) ,
BigInt ( '0xD53368F1DCDF0249' ) ,
BigInt ( '0xAA5D9839ECEA8B30' ) ,
BigInt ( '0x449080A64CE15841' ) ,
BigInt ( '0x3BFE706E7CD4D138' ) ,
BigInt ( '0xBA4D61362C8A4AB3' ) ,
BigInt ( '0xC52391FE1CBFC3CA' ) ,
BigInt ( '0x8DF265D5D4A0EECE' ) ,
BigInt ( '0xF29C951DE49567B7' ) ,
BigInt ( '0x732F8445B4CBFC3C' ) ,
BigInt ( '0x0C41748D84FE7545' ) ,
BigInt ( '0x6BAD6D5EBD3716B7' ) ,
BigInt ( '0x14C39D968D029FCE' ) ,
BigInt ( '0x95708CCEDD5C0445' ) ,
BigInt ( '0xEA1E7C06ED698D3C' ) ,
BigInt ( '0xA2CF882D2576A038' ) ,
BigInt ( '0xDDA178E515432941' ) ,
BigInt ( '0x5C1269BD451DB2CA' ) ,
BigInt ( '0x237C997575283BB3' ) ,
BigInt ( '0xCDB181EAD523E8C2' ) ,
BigInt ( '0xB2DF7122E51661BB' ) ,
BigInt ( '0x336C607AB548FA30' ) ,
BigInt ( '0x4C0290B2857D7349' ) ,
BigInt ( '0x04D364994D625E4D' ) ,
BigInt ( '0x7BBD94517D57D734' ) ,
BigInt ( '0xFA0E85092D094CBF' ) ,
BigInt ( '0x856075C11D3CC5C6' ) ,
BigInt ( '0x134D926535897936' ) ,
BigInt ( '0x6C2362AD05BCF04F' ) ,
BigInt ( '0xED9073F555E26BC4' ) ,
BigInt ( '0x92FE833D65D7E2BD' ) ,
BigInt ( '0xDA2F7716ADC8CFB9' ) ,
BigInt ( '0xA54187DE9DFD46C0' ) ,
BigInt ( '0x24F29686CDA3DD4B' ) ,
BigInt ( '0x5B9C664EFD965432' ) ,
BigInt ( '0xB5517ED15D9D8743' ) ,
BigInt ( '0xCA3F8E196DA80E3A' ) ,
BigInt ( '0x4B8C9F413DF695B1' ) ,
BigInt ( '0x34E26F890DC31CC8' ) ,
BigInt ( '0x7C339BA2C5DC31CC' ) ,
BigInt ( '0x035D6B6AF5E9B8B5' ) ,
BigInt ( '0x82EE7A32A5B7233E' ) ,
BigInt ( '0xFD808AFA9582AA47' ) ,
BigInt ( '0x4D364994D625E4DA' ) ,
BigInt ( '0x3258B95CE6106DA3' ) ,
BigInt ( '0xB3EBA804B64EF628' ) ,
BigInt ( '0xCC8558CC867B7F51' ) ,
BigInt ( '0x8454ACE74E645255' ) ,
BigInt ( '0xFB3A5C2F7E51DB2C' ) ,
BigInt ( '0x7A894D772E0F40A7' ) ,
BigInt ( '0x05E7BDBF1E3AC9DE' ) ,
BigInt ( '0xEB2AA520BE311AAF' ) ,
BigInt ( '0x944455E88E0493D6' ) ,
BigInt ( '0x15F744B0DE5A085D' ) ,
BigInt ( '0x6A99B478EE6F8124' ) ,
BigInt ( '0x224840532670AC20' ) ,
BigInt ( '0x5D26B09B16452559' ) ,
BigInt ( '0xDC95A1C3461BBED2' ) ,
BigInt ( '0xA3FB510B762E37AB' ) ,
BigInt ( '0x35D6B6AF5E9B8B5B' ) ,
BigInt ( '0x4AB846676EAE0222' ) ,
BigInt ( '0xCB0B573F3EF099A9' ) ,
BigInt ( '0xB465A7F70EC510D0' ) ,
BigInt ( '0xFCB453DCC6DA3DD4' ) ,
BigInt ( '0x83DAA314F6EFB4AD' ) ,
BigInt ( '0x0269B24CA6B12F26' ) ,
BigInt ( '0x7D0742849684A65F' ) ,
BigInt ( '0x93CA5A1B368F752E' ) ,
BigInt ( '0xECA4AAD306BAFC57' ) ,
BigInt ( '0x6D17BB8B56E467DC' ) ,
BigInt ( '0x12794B4366D1EEA5' ) ,
BigInt ( '0x5AA8BF68AECEC3A1' ) ,
BigInt ( '0x25C64FA09EFB4AD8' ) ,
BigInt ( '0xA4755EF8CEA5D153' ) ,
BigInt ( '0xDB1BAE30FE90582A' ) ,
BigInt ( '0xBCF7B7E3C7593BD8' ) ,
BigInt ( '0xC399472BF76CB2A1' ) ,
BigInt ( '0x422A5673A732292A' ) ,
BigInt ( '0x3D44A6BB9707A053' ) ,
BigInt ( '0x759552905F188D57' ) ,
BigInt ( '0x0AFBA2586F2D042E' ) ,
BigInt ( '0x8B48B3003F739FA5' ) ,
BigInt ( '0xF42643C80F4616DC' ) ,
BigInt ( '0x1AEB5B57AF4DC5AD' ) ,
BigInt ( '0x6585AB9F9F784CD4' ) ,
BigInt ( '0xE436BAC7CF26D75F' ) ,
BigInt ( '0x9B584A0FFF135E26' ) ,
BigInt ( '0xD389BE24370C7322' ) ,
BigInt ( '0xACE74EEC0739FA5B' ) ,
BigInt ( '0x2D545FB4576761D0' ) ,
BigInt ( '0x523AAF7C6752E8A9' ) ,
BigInt ( '0xC41748D84FE75459' ) ,
BigInt ( '0xBB79B8107FD2DD20' ) ,
BigInt ( '0x3ACAA9482F8C46AB' ) ,
BigInt ( '0x45A459801FB9CFD2' ) ,
BigInt ( '0x0D75ADABD7A6E2D6' ) ,
BigInt ( '0x721B5D63E7936BAF' ) ,
BigInt ( '0xF3A84C3BB7CDF024' ) ,
BigInt ( '0x8CC6BCF387F8795D' ) ,
BigInt ( '0x620BA46C27F3AA2C' ) ,
BigInt ( '0x1D6554A417C62355' ) ,
BigInt ( '0x9CD645FC4798B8DE' ) ,
BigInt ( '0xE3B8B53477AD31A7' ) ,
BigInt ( '0xAB69411FBFB21CA3' ) ,
BigInt ( '0xD407B1D78F8795DA' ) ,
BigInt ( '0x55B4A08FDFD90E51' ) ,
BigInt ( '0x2ADA5047EFEC8728' )
] ;
class CRC64 {
    constructor() {
        this._crc = BigInt(0);
    }
    update(data) {
        const buffer = typeof data === 'string' ? Buffer.from(data) : data;
        let crc = CRC64.flip64Bits(this._crc);
        for (const dataByte of buffer) {
            const crcByte = Number(crc & BigInt(0xff));
            crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8));
        }
        this._crc = CRC64.flip64Bits(crc);
    }
    digest(encoding) {
        switch (encoding) {
            case 'hex':
                return this._crc.toString(16).toUpperCase();
            case 'base64':
                return this.toBuffer().toString('base64');
            default:
                return this.toBuffer();
        }
    }
    toBuffer() {
        return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff))));
    }
    static flip64Bits(n) {
        return (BigInt(1) << BigInt(64)) - BigInt(1) - n;
    }
}
exports["default"] = CRC64;
//# sourceMappingURL=crc64.js.map

/***/ }),
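// Illustrative usage sketch for the CRC64 class above (comment only, so it is not part of the bundle;
// the digest forms come from the switch in digest()):
//   const CRC64 = __nccwpck_require__(3549)["default"];
//   const crc = new CRC64();
//   crc.update('hello world');          // accepts a string or a Buffer
//   crc.update(Buffer.from('!'));       // can be called repeatedly to stream data in
//   const checksum = crc.digest('hex'); // uppercase hex string; 'base64' or no argument returns a Buffer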

/***/ 8538:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DownloadHttpClient = void 0;
const fs = __importStar(__nccwpck_require__(7147));
const core = __importStar(__nccwpck_require__(2186));
const zlib = __importStar(__nccwpck_require__(9796));
const utils_1 = __nccwpck_require__(6327);
const url_1 = __nccwpck_require__(7310);
const status_reporter_1 = __nccwpck_require__(9081);
const perf_hooks_1 = __nccwpck_require__(4074);
const http_manager_1 = __nccwpck_require__(6527);
const config_variables_1 = __nccwpck_require__(2222);
const requestUtils_1 = __nccwpck_require__(755);
class DownloadHttpClient {
    constructor() {
        this.downloadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getDownloadFileConcurrency)(), '@actions/artifact-download');
        // downloads are usually significantly faster than uploads so display status information every second
        this.statusReporter = new status_reporter_1.StatusReporter(1000);
    }
    /**
     * Gets a list of all artifacts that are in a specific container
     */
    listArtifacts() {
        return __awaiter(this, void 0, void 0, function* () {
            const artifactUrl = (0, utils_1.getArtifactUrl)();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = (0, utils_1.getDownloadHeaders)('application/json');
            const response = yield (0, requestUtils_1.retryHttpClientRequest)('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); }));
            const body = yield response.readBody();
            return JSON.parse(body);
        });
    }
    /**
     * Fetches a set of container items that describe the contents of an artifact
     * @param artifactName the name of the artifact
     * @param containerUrl the artifact container URL for the run
     */
    getContainerItems(artifactName, containerUrl) {
        return __awaiter(this, void 0, void 0, function* () {
            // the itemPath search parameter controls which containers will be returned
            const resourceUrl = new url_1.URL(containerUrl);
            resourceUrl.searchParams.append('itemPath', artifactName);
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = (0, utils_1.getDownloadHeaders)('application/json');
            const response = yield (0, requestUtils_1.retryHttpClientRequest)('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); }));
            const body = yield response.readBody();
            return JSON.parse(body);
        });
    }
    /**
     * Concurrently downloads all the files that are part of an artifact
     * @param downloadItems information about what items to download and where to save them
     */
    downloadSingleArtifact(downloadItems) {
        return __awaiter(this, void 0, void 0, function* () {
            const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)();
            // limit the number of files downloaded at a single time
            core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`);
            const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()];
            let currentFile = 0;
            let downloadedFiles = 0;
            core.info(`Total number of files that will be downloaded: ${downloadItems.length}`);
            this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length);
            this.statusReporter.start();
            yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () {
                while (currentFile < downloadItems.length) {
                    const currentFileToDownload = downloadItems[currentFile];
                    currentFile += 1;
                    const startTime = perf_hooks_1.performance.now();
                    yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath);
                    if (core.isDebug()) {
                        core.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`);
                    }
                    this.statusReporter.incrementProcessedCount();
                }
            })))
                .catch(error => {
                throw new Error(`Unable to download the artifact: ${error}`);
            })
                .finally(() => {
                this.statusReporter.stop();
                // safety dispose all connections
                this.downloadHttpManager.disposeAndReplaceAllClients();
            });
        });
    }
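    // Observational note (not in the original bundle): downloadSingleArtifact above fans out
    // DOWNLOAD_CONCURRENCY async "workers" via parallelDownloads.map(...), and each worker pulls the
    // next index from the shared currentFile counter until the list is exhausted. This is safe because
    // the counter is only read and incremented between awaits on a single-threaded event loop, so two
    // workers never claim the same item.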
    /**
     * Downloads an individual file
     * @param httpClientIndex the index of the http client that is used to make all of the calls
     * @param artifactLocation origin location where a file will be downloaded from
     * @param downloadPath destination location for the file being downloaded
     */
    downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) {
        return __awaiter(this, void 0, void 0, function* () {
            let retryCount = 0;
            const retryLimit = (0, config_variables_1.getRetryLimit)();
            let destinationStream = fs.createWriteStream(downloadPath);
            const headers = (0, utils_1.getDownloadHeaders)('application/json', true, true);
            // a single GET request is used to download a file
            const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () {
                const client = this.downloadHttpManager.getClient(httpClientIndex);
                return yield client.get(artifactLocation, headers);
            });
            // check the response headers to determine if the file was compressed using gzip
            const isGzip = (incomingHeaders) => {
                return ('content-encoding' in incomingHeaders &&
                    incomingHeaders['content-encoding'] === 'gzip');
            };
            // Increments the current retry count and then checks if the retry limit has been reached
            // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided,
            // it will be used
            const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () {
                retryCount++;
                if (retryCount > retryLimit) {
                    return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`));
                }
                else {
                    this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex);
                    if (retryAfterValue) {
                        // Back off by waiting the specified time denoted by the retry-after header
                        core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`);
                        yield (0, utils_1.sleep)(retryAfterValue);
                    }
                    else {
                        // Back off using an exponential value that depends on the retry count
                        const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount);
                        core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`);
                        yield (0, utils_1.sleep)(backoffTime);
                    }
                    core.info(`Finished backoff for retry #${retryCount}, continuing with download`);
                }
            });
            const isAllBytesReceived = (expected, received) => {
                // be lenient, if any input is missing, assume success, i.e. not truncated
                if (!expected ||
                    !received ||
                    process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) {
                    core.info('Skipping download validation.');
                    return true;
                }
                return parseInt(expected) === received;
            };
            const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () {
                destinationStream.close();
                // await until file is created at downloadpath; node15 and up fs.createWriteStream had not created a file yet
                yield new Promise(resolve => {
                    destinationStream.on('close', resolve);
                    if (destinationStream.writableFinished) {
                        resolve();
                    }
                });
                yield (0, utils_1.rmFile)(fileDownloadPath);
                destinationStream = fs.createWriteStream(fileDownloadPath);
            });
            // keep trying to download a file until a retry limit has been reached
            while (retryCount <= retryLimit) {
                let response;
                try {
                    response = yield makeDownloadRequest();
                }
                catch (error) {
                    // if an error is caught, it is usually indicative of a timeout so retry the download
                    core.info('An error occurred while attempting to download a file');
                    // eslint-disable-next-line no-console
                    console.log(error);
                    // increment the retryCount and use exponential backoff to wait before making the next request
                    yield backOff();
                    continue;
                }
                let forceRetry = false;
                if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) {
                    // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string
                    // which can cause some gzip encoded data to be lost
                    // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents
                    try {
                        const isGzipped = isGzip(response.message.headers);
                        yield this.pipeResponseToFile(response, destinationStream, isGzipped);
                        if (isGzipped ||
                            isAllBytesReceived(response.message.headers['content-length'], yield (0, utils_1.getFileSize)(downloadPath))) {
                            return;
                        }
                        else {
                            forceRetry = true;
                        }
                    }
                    catch (error) {
                        // retry on error, most likely streams were corrupted
                        forceRetry = true;
                    }
                }
                if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) {
                    core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`);
                    resetDestinationStream(downloadPath);
                    // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff
                    (0, utils_1.isThrottledStatusCode)(response.message.statusCode)
                        ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers))
                        : yield backOff();
                }
                else {
                    // Some unexpected response code, fail immediately and stop the download
                    (0, utils_1.displayHttpDiagnostics)(response);
                    return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`));
                }
            }
        });
    }
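    // Observational note (not in the original bundle): when the response is gzip-encoded, the
    // content-length check via isAllBytesReceived is skipped (see the `isGzipped || ...` condition
    // above), because the bytes written to disk are the decompressed stream and will not match the
    // compressed content-length header. The ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION environment
    // variable disables the size check entirely.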
    /**
     * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary
     * @param response the http response received when downloading a file
     * @param destinationStream the stream where the file should be written to
     * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
     */
    pipeResponseToFile(response, destinationStream, isGzip) {
        return __awaiter(this, void 0, void 0, function* () {
            yield new Promise((resolve, reject) => {
                if (isGzip) {
                    const gunzip = zlib.createGunzip();
                    response.message
                        .on('error', error => {
                        core.info(`An error occurred while attempting to read the response stream`);
                        gunzip.close();
                        destinationStream.close();
                        reject(error);
                    })
                        .pipe(gunzip)
                        .on('error', error => {
                        core.info(`An error occurred while attempting to decompress the response stream`);
                        destinationStream.close();
                        reject(error);
                    })
                        .pipe(destinationStream)
                        .on('close', () => {
                        resolve();
                    })
                        .on('error', error => {
                        core.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
                        reject(error);
                    });
                }
                else {
                    response.message
                        .on('error', error => {
                        core.info(`An error occurred while attempting to read the response stream`);
                        destinationStream.close();
                        reject(error);
                    })
                        .pipe(destinationStream)
                        .on('close', () => {
                        resolve();
                    })
                        .on('error', error => {
                        core.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
                        reject(error);
                    });
                }
            });
            return;
        });
    }
}
exports.DownloadHttpClient = DownloadHttpClient;
//# sourceMappingURL=download-http-client.js.map

/***/ }),

/***/ 5686:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getDownloadSpecification = void 0;
const path = __importStar(__nccwpck_require__(1017));
/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe that files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 */
function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) {
    // use a set for the directory paths so that there are no duplicates
    const directories = new Set();
    const specifications = {
        rootDownloadLocation: includeRootDirectory
            ? path.join(downloadPath, artifactName)
            : downloadPath,
        directoryStructure: [],
        emptyFilesToCreate: [],
        filesToDownload: []
    };
    for (const entry of artifactEntries) {
        // Ignore artifacts in the container that don't begin with the same name
        if (entry.path.startsWith(`${artifactName}/`) ||
            entry.path.startsWith(`${artifactName}\\`)) {
            // normalize all separators to the local OS
            const normalizedPathEntry = path.normalize(entry.path);
            // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path
            const filePath = path.join(downloadPath, includeRootDirectory
                ? normalizedPathEntry
                : normalizedPathEntry.replace(artifactName, ''));
            // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder'
            // itemType cannot be relied upon. The file must be used to determine the directory structure
            if (entry.itemType === 'file') {
                // Get the directories that we need to create from the filePath for each individual file
                directories.add(path.dirname(filePath));
                if (entry.fileLength === 0) {
                    // An empty file was uploaded, create the empty files locally so that no extra http calls are made
                    specifications.emptyFilesToCreate.push(filePath);
                }
                else {
                    specifications.filesToDownload.push({
                        sourceLocation: entry.contentLocation,
                        targetPath: filePath
                    });
                }
            }
        }
    }
    specifications.directoryStructure = Array.from(directories);
    return specifications;
}
exports.getDownloadSpecification = getDownloadSpecification;
//# sourceMappingURL=download-specification.js.map
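// Illustrative walk-through of getDownloadSpecification (comment only; the inputs are made up):
// given artifactName = 'my-artifact', downloadPath = '/home/runner/work', includeRootDirectory = false,
// an entry { path: 'my-artifact/dir/report.txt', itemType: 'file', fileLength: 120, contentLocation: <url> }
// yields a filesToDownload target of path.join('/home/runner/work', path.normalize('dir/report.txt')),
// i.e. '/home/runner/work/dir/report.txt' on Linux, and '/home/runner/work/dir' is added to
// directoryStructure. An entry with fileLength 0 would instead be listed in emptyFilesToCreate.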

/***/ }),

/***/ 6527:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.HttpManager = void 0;
const utils_1 = __nccwpck_require__(6327);
/**
 * Used for managing http clients during either upload or download
 */
class HttpManager {
    constructor(clientCount, userAgent) {
        if (clientCount < 1) {
            throw new Error('There must be at least one client');
        }
        this.userAgent = userAgent;
        this.clients = new Array(clientCount).fill((0, utils_1.createHttpClient)(userAgent));
    }
    getClient(index) {
        return this.clients[index];
    }
    // client disposal is necessary if a keep-alive connection is used to properly close the connection
    // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
    disposeAndReplaceClient(index) {
        this.clients[index].dispose();
        this.clients[index] = (0, utils_1.createHttpClient)(this.userAgent);
    }
    disposeAndReplaceAllClients() {
        for (const [index] of this.clients.entries()) {
            this.disposeAndReplaceClient(index);
        }
    }
}
exports.HttpManager = HttpManager;
//# sourceMappingURL=http-manager.js.map

/***/ }),
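// Observational note (not in the original bundle): new Array(clientCount).fill(createHttpClient(...))
// in the HttpManager constructor fills every slot with the same client instance, since fill() copies
// a single reference; slots only become distinct clients once disposeAndReplaceClient or
// disposeAndReplaceAllClients replaces them individually.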

/***/ 7398:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checkArtifactFilePath = exports.checkArtifactName = void 0;
const core_1 = __nccwpck_require__(2186);
/**
 * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
 * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
 * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
 * individual filesystem/platform will not be supported on all fileSystems/platforms
 *
 * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
 */
const invalidArtifactFilePathCharacters = new Map([
    ['"', ' Double quote "'],
    [':', ' Colon :'],
    ['<', ' Less than <'],
    ['>', ' Greater than >'],
    ['|', ' Vertical bar |'],
    ['*', ' Asterisk *'],
    ['?', ' Question mark ?'],
    ['\r', ' Carriage return \\r'],
    ['\n', ' Line feed \\n']
]);
const invalidArtifactNameCharacters = new Map([
    ...invalidArtifactFilePathCharacters,
    ['\\', ' Backslash \\'],
    ['/', ' Forward slash /']
]);
/**
 * Scans the name of the artifact to make sure there are no illegal characters
 */
function checkArtifactName(name) {
    if (!name) {
        throw new Error(`Artifact name: ${name}, is incorrectly provided`);
    }
    for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) {
        if (name.includes(invalidCharacterKey)) {
            throw new Error(`Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter}
Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()}
These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`);
        }
    }
    (0, core_1.info)(`Artifact name is valid!`);
}
exports.checkArtifactName = checkArtifactName;
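// Illustrative examples for checkArtifactName above (comment only):
//   checkArtifactName('my-artifact_v1.2')  -> logs "Artifact name is valid!"
//   checkArtifactName('logs/latest')       -> throws, because '/' is in invalidArtifactNameCharacters
//   checkArtifactFilePath('logs/latest.txt') below accepts '/' since only the file-path map applies to paths.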
/**
 * Scans the name of the filePath used to make sure there are no illegal characters
 */
function checkArtifactFilePath(path) {
    if (!path) {
        throw new Error(`Artifact path: ${path}, is incorrectly provided`);
    }
    for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) {
        if (path.includes(invalidCharacterKey)) {
            throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter}
Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()}
The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
`);
        }
    }
}
exports.checkArtifactFilePath = checkArtifactFilePath;
//# sourceMappingURL=path-and-artifact-name-validation.js.map

/***/ }),
2022-05-20 05:17:44 +08:00
2023-01-06 05:27:11 +08:00
/***/ 755 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2022-05-20 05:17:44 +08:00
2023-01-06 05:27:11 +08:00
"use strict" ;
2022-05-20 05:17:44 +08:00
2023-01-06 05:27:11 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2023-09-07 02:42:11 +08:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-01-06 05:27:11 +08:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2023-09-07 02:42:11 +08:00
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
2023-01-06 05:27:11 +08:00
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . retryHttpClientRequest = exports . retry = void 0 ;
const utils _1 = _ _nccwpck _require _ _ ( 6327 ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const config _variables _1 = _ _nccwpck _require _ _ ( 2222 ) ;
function retry ( name , operation , customErrorMessages , maxAttempts ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let response = undefined ;
let statusCode = undefined ;
let isRetryable = false ;
let errorMessage = '' ;
let customErrorInformation = undefined ;
let attempt = 1 ;
while ( attempt <= maxAttempts ) {
try {
response = yield operation ( ) ;
statusCode = response . message . statusCode ;
2023-09-07 02:42:11 +08:00
if ( ( 0 , utils _1 . isSuccessStatusCode ) ( statusCode ) ) {
2023-01-06 05:27:11 +08:00
return response ;
}
// Extra error information that we want to display if a particular response code is hit
if ( statusCode ) {
customErrorInformation = customErrorMessages . get ( statusCode ) ;
}
2023-09-07 02:42:11 +08:00
isRetryable = ( 0 , utils _1 . isRetryableStatusCode ) ( statusCode ) ;
2023-01-06 05:27:11 +08:00
errorMessage = ` Artifact service responded with ${ statusCode } ` ;
}
catch ( error ) {
isRetryable = true ;
errorMessage = error . message ;
}
if ( ! isRetryable ) {
core . info ( ` ${ name } - Error is not retryable ` ) ;
if ( response ) {
2023-09-07 02:42:11 +08:00
( 0 , utils _1 . displayHttpDiagnostics ) ( response ) ;
2023-01-06 05:27:11 +08:00
}
break ;
}
core . info ( ` ${ name } - Attempt ${ attempt } of ${ maxAttempts } failed with error: ${ errorMessage } ` ) ;
2023-09-07 02:42:11 +08:00
yield ( 0 , utils _1 . sleep ) ( ( 0 , utils _1 . getExponentialRetryTimeInMilliseconds ) ( attempt ) ) ;
2023-01-06 05:27:11 +08:00
attempt ++ ;
}
if ( response ) {
2023-09-07 02:42:11 +08:00
( 0 , utils _1 . displayHttpDiagnostics ) ( response ) ;
2023-01-06 05:27:11 +08:00
}
if ( customErrorInformation ) {
throw Error ( ` ${ name } failed: ${ customErrorInformation } ` ) ;
}
throw Error ( ` ${ name } failed: ${ errorMessage } ` ) ;
} ) ;
}
exports . retry = retry ;
function retryHttpClientRequest ( name , method , customErrorMessages = new Map ( ) , maxAttempts = ( 0 , config _variables _1 . getRetryLimit ) ( ) ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield retry ( name , method , customErrorMessages , maxAttempts ) ;
} ) ;
}
exports . retryHttpClientRequest = retryHttpClientRequest ;
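// Usage sketch (illustrative only, never invoked by this bundle): retryHttpClientRequest wraps a single
// HttpClient call so that retryable status codes and thrown errors are re-attempted with exponential backoff.
// The `client` and `url` parameters below are hypothetical placeholders supplied by the caller.
async function exampleRetryHttpClientRequestUsage(client, url) {
    // extra text to surface if the service answers 404 for this particular call
    const customErrorMessages = new Map([[404, 'The requested resource was not found']]);
    // resolves with the first successful response, or throws after the configured retry limit
    const response = await retryHttpClientRequest('Example request', () => client.get(url), customErrorMessages);
    return await response.readBody();
}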
//# sourceMappingURL=requestUtils.js.map
/***/ } ) ,
/***/ 9081 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . StatusReporter = void 0 ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
/ * *
* Status Reporter that displays information about the progress / status of an artifact that is being uploaded or downloaded
*
* The overall status is printed on a fixed interval controlled by the displayFrequencyInMilliseconds constructor argument
* If a large file is being uploaded in chunks , extra per-file information can also be displayed using the updateLargeFileStatus function
* /
class StatusReporter {
constructor ( displayFrequencyInMilliseconds ) {
this . totalNumberOfFilesToProcess = 0 ;
this . processedCount = 0 ;
this . largeFiles = new Map ( ) ;
this . totalFileStatus = undefined ;
this . displayFrequencyInMilliseconds = displayFrequencyInMilliseconds ;
}
setTotalNumberOfFilesToProcess ( fileTotal ) {
this . totalNumberOfFilesToProcess = fileTotal ;
this . processedCount = 0 ;
}
start ( ) {
// displays information about the total upload/download status
this . totalFileStatus = setInterval ( ( ) => {
// display 1 decimal place without any rounding
const percentage = this . formatPercentage ( this . processedCount , this . totalNumberOfFilesToProcess ) ;
( 0 , core _1 . info ) ( ` Total file count: ${ this . totalNumberOfFilesToProcess } ---- Processed file # ${ this . processedCount } ( ${ percentage . slice ( 0 , percentage . indexOf ( '.' ) + 2 ) } %) ` ) ;
} , this . displayFrequencyInMilliseconds ) ;
}
// if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload
updateLargeFileStatus ( fileName , chunkStartIndex , chunkEndIndex , totalUploadFileSize ) {
// display 1 decimal place without any rounding
const percentage = this . formatPercentage ( chunkEndIndex , totalUploadFileSize ) ;
( 0 , core _1 . info ) ( ` Uploaded ${ fileName } ( ${ percentage . slice ( 0 , percentage . indexOf ( '.' ) + 2 ) } %) bytes ${ chunkStartIndex } : ${ chunkEndIndex } ` ) ;
}
stop ( ) {
if ( this . totalFileStatus ) {
clearInterval ( this . totalFileStatus ) ;
}
}
incrementProcessedCount ( ) {
this . processedCount ++ ;
}
formatPercentage ( numerator , denominator ) {
// toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed
return ( ( numerator / denominator ) * 100 ) . toFixed ( 4 ) . toString ( ) ;
}
}
exports . StatusReporter = StatusReporter ;
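// Usage sketch (illustrative only, never invoked by this bundle): a StatusReporter prints the overall
// progress on a fixed interval until stop() is called. The 10000ms frequency mirrors the value used by
// the upload/download clients but is otherwise arbitrary.
function exampleStatusReporterUsage(totalFiles) {
    const reporter = new StatusReporter(10000);
    reporter.setTotalNumberOfFilesToProcess(totalFiles);
    reporter.start();
    // call incrementProcessedCount() after each file finishes, then stop() once everything is processed
    reporter.incrementProcessedCount();
    reporter.stop();
}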
//# sourceMappingURL=status-reporter.js.map
/***/ } ) ,
/***/ 606 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _asyncValues = ( this && this . _ _asyncValues ) || function ( o ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
return m ? m . call ( o ) : ( o = typeof _ _values === "function" ? _ _values ( o ) : o [ Symbol . iterator ] ( ) , i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ) ;
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . createGZipFileInBuffer = exports . createGZipFileOnDisk = void 0 ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const zlib = _ _importStar ( _ _nccwpck _require _ _ ( 9796 ) ) ;
const util _1 = _ _nccwpck _require _ _ ( 3837 ) ;
const stat = ( 0 , util _1 . promisify ) ( fs . stat ) ;
/ * *
* GZipping files that are already compressed will likely not yield further size reductions . Creating large temporary gzip
* files would then just waste time before they are ultimately discarded ( especially for very large files ) .
* If any of these types of files are encountered then on - disk gzip creation will be skipped and the original file will be uploaded as - is
* /
const gzipExemptFileExtensions = [
'.gz' ,
'.gzip' ,
'.tgz' ,
'.taz' ,
'.Z' ,
'.taZ' ,
'.bz2' ,
'.tbz' ,
'.tbz2' ,
'.tz2' ,
'.lz' ,
'.lzma' ,
'.tlz' ,
'.lzo' ,
'.xz' ,
'.txz' ,
'.zst' ,
'.zstd' ,
'.tzst' ,
'.zip' ,
'.7z' // 7ZIP
] ;
/ * *
* Creates a Gzip compressed file of an original file at the provided temporary filepath location
* @ param { string } originalFilePath filepath of whatever will be compressed . The original file will be unmodified
* @ param { string } tempFilePath the location of where the Gzip file will be created
* @ returns the size of gzip file that gets created
* /
function createGZipFileOnDisk ( originalFilePath , tempFilePath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
for ( const gzipExemptExtension of gzipExemptFileExtensions ) {
if ( originalFilePath . endsWith ( gzipExemptExtension ) ) {
// return a really large number so that the original file gets uploaded
return Number . MAX _SAFE _INTEGER ;
}
}
return new Promise ( ( resolve , reject ) => {
const inputStream = fs . createReadStream ( originalFilePath ) ;
const gzip = zlib . createGzip ( ) ;
const outputStream = fs . createWriteStream ( tempFilePath ) ;
inputStream . pipe ( gzip ) . pipe ( outputStream ) ;
outputStream . on ( 'finish' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload
const size = ( yield stat ( tempFilePath ) ) . size ;
resolve ( size ) ;
} ) ) ;
outputStream . on ( 'error' , error => {
// eslint-disable-next-line no-console
console . log ( error ) ;
reject ( error ) ;
} ) ;
} ) ;
} ) ;
}
exports . createGZipFileOnDisk = createGZipFileOnDisk ;
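// Usage sketch (illustrative only, never invoked by this bundle): for files that are not on the exemption
// list, createGZipFileOnDisk writes the compressed copy to a caller-supplied temp path and resolves with its
// size; exempt extensions resolve with Number.MAX_SAFE_INTEGER so the caller uploads the original file.
// The paths below are hypothetical placeholders.
async function exampleCreateGZipFileOnDiskUsage() {
    const gzipSize = await createGZipFileOnDisk('/tmp/report.txt', '/tmp/report.txt.gz');
    // a gzipSize smaller than the original file size means the compressed copy should be uploaded
    return gzipSize;
}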
/ * *
* Creates a GZip file in memory using a buffer . Should be used for smaller files to reduce disk I / O
* @ param originalFilePath the path to the original file that is being GZipped
* @ returns a buffer with the GZip file
* /
function createGZipFileInBuffer ( originalFilePath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
var _a , e _1 , _b , _c ;
const inputStream = fs . createReadStream ( originalFilePath ) ;
const gzip = zlib . createGzip ( ) ;
inputStream . pipe ( gzip ) ;
// read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043
const chunks = [ ] ;
try {
for ( var _d = true , gzip _1 = _ _asyncValues ( gzip ) , gzip _1 _1 ; gzip _1 _1 = yield gzip _1 . next ( ) , _a = gzip _1 _1 . done , ! _a ; ) {
_c = gzip _1 _1 . value ;
_d = false ;
try {
const chunk = _c ;
chunks . push ( chunk ) ;
}
finally {
_d = true ;
}
}
}
catch ( e _1 _1 ) { e _1 = { error : e _1 _1 } ; }
finally {
try {
if ( ! _d && ! _a && ( _b = gzip _1 . return ) ) yield _b . call ( gzip _1 ) ;
}
finally { if ( e _1 ) throw e _1 . error ; }
}
resolve ( Buffer . concat ( chunks ) ) ;
} ) ) ;
} ) ;
}
exports . createGZipFileInBuffer = createGZipFileInBuffer ;
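// Usage sketch (illustrative only, never invoked by this bundle): for small files the gzip output is kept
// in memory instead of being written to disk. The path below is a hypothetical placeholder.
async function exampleCreateGZipFileInBufferUsage() {
    const gzipBuffer = await createGZipFileInBuffer('/tmp/small-file.txt');
    // compare gzipBuffer.byteLength with the original size to decide whether compression helped
    return gzipBuffer.byteLength;
}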
//# sourceMappingURL=upload-gzip.js.map
/***/ } ) ,
/***/ 4354 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . UploadHttpClient = void 0 ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const tmp = _ _importStar ( _ _nccwpck _require _ _ ( 8065 ) ) ;
const stream = _ _importStar ( _ _nccwpck _require _ _ ( 2781 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 6327 ) ;
const config _variables _1 = _ _nccwpck _require _ _ ( 2222 ) ;
const util _1 = _ _nccwpck _require _ _ ( 3837 ) ;
const url _1 = _ _nccwpck _require _ _ ( 7310 ) ;
const perf _hooks _1 = _ _nccwpck _require _ _ ( 4074 ) ;
const status _reporter _1 = _ _nccwpck _require _ _ ( 9081 ) ;
const http _client _1 = _ _nccwpck _require _ _ ( 6255 ) ;
const http _manager _1 = _ _nccwpck _require _ _ ( 6527 ) ;
const upload _gzip _1 = _ _nccwpck _require _ _ ( 606 ) ;
const requestUtils _1 = _ _nccwpck _require _ _ ( 755 ) ;
const stat = ( 0 , util _1 . promisify ) ( fs . stat ) ;
class UploadHttpClient {
constructor ( ) {
this . uploadHttpManager = new http _manager _1 . HttpManager ( ( 0 , config _variables _1 . getUploadFileConcurrency ) ( ) , '@actions/artifact-upload' ) ;
this . statusReporter = new status _reporter _1 . StatusReporter ( 10000 ) ;
}
/ * *
* Creates a file container for the new artifact in the remote blob storage / file service
* @ param { string } artifactName Name of the artifact being created
* @ returns The response from the Artifact Service if the file container was successfully created
* /
createArtifactInFileContainer ( artifactName , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const parameters = {
Type : 'actions_storage' ,
Name : artifactName
} ;
// calculate retention period
if ( options && options . retentionDays ) {
const maxRetentionStr = ( 0 , config _variables _1 . getRetentionDays ) ( ) ;
parameters . RetentionDays = ( 0 , utils _1 . getProperRetention ) ( options . retentionDays , maxRetentionStr ) ;
}
const data = JSON . stringify ( parameters , null , 2 ) ;
const artifactUrl = ( 0 , utils _1 . getArtifactUrl ) ( ) ;
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
const client = this . uploadHttpManager . getClient ( 0 ) ;
const headers = ( 0 , utils _1 . getUploadHeaders ) ( 'application/json' , false ) ;
// Extra information to display when a particular HTTP code is returned
// If a 403 is returned when trying to create a file container, the customer has exceeded
// their storage quota so no new artifact containers can be created
const customErrorMessages = new Map ( [
[
http _client _1 . HttpCodes . Forbidden ,
( 0 , config _variables _1 . isGhes ) ( )
? 'Please reference [Enabling GitHub Actions for GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.8/admin/github-actions/enabling-github-actions-for-github-enterprise-server) to ensure Actions storage is configured correctly.'
: 'Artifact storage quota has been hit. Unable to upload any new artifacts'
] ,
[
http _client _1 . HttpCodes . BadRequest ,
` The artifact name ${ artifactName } is not valid. Request URL ${ artifactUrl } `
]
] ) ;
const response = yield ( 0 , requestUtils _1 . retryHttpClientRequest ) ( 'Create Artifact Container' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return client . post ( artifactUrl , data , headers ) ; } ) , customErrorMessages ) ;
const body = yield response . readBody ( ) ;
return JSON . parse ( body ) ;
} ) ;
}
/ * *
* Concurrently upload all of the files in chunks
* @ param { string } uploadUrl Base Url for the artifact that was created
* @ param { SearchResult [ ] } filesToUpload A list of information about the files being uploaded
* @ returns The size of all the files uploaded in bytes
* /
uploadArtifactToFileContainer ( uploadUrl , filesToUpload , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-09-07 02:42:11 +08:00
const FILE _CONCURRENCY = ( 0 , config _variables _1 . getUploadFileConcurrency ) ( ) ;
const MAX _CHUNK _SIZE = ( 0 , config _variables _1 . getUploadChunkSize ) ( ) ;
2023-01-06 05:27:11 +08:00
core . debug ( ` File Concurrency: ${ FILE _CONCURRENCY } , and Chunk Size: ${ MAX _CHUNK _SIZE } ` ) ;
const parameters = [ ] ;
// by default, file uploads will continue if there is an error unless specified differently in the options
let continueOnError = true ;
if ( options ) {
if ( options . continueOnError === false ) {
continueOnError = false ;
}
}
// prepare the necessary parameters to upload all the files
for ( const file of filesToUpload ) {
const resourceUrl = new url _1 . URL ( uploadUrl ) ;
resourceUrl . searchParams . append ( 'itemPath' , file . uploadFilePath ) ;
parameters . push ( {
file : file . absoluteFilePath ,
resourceUrl : resourceUrl . toString ( ) ,
maxChunkSize : MAX _CHUNK _SIZE ,
continueOnError
} ) ;
}
const parallelUploads = [ ... new Array ( FILE _CONCURRENCY ) . keys ( ) ] ;
const failedItemsToReport = [ ] ;
let currentFile = 0 ;
let completedFiles = 0 ;
let uploadFileSize = 0 ;
let totalFileSize = 0 ;
let abortPendingFileUploads = false ;
this . statusReporter . setTotalNumberOfFilesToProcess ( filesToUpload . length ) ;
this . statusReporter . start ( ) ;
// only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
yield Promise . all ( parallelUploads . map ( ( index ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
while ( currentFile < filesToUpload . length ) {
const currentFileParameters = parameters [ currentFile ] ;
currentFile += 1 ;
if ( abortPendingFileUploads ) {
failedItemsToReport . push ( currentFileParameters . file ) ;
continue ;
}
const startTime = perf _hooks _1 . performance . now ( ) ;
const uploadFileResult = yield this . uploadFileAsync ( index , currentFileParameters ) ;
if ( core . isDebug ( ) ) {
core . debug ( ` File: ${ ++ completedFiles } / ${ filesToUpload . length } . ${ currentFileParameters . file } took ${ ( perf _hooks _1 . performance . now ( ) - startTime ) . toFixed ( 3 ) } milliseconds to finish upload ` ) ;
}
uploadFileSize += uploadFileResult . successfulUploadSize ;
totalFileSize += uploadFileResult . totalSize ;
if ( uploadFileResult . isSuccess === false ) {
failedItemsToReport . push ( currentFileParameters . file ) ;
if ( ! continueOnError ) {
// fail fast
core . error ( ` aborting artifact upload ` ) ;
abortPendingFileUploads = true ;
}
}
this . statusReporter . incrementProcessedCount ( ) ;
}
} ) ) ) ;
this . statusReporter . stop ( ) ;
// done uploading, safety dispose all connections
this . uploadHttpManager . disposeAndReplaceAllClients ( ) ;
core . info ( ` Total size of all the files uploaded is ${ uploadFileSize } bytes ` ) ;
return {
uploadSize : uploadFileSize ,
totalSize : totalFileSize ,
failedItems : failedItemsToReport
} ;
} ) ;
}
/ * *
* Asynchronously uploads a file . The file is compressed and uploaded using GZip if it is determined to save space .
* If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
* @ param { number } httpClientIndex The index of the httpClient that is being used to make all of the calls
* @ param { UploadFileParameters } parameters Information about the file that needs to be uploaded
* @ returns The size of the file that was uploaded in bytes along with any failed uploads
* /
uploadFileAsync ( httpClientIndex , parameters ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const fileStat = yield stat ( parameters . file ) ;
const totalFileSize = fileStat . size ;
const isFIFO = fileStat . isFIFO ( ) ;
let offset = 0 ;
let isUploadSuccessful = true ;
let failedChunkSizes = 0 ;
let uploadFileSize = 0 ;
let isGzip = true ;
// if the file being uploaded is less than 64k in size, an in-memory buffer is used to create the GZip file to increase throughput and minimize disk I/O
// with named pipes the file size is reported as zero, so in that case the file is not read into memory
if ( ! isFIFO && totalFileSize < 65536 ) {
core . debug ( ` ${ parameters . file } is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size ` ) ;
const buffer = yield ( 0 , upload _gzip _1 . createGZipFileInBuffer ) ( parameters . file ) ;
// An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
// it will not properly get reset to the start of the stream if a chunk upload needs to be retried
let openUploadStream ;
if ( totalFileSize < buffer . byteLength ) {
// compression did not help with reducing the size, use a readable stream from the original file for upload
core . debug ( ` The gzip file created for ${ parameters . file } did not help with reducing the size of the file. The original file will be uploaded as-is ` ) ;
openUploadStream = ( ) => fs . createReadStream ( parameters . file ) ;
isGzip = false ;
uploadFileSize = totalFileSize ;
}
else {
// create a readable stream using a PassThrough stream that is both readable and writable
core . debug ( ` A gzip file created for ${ parameters . file } helped with reducing the size of the original file. The file will be uploaded using gzip. ` ) ;
openUploadStream = ( ) => {
const passThrough = new stream . PassThrough ( ) ;
passThrough . end ( buffer ) ;
return passThrough ;
} ;
uploadFileSize = buffer . byteLength ;
}
const result = yield this . uploadChunk ( httpClientIndex , parameters . resourceUrl , openUploadStream , 0 , uploadFileSize - 1 , uploadFileSize , isGzip , totalFileSize ) ;
if ( ! result ) {
// chunk failed to upload
isUploadSuccessful = false ;
failedChunkSizes += uploadFileSize ;
core . warning ( ` Aborting upload for ${ parameters . file } due to failure ` ) ;
}
return {
isSuccess : isUploadSuccessful ,
successfulUploadSize : uploadFileSize - failedChunkSizes ,
totalSize : totalFileSize
} ;
}
else {
// the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
// npm tmp-promise package and this file gets used to create a GZipped file
const tempFile = yield tmp . file ( ) ;
core . debug ( ` ${ parameters . file } is greater than 64k in size. Creating a gzip file on-disk ${ tempFile . path } to potentially reduce the upload size ` ) ;
// create a GZip file of the original file being uploaded, the original file should not be modified in any way
uploadFileSize = yield ( 0 , upload _gzip _1 . createGZipFileOnDisk ) ( parameters . file , tempFile . path ) ;
let uploadFilePath = tempFile . path ;
// compression did not help with size reduction, use the original file for upload and delete the temp GZip file
// for named pipes totalFileSize is zero, this assumes compression did help
if ( ! isFIFO && totalFileSize < uploadFileSize ) {
core . debug ( ` The gzip file created for ${ parameters . file } did not help with reducing the size of the file. The original file will be uploaded as-is ` ) ;
uploadFileSize = totalFileSize ;
uploadFilePath = parameters . file ;
isGzip = false ;
}
else {
core . debug ( ` The gzip file created for ${ parameters . file } is smaller than the original file. The file will be uploaded using gzip. ` ) ;
}
let abortFileUpload = false ;
// upload only a single chunk at a time
while ( offset < uploadFileSize ) {
const chunkSize = Math . min ( uploadFileSize - offset , parameters . maxChunkSize ) ;
const startChunkIndex = offset ;
const endChunkIndex = offset + chunkSize - 1 ;
offset += parameters . maxChunkSize ;
if ( abortFileUpload ) {
// if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
failedChunkSizes += chunkSize ;
continue ;
}
const result = yield this . uploadChunk ( httpClientIndex , parameters . resourceUrl , ( ) => fs . createReadStream ( uploadFilePath , {
start : startChunkIndex ,
end : endChunkIndex ,
autoClose : false
} ) , startChunkIndex , endChunkIndex , uploadFileSize , isGzip , totalFileSize ) ;
if ( ! result ) {
// Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
// successfully uploaded so the server may report a different size for what was uploaded
isUploadSuccessful = false ;
failedChunkSizes += chunkSize ;
core . warning ( ` Aborting upload for ${ parameters . file } due to failure ` ) ;
abortFileUpload = true ;
}
else {
// if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status
if ( uploadFileSize > 8388608 ) {
this . statusReporter . updateLargeFileStatus ( parameters . file , startChunkIndex , endChunkIndex , uploadFileSize ) ;
}
}
}
// Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
// calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
core . debug ( ` deleting temporary gzip file ${ tempFile . path } ` ) ;
yield tempFile . cleanup ( ) ;
return {
isSuccess : isUploadSuccessful ,
successfulUploadSize : uploadFileSize - failedChunkSizes ,
totalSize : totalFileSize
} ;
}
} ) ;
}
/ * *
* Uploads a chunk of an individual file to the specified resourceUrl . If the upload fails and the status code
* indicates a retryable status , the chunk upload is attempted again
* @ param { number } httpClientIndex The index of the httpClient being used to make all the necessary calls
* @ param { string } resourceUrl Url of the resource that the chunk will be uploaded to
* @ param { NodeJS . ReadableStream } openStream Stream of the file that will be uploaded
* @ param { number } start Starting byte index of file that the chunk belongs to
* @ param { number } end Ending byte index of file that the chunk belongs to
* @ param { number } uploadFileSize Total size of the file in bytes that is being uploaded
* @ param { boolean } isGzip Denotes if we are uploading a Gzip compressed stream
* @ param { number } totalFileSize Original total size of the file that is being uploaded
* @ returns if the chunk was successfully uploaded
* /
uploadChunk ( httpClientIndex , resourceUrl , openStream , start , end , uploadFileSize , isGzip , totalFileSize ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// open a new stream and read it to compute the digest
const digest = yield ( 0 , utils _1 . digestForStream ) ( openStream ( ) ) ;
// prepare all the necessary headers before making any http call
const headers = ( 0 , utils _1 . getUploadHeaders ) ( 'application/octet-stream' , true , isGzip , totalFileSize , end - start + 1 , ( 0 , utils _1 . getContentRange ) ( start , end , uploadFileSize ) , digest ) ;
const uploadChunkRequest = ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const client = this . uploadHttpManager . getClient ( httpClientIndex ) ;
return yield client . sendStream ( 'PUT' , resourceUrl , openStream ( ) , headers ) ;
} ) ;
let retryCount = 0 ;
const retryLimit = ( 0 , config _variables _1 . getRetryLimit ) ( ) ;
// Increments the current retry count and then checks if the retry limit has been reached
// If there have been too many retries, fail so the download stops
const incrementAndCheckRetryLimit = ( response ) => {
retryCount ++ ;
if ( retryCount > retryLimit ) {
if ( response ) {
( 0 , utils _1 . displayHttpDiagnostics ) ( response ) ;
}
core . info ( ` Retry limit has been reached for chunk at offset ${ start } to ${ resourceUrl } ` ) ;
return true ;
}
return false ;
} ;
const backOff = ( retryAfterValue ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
this . uploadHttpManager . disposeAndReplaceClient ( httpClientIndex ) ;
if ( retryAfterValue ) {
core . info ( ` Backoff due to too many requests, retry # ${ retryCount } . Waiting for ${ retryAfterValue } milliseconds before continuing the upload ` ) ;
yield ( 0 , utils _1 . sleep ) ( retryAfterValue ) ;
}
else {
const backoffTime = ( 0 , utils _1 . getExponentialRetryTimeInMilliseconds ) ( retryCount ) ;
core . info ( ` Exponential backoff for retry # ${ retryCount } . Waiting for ${ backoffTime } milliseconds before continuing the upload at offset ${ start } ` ) ;
yield ( 0 , utils _1 . sleep ) ( backoffTime ) ;
}
core . info ( ` Finished backoff for retry # ${ retryCount } , continuing with upload ` ) ;
return ;
} ) ;
// allow for failed chunks to be retried multiple times
while ( retryCount <= retryLimit ) {
let response ;
try {
response = yield uploadChunkRequest ( ) ;
}
catch ( error ) {
// if an error is caught, it is usually indicative of a timeout so retry the upload
core . info ( ` An error has been caught http-client index ${ httpClientIndex } , retrying the upload ` ) ;
// eslint-disable-next-line no-console
console . log ( error ) ;
if ( incrementAndCheckRetryLimit ( ) ) {
return false ;
}
yield backOff ( ) ;
continue ;
}
// Always read the body of the response. There is potential for a resource leak if the body is not read which will
// result in the connection remaining open along with unintended consequences when trying to dispose of the client
yield response . readBody ( ) ;
if ( ( 0 , utils _1 . isSuccessStatusCode ) ( response . message . statusCode ) ) {
return true ;
}
else if ( ( 0 , utils _1 . isRetryableStatusCode ) ( response . message . statusCode ) ) {
core . info ( ` A ${ response . message . statusCode } status code has been received, will attempt to retry the upload ` ) ;
if ( incrementAndCheckRetryLimit ( response ) ) {
return false ;
}
( 0 , utils _1 . isThrottledStatusCode ) ( response . message . statusCode )
? yield backOff ( ( 0 , utils _1 . tryGetRetryAfterValueTimeInMilliseconds ) ( response . message . headers ) )
: yield backOff ( ) ;
}
else {
core . error ( ` Unexpected response. Unable to upload chunk to ${ resourceUrl } ` ) ;
( 0 , utils _1 . displayHttpDiagnostics ) ( response ) ;
return false ;
}
}
return false ;
} ) ;
}
/ * *
* Updates the size of the artifact from - 1 which was initially set when the container was first created for the artifact .
* Updating the size indicates that we are done uploading all the contents of the artifact
* /
patchArtifactSize ( size , artifactName ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const resourceUrl = new url _1 . URL ( ( 0 , utils _1 . getArtifactUrl ) ( ) ) ;
resourceUrl . searchParams . append ( 'artifactName' , artifactName ) ;
const parameters = { Size : size } ;
const data = JSON . stringify ( parameters , null , 2 ) ;
core . debug ( ` URL is ${ resourceUrl . toString ( ) } ` ) ;
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
const client = this . uploadHttpManager . getClient ( 0 ) ;
const headers = ( 0 , utils _1 . getUploadHeaders ) ( 'application/json' , false ) ;
// Extra information to display when a particular HTTP code is returned
const customErrorMessages = new Map ( [
[
http _client _1 . HttpCodes . NotFound ,
` An Artifact with the name ${ artifactName } was not found `
]
] ) ;
// TODO retry for all possible response codes, the artifact upload is pretty much complete so we should try to finish it at all costs
const response = yield ( 0 , requestUtils _1 . retryHttpClientRequest ) ( 'Finalize artifact upload' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return client . patch ( resourceUrl . toString ( ) , data , headers ) ; } ) , customErrorMessages ) ;
yield response . readBody ( ) ;
core . debug ( ` Artifact ${ artifactName } has been successfully uploaded, total size in bytes: ${ size } ` ) ;
} ) ;
}
}
exports . UploadHttpClient = UploadHttpClient ;
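// Usage sketch (illustrative only, never invoked by this bundle): the expected calling sequence is to create
// the file container, upload every file described by an upload specification, then patch the final artifact
// size so the service marks the upload as complete. `spec` is assumed to be the output of
// getUploadSpecification from module 183, and fileContainerResourceUrl is assumed to be present on the
// response returned by the artifact service when the container is created.
async function exampleUploadHttpClientUsage(artifactName, spec) {
    const uploadClient = new UploadHttpClient();
    const container = await uploadClient.createArtifactInFileContainer(artifactName, { retentionDays: 7 });
    const result = await uploadClient.uploadArtifactToFileContainer(container.fileContainerResourceUrl, spec, { continueOnError: false });
    await uploadClient.patchArtifactSize(result.totalSize, artifactName);
    // any files that could not be uploaded are reported back to the caller
    return result.failedItems;
}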
//# sourceMappingURL=upload-http-client.js.map
/***/ } ) ,
/***/ 183 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getUploadSpecification = void 0 ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
const path _1 = _ _nccwpck _require _ _ ( 1017 ) ;
const path _and _artifact _name _validation _1 = _ _nccwpck _require _ _ ( 7398 ) ;
/ * *
* Creates a specification that describes how each file that is part of the artifact will be uploaded
* @ param artifactName the name of the artifact being uploaded . Used during upload to denote where the artifact is stored on the server
* @ param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
* @ param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
* /
function getUploadSpecification ( artifactName , rootDirectory , artifactFiles ) {
// artifact name was checked earlier on, no need to check again
const specifications = [ ] ;
if ( ! fs . existsSync ( rootDirectory ) ) {
throw new Error ( ` Provided rootDirectory ${ rootDirectory } does not exist ` ) ;
}
if ( ! fs . statSync ( rootDirectory ) . isDirectory ( ) ) {
throw new Error ( ` Provided rootDirectory ${ rootDirectory } is not a valid directory ` ) ;
}
// Normalize and resolve, this allows for either absolute or relative paths to be used
rootDirectory = ( 0 , path _1 . normalize ) ( rootDirectory ) ;
rootDirectory = ( 0 , path _1 . resolve ) ( rootDirectory ) ;
/ *
Example to demonstrate behavior
Input :
artifactName : my - artifact
rootDirectory : '/home/user/files/plz-upload'
artifactFiles : [
'/home/user/files/plz-upload/file1.txt' ,
'/home/user/files/plz-upload/file2.txt' ,
'/home/user/files/plz-upload/dir/file3.txt'
]
Output :
specifications : [
[ '/home/user/files/plz-upload/file1.txt' , 'my-artifact/file1.txt' ] ,
[ '/home/user/files/plz-upload/file2.txt' , 'my-artifact/file2.txt' ] ,
[ '/home/user/files/plz-upload/dir/file3.txt' , 'my-artifact/dir/file3.txt' ]
]
* /
for ( let file of artifactFiles ) {
if ( ! fs . existsSync ( file ) ) {
throw new Error ( ` File ${ file } does not exist ` ) ;
}
if ( ! fs . statSync ( file ) . isDirectory ( ) ) {
// Normalize and resolve, this allows for either absolute or relative paths to be used
file = ( 0 , path _1 . normalize ) ( file ) ;
file = ( 0 , path _1 . resolve ) ( file ) ;
if ( ! file . startsWith ( rootDirectory ) ) {
throw new Error ( ` The rootDirectory: ${ rootDirectory } is not a parent directory of the file: ${ file } ` ) ;
}
// Check for forbidden characters in file paths that will be rejected during upload
const uploadPath = file . replace ( rootDirectory , '' ) ;
( 0 , path _and _artifact _name _validation _1 . checkArtifactFilePath ) ( uploadPath ) ;
/ *
uploadFilePath denotes where the file will be uploaded in the file container on the server . During a run , if multiple artifacts are uploaded , they will all
be saved in the same container . The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts
path . join handles all the following cases and would return 'artifact-name/file-to-upload.txt'
join ( 'artifact-name/' , 'file-to-upload.txt' )
join ( 'artifact-name/' , '/file-to-upload.txt' )
join ( 'artifact-name' , 'file-to-upload.txt' )
join ( 'artifact-name' , '/file-to-upload.txt' )
* /
specifications . push ( {
absoluteFilePath : file ,
uploadFilePath : ( 0 , path _1 . join ) ( artifactName , uploadPath )
} ) ;
}
else {
// Directories are rejected by the server during upload
( 0 , core _1 . debug ) ( ` Removing ${ file } from rawSearchResults because it is a directory ` ) ;
}
}
return specifications ;
}
exports . getUploadSpecification = getUploadSpecification ;
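// Usage sketch (illustrative only, never invoked by this bundle): the paths below are hypothetical
// placeholders and must exist on disk for the real call to succeed; directories listed in artifactFiles
// are skipped with a debug message.
function exampleGetUploadSpecificationUsage() {
    // each returned entry maps absoluteFilePath -> uploadFilePath, e.g. 'my-artifact/dir/file3.txt'
    return getUploadSpecification('my-artifact', '/home/user/files/plz-upload', [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
    ]);
}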
//# sourceMappingURL=upload-specification.js.map
/***/ } ) ,
/***/ 6327 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . digestForStream = exports . sleep = exports . getProperRetention = exports . rmFile = exports . getFileSize = exports . createEmptyFilesForArtifact = exports . createDirectoriesForArtifact = exports . displayHttpDiagnostics = exports . getArtifactUrl = exports . createHttpClient = exports . getUploadHeaders = exports . getDownloadHeaders = exports . getContentRange = exports . tryGetRetryAfterValueTimeInMilliseconds = exports . isThrottledStatusCode = exports . isRetryableStatusCode = exports . isForbiddenStatusCode = exports . isSuccessStatusCode = exports . getApiVersion = exports . parseEnvNumber = exports . getExponentialRetryTimeInMilliseconds = void 0 ;
const crypto _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
const http _client _1 = _ _nccwpck _require _ _ ( 6255 ) ;
const auth _1 = _ _nccwpck _require _ _ ( 5526 ) ;
const config _variables _1 = _ _nccwpck _require _ _ ( 2222 ) ;
const crc64 _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 3549 ) ) ;
/ * *
* Returns a retry time in milliseconds that exponentially gets larger
* depending on the amount of retries that have been attempted
* /
function getExponentialRetryTimeInMilliseconds ( retryCount ) {
if ( retryCount < 0 ) {
throw new Error ( 'RetryCount should not be negative' ) ;
}
else if ( retryCount === 0 ) {
return ( 0 , config _variables _1 . getInitialRetryIntervalInMilliseconds ) ( ) ;
}
const minTime = ( 0 , config _variables _1 . getInitialRetryIntervalInMilliseconds ) ( ) * ( 0 , config _variables _1 . getRetryMultiplier ) ( ) * retryCount ;
const maxTime = minTime * ( 0 , config _variables _1 . getRetryMultiplier ) ( ) ;
// returns a random number between the minTime (inclusive) and the maxTime (exclusive)
return Math . trunc ( Math . random ( ) * ( maxTime - minTime ) + minTime ) ;
}
exports . getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds ;
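// Usage sketch (illustrative only, never invoked by this bundle): the backoff window grows with the attempt
// number and a random point inside the window is returned so that concurrent uploads do not retry in lockstep.
async function exampleExponentialBackoffUsage(attempt) {
    const backoffTime = getExponentialRetryTimeInMilliseconds(attempt);
    // sleep is declared later in this module; function declarations are hoisted
    await sleep(backoffTime);
}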
/ * *
* Parses a env variable that is a number
* /
function parseEnvNumber ( key ) {
const value = Number ( process . env [ key ] ) ;
if ( Number . isNaN ( value ) || value < 0 ) {
return undefined ;
}
return value ;
}
exports . parseEnvNumber = parseEnvNumber ;
/ * *
* Various utility functions to help with the necessary API calls
* /
function getApiVersion ( ) {
return '6.0-preview' ;
}
exports . getApiVersion = getApiVersion ;
function isSuccessStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
return statusCode >= 200 && statusCode < 300 ;
}
exports . isSuccessStatusCode = isSuccessStatusCode ;
function isForbiddenStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
return statusCode === http _client _1 . HttpCodes . Forbidden ;
}
exports . isForbiddenStatusCode = isForbiddenStatusCode ;
function isRetryableStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
const retryableStatusCodes = [
http _client _1 . HttpCodes . BadGateway ,
http _client _1 . HttpCodes . GatewayTimeout ,
http _client _1 . HttpCodes . InternalServerError ,
http _client _1 . HttpCodes . ServiceUnavailable ,
http _client _1 . HttpCodes . TooManyRequests ,
413 // Payload Too Large
] ;
return retryableStatusCodes . includes ( statusCode ) ;
}
exports . isRetryableStatusCode = isRetryableStatusCode ;
function isThrottledStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
return statusCode === http _client _1 . HttpCodes . TooManyRequests ;
}
exports . isThrottledStatusCode = isThrottledStatusCode ;
/ * *
* Attempts to get the retry - after value from a set of http headers . The retry time
* is originally denoted in seconds , so if present , it is converted to milliseconds
* @ param headers all the headers received when making an http call
* /
function tryGetRetryAfterValueTimeInMilliseconds ( headers ) {
if ( headers [ 'retry-after' ] ) {
const retryTime = Number ( headers [ 'retry-after' ] ) ;
if ( ! isNaN ( retryTime ) ) {
( 0 , core _1 . info ) ( ` Retry-After header is present with a value of ${ retryTime } ` ) ;
return retryTime * 1000 ;
}
( 0 , core _1 . info ) ( ` Returned retry-after header value: ${ retryTime } is non-numeric and cannot be used ` ) ;
return undefined ;
}
( 0 , core _1 . info ) ( ` No retry-after header was found. Dumping all headers for diagnostic purposes ` ) ;
// eslint-disable-next-line no-console
console . log ( headers ) ;
return undefined ;
}
exports . tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds ;
function getContentRange ( start , end , total ) {
// Format: `bytes start-end/fileSize`
// start and end are inclusive
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/200
return ` bytes ${ start } - ${ end } / ${ total } ` ;
}
exports . getContentRange = getContentRange ;
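// Usage sketch (illustrative only, never invoked by this bundle): for the first 4MB chunk of a 10MB file the
// returned header value is `bytes 0-4194303/10485760`.
function exampleGetContentRangeUsage() {
    return getContentRange(0, 4194303, 10485760);
}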
/ * *
* Sets all the necessary headers when downloading an artifact
* @ param { string } contentType the type of content being sent in the request
* @ param { boolean } isKeepAlive is the same connection being used to make multiple calls
* @ param { boolean } acceptGzip can we accept a gzip encoded response
* @ returns appropriate headers to make a specific http call during artifact download
* /
function getDownloadHeaders ( contentType , isKeepAlive , acceptGzip ) {
const requestOptions = { } ;
if ( contentType ) {
requestOptions [ 'Content-Type' ] = contentType ;
}
if ( isKeepAlive ) {
requestOptions [ 'Connection' ] = 'Keep-Alive' ;
// keep alive for at least 10 seconds before closing the connection
requestOptions [ 'Keep-Alive' ] = '10' ;
}
if ( acceptGzip ) {
// if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header
requestOptions [ 'Accept-Encoding' ] = 'gzip' ;
requestOptions [ 'Accept' ] = ` application/octet-stream;api-version= ${ getApiVersion ( ) } ` ;
}
else {
// default to application/json if we are not working with gzip content
requestOptions [ 'Accept' ] = ` application/json;api-version= ${ getApiVersion ( ) } ` ;
}
return requestOptions ;
}
exports . getDownloadHeaders = getDownloadHeaders ;
/ * *
* Sets all the necessary headers when uploading an artifact
* @ param { string } contentType the type of content being uploaded
* @ param { boolean } isKeepAlive is the same connection being used to make multiple calls
* @ param { boolean } isGzip is the connection being used to upload GZip compressed content
* @ param { number } uncompressedLength the original size of the content if something is being uploaded that has been compressed
* @ param { number } contentLength the length of the content that is being uploaded
* @ param { string } contentRange the range of the content that is being uploaded
* @ returns appropriate headers to make a specific http call during artifact upload
* /
function getUploadHeaders ( contentType , isKeepAlive , isGzip , uncompressedLength , contentLength , contentRange , digest ) {
const requestOptions = { } ;
requestOptions [ 'Accept' ] = ` application/json;api-version= ${ getApiVersion ( ) } ` ;
if ( contentType ) {
requestOptions [ 'Content-Type' ] = contentType ;
}
if ( isKeepAlive ) {
requestOptions [ 'Connection' ] = 'Keep-Alive' ;
// keep alive for at least 10 seconds before closing the connection
requestOptions [ 'Keep-Alive' ] = '10' ;
}
if ( isGzip ) {
requestOptions [ 'Content-Encoding' ] = 'gzip' ;
requestOptions [ 'x-tfs-filelength' ] = uncompressedLength ;
}
if ( contentLength ) {
requestOptions [ 'Content-Length' ] = contentLength ;
}
if ( contentRange ) {
requestOptions [ 'Content-Range' ] = contentRange ;
}
if ( digest ) {
requestOptions [ 'x-actions-results-crc64' ] = digest . crc64 ;
requestOptions [ 'x-actions-results-md5' ] = digest . md5 ;
}
return requestOptions ;
}
exports . getUploadHeaders = getUploadHeaders ;
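// Usage sketch (illustrative only, never invoked by this bundle): headers for a gzip-compressed chunk upload
// over a keep-alive connection. The sizes are hypothetical and the digest values are placeholders.
function exampleGetUploadHeadersUsage() {
    return getUploadHeaders('application/octet-stream', true, true, 10485760, 4194304, getContentRange(0, 4194303, 10485760), { crc64: 'crc64-base64-placeholder', md5: 'md5-base64-placeholder' });
}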
function createHttpClient ( userAgent ) {
return new http _client _1 . HttpClient ( userAgent , [
new auth _1 . BearerCredentialHandler ( ( 0 , config _variables _1 . getRuntimeToken ) ( ) )
] ) ;
}
exports . createHttpClient = createHttpClient ;
function getArtifactUrl ( ) {
const artifactUrl = ` ${ ( 0 , config _variables _1 . getRuntimeUrl ) ( ) } _apis/pipelines/workflows/ ${ ( 0 , config _variables _1 . getWorkFlowRunId ) ( ) } /artifacts?api-version= ${ getApiVersion ( ) } ` ;
( 0 , core _1 . debug ) ( ` Artifact Url: ${ artifactUrl } ` ) ;
return artifactUrl ;
}
exports . getArtifactUrl = getArtifactUrl ;
/ * *
* Uh oh ! Something might have gone wrong during either upload or download . The IHttpClientResponse object contains information
* about the http call that was made by the actions http client . This information might be useful to display for diagnostic purposes , but
* this entire object is really big and most of the information is not really useful . This function takes the response object and displays only
* the information that we want .
*
* Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided .
* Other information such as the headers , the response code and message might be useful , so this is displayed .
* /
function displayHttpDiagnostics ( response ) {
( 0 , core _1 . info ) ( ` ##### Begin Diagnostic HTTP information #####
Status Code : $ { response . message . statusCode }
Status Message : $ { response . message . statusMessage }
Header Information : $ { JSON . stringify ( response . message . headers , undefined , 2 ) }
# # # # # # End Diagnostic HTTP information # # # # # # ` );
}
exports . displayHttpDiagnostics = displayHttpDiagnostics ;
function createDirectoriesForArtifact ( directories ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
for ( const directory of directories ) {
yield fs _1 . promises . mkdir ( directory , {
recursive : true
} ) ;
}
} ) ;
}
exports . createDirectoriesForArtifact = createDirectoriesForArtifact ;
function createEmptyFilesForArtifact ( emptyFilesToCreate ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
for ( const filePath of emptyFilesToCreate ) {
yield ( yield fs _1 . promises . open ( filePath , 'w' ) ) . close ( ) ;
}
} ) ;
}
exports . createEmptyFilesForArtifact = createEmptyFilesForArtifact ;
function getFileSize ( filePath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const stats = yield fs _1 . promises . stat ( filePath ) ;
( 0 , core _1 . debug ) ( ` ${ filePath } size:( ${ stats . size } ) blksize:( ${ stats . blksize } ) blocks:( ${ stats . blocks } ) ` ) ;
return stats . size ;
} ) ;
}
exports . getFileSize = getFileSize ;
function rmFile ( filePath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield fs _1 . promises . unlink ( filePath ) ;
} ) ;
}
exports . rmFile = rmFile ;
function getProperRetention ( retentionInput , retentionSetting ) {
if ( retentionInput < 0 ) {
throw new Error ( 'Invalid retention, minimum value is 1.' ) ;
}
let retention = retentionInput ;
if ( retentionSetting ) {
const maxRetention = parseInt ( retentionSetting ) ;
if ( ! isNaN ( maxRetention ) && maxRetention < retention ) {
( 0 , core _1 . warning ) ( ` Retention days is greater than the max value allowed by the repository setting, reduce retention to ${ maxRetention } days ` ) ;
retention = maxRetention ;
}
}
return retention ;
}
exports . getProperRetention = getProperRetention ;
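// Usage sketch (illustrative only, never invoked by this bundle): a requested retention of 90 days is clamped
// to the repository maximum when the setting (a numeric string from the environment) is lower; here the real
// call would warn and return 30.
function exampleGetProperRetentionUsage() {
    return getProperRetention(90, '30');
}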
function sleep ( milliseconds ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( resolve => setTimeout ( resolve , milliseconds ) ) ;
} ) ;
}
exports . sleep = sleep ;
function digestForStream ( stream ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve , reject ) => {
const crc64 = new crc64 _1 . default ( ) ;
const md5 = crypto _1 . default . createHash ( 'md5' ) ;
stream
. on ( 'data' , data => {
crc64 . update ( data ) ;
md5 . update ( data ) ;
} )
. on ( 'end' , ( ) => resolve ( {
crc64 : crc64 . digest ( 'base64' ) ,
md5 : md5 . digest ( 'base64' )
} ) )
. on ( 'error' , reject ) ;
} ) ;
} ) ;
}
exports . digestForStream = digestForStream ;
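// Usage sketch (illustrative only, never invoked by this bundle): digests are computed from a fresh read
// stream so the stream consumed for hashing stays separate from the one that is uploaded. The path below is
// a hypothetical placeholder.
async function exampleDigestForStreamUsage() {
    const digest = await digestForStream(fs_1.createReadStream('/tmp/report.txt'));
    return `${digest.crc64} ${digest.md5}`;
}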
//# sourceMappingURL=utils.js.map
/***/ } ) ,
/***/ 7351 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . issue = exports . issueCommand = void 0 ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
/ * *
* Commands
*
* Command Format :
* : : name key = value , key = value : : message
*
* Examples :
* : : warning : : This is the message
* : : set - env name = MY _VAR : : some value
* /
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
}
}
}
cmdStr += ` ${ CMD _STRING } ${ escapeData ( this . message ) } ` ;
return cmdStr ;
}
}
function escapeData ( s ) {
return utils _1 . toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
return utils _1 . toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
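// Illustrative note (not part of the bundled source): a Command renders to the
// "::name key=value,key=value::message" form documented above, with '%', '\r' and '\n'
// escaped everywhere and ':' / ',' additionally escaped inside property values, e.g.
//   issueCommand('warning', { file: 'app.js', line: '10' }, 'Something, odd')
// writes "::warning file=app.js,line=10::Something, odd" to stdout.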
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 2186 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getIDToken = exports . getState = exports . saveState = exports . group = exports . endGroup = exports . startGroup = exports . info = exports . notice = exports . warning = exports . error = exports . debug = exports . isDebug = exports . setFailed = exports . setCommandEcho = exports . setOutput = exports . getBooleanInput = exports . getMultilineInput = exports . getInput = exports . addPath = exports . setSecret = exports . exportVariable = exports . ExitCode = void 0 ;
const command _1 = _ _nccwpck _require _ _ ( 7351 ) ;
const file _command _1 = _ _nccwpck _require _ _ ( 717 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const oidc _utils _1 = _ _nccwpck _require _ _ ( 8041 ) ;
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable ( name , val ) {
const convertedVal = utils _1 . toCommandValue ( val ) ;
process . env [ name ] = convertedVal ;
const filePath = process . env [ 'GITHUB_ENV' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'ENV' , file _command _1 . prepareKeyValueMessage ( name , val ) ) ;
}
command _1 . issueCommand ( 'set-env' , { name } , convertedVal ) ;
}
exports . exportVariable = exportVariable ;
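// Illustrative note (not part of the bundled source): exportVariable sets the variable on
// process.env for the current step and then persists it for later steps, preferring the
// $GITHUB_ENV file command (key<<delimiter heredoc) and falling back to the legacy
// "::set-env" workflow command when GITHUB_ENV is not defined.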
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
const filePath = process . env [ 'GITHUB_PATH' ] || '' ;
if ( filePath ) {
file _command _1 . issueFileCommand ( 'PATH' , inputPath ) ;
}
else {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
}
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
* Gets the value of an input .
* Unless trimWhitespace is set to false in InputOptions , the value is also trimmed .
* Returns an empty string if the value is not defined .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
if ( options && options . trimWhitespace === false ) {
return val ;
}
return val . trim ( ) ;
}
exports . getInput = getInput ;
/ * *
* Gets the values of a multiline input . Each value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string [ ]
*
* /
function getMultilineInput ( name , options ) {
const inputs = getInput ( name , options )
. split ( '\n' )
. filter ( x => x !== '' ) ;
if ( options && options . trimWhitespace === false ) {
return inputs ;
}
return inputs . map ( input => input . trim ( ) ) ;
}
exports . getMultilineInput = getMultilineInput ;
/ * *
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification .
* Support boolean input list : ` true | True | TRUE | false | False | FALSE ` .
* The return value is also in boolean type .
* ref : https : //yaml.org/spec/1.2/spec.html#id2804923
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns boolean
* /
function getBooleanInput ( name , options ) {
const trueValue = [ 'true' , 'True' , 'TRUE' ] ;
const falseValue = [ 'false' , 'False' , 'FALSE' ] ;
const val = getInput ( name , options ) ;
if ( trueValue . includes ( val ) )
return true ;
if ( falseValue . includes ( val ) )
return false ;
throw new TypeError ( ` Input does not meet YAML 1.2 "Core Schema" specification: ${ name } \n ` +
` Support boolean input list: \` true | True | TRUE | false | False | FALSE \` ` ) ;
}
exports . getBooleanInput = getBooleanInput ;
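// Illustrative note (not part of the bundled source): inputs are read from environment
// variables of the form INPUT_<NAME>, with spaces replaced by underscores and the name
// upper-cased, so getInput('retention-days') reads process.env['INPUT_RETENTION-DAYS'].
// getBooleanInput accepts only true|True|TRUE|false|False|FALSE and throws otherwise.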
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
const filePath = process . env [ 'GITHUB_OUTPUT' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'OUTPUT' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
process . stdout . write ( os . EOL ) ;
command _1 . issueCommand ( 'set-output' , { name } , utils _1 . toCommandValue ( value ) ) ;
}
exports . setOutput = setOutput ;
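// Illustrative note (not part of the bundled source): setOutput('result', { ok: true })
// appends a "result<<ghadelimiter_<uuid>" heredoc block to the $GITHUB_OUTPUT file when
// that file command is available; otherwise it falls back to the legacy "::set-output"
// workflow command. Non-string values are JSON-stringified on either path.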
/ * *
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
*
* /
function setCommandEcho ( enabled ) {
command _1 . issue ( 'echo' , enabled ? 'on' : 'off' ) ;
}
exports . setCommandEcho = setCommandEcho ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
* @ param message error issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
* /
function error ( message , properties = { } ) {
command _1 . issueCommand ( 'error' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . error = error ;
/ * *
* Adds a warning issue
* @ param message warning issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
* /
function warning ( message , properties = { } ) {
command _1 . issueCommand ( 'warning' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . warning = warning ;
/ * *
* Adds a notice issue
* @ param message notice issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
* /
function notice ( message , properties = { } ) {
command _1 . issueCommand ( 'notice' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . notice = notice ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
}
finally {
endGroup ( ) ;
}
return result ;
} ) ;
}
exports . group = group ;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/ * *
* Saves state for current action , the state can only be retrieved by this action ' s post job execution .
*
* @ param name name of the state to store
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState ( name , value ) {
const filePath = process . env [ 'GITHUB_STATE' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'STATE' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
command _1 . issueCommand ( 'save-state' , { name } , utils _1 . toCommandValue ( value ) ) ;
}
exports . saveState = saveState ;
/ * *
* Gets the value of a state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
function getIDToken ( aud ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield oidc _utils _1 . OidcClient . getIDToken ( aud ) ;
} ) ;
}
exports . getIDToken = getIDToken ;
/ * *
* Summary exports
* /
var summary _1 = _ _nccwpck _require _ _ ( 1327 ) ;
Object . defineProperty ( exports , "summary" , ( { enumerable : true , get : function ( ) { return summary _1 . summary ; } } ) ) ;
/ * *
* @ deprecated use core . summary
* /
var summary _2 = _ _nccwpck _require _ _ ( 1327 ) ;
Object . defineProperty ( exports , "markdownSummary" , ( { enumerable : true , get : function ( ) { return summary _2 . markdownSummary ; } } ) ) ;
/ * *
* Path exports
* /
var path _utils _1 = _ _nccwpck _require _ _ ( 2981 ) ;
Object . defineProperty ( exports , "toPosixPath" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toPosixPath ; } } ) ) ;
Object . defineProperty ( exports , "toWin32Path" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toWin32Path ; } } ) ) ;
Object . defineProperty ( exports , "toPlatformPath" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toPlatformPath ; } } ) ) ;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 717 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// For internal use, subject to change.
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . prepareKeyValueMessage = exports . issueFileCommand = void 0 ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const uuid _1 = _ _nccwpck _require _ _ ( 5840 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
function issueFileCommand ( command , message ) {
const filePath = process . env [ ` GITHUB_ ${ command } ` ] ;
if ( ! filePath ) {
throw new Error ( ` Unable to find environment variable for file command ${ command } ` ) ;
}
if ( ! fs . existsSync ( filePath ) ) {
throw new Error ( ` Missing file at path: ${ filePath } ` ) ;
}
fs . appendFileSync ( filePath , ` ${ utils _1 . toCommandValue ( message ) } ${ os . EOL } ` , {
encoding : 'utf8'
} ) ;
}
exports . issueFileCommand = issueFileCommand ;
function prepareKeyValueMessage ( key , value ) {
const delimiter = ` ghadelimiter_ ${ uuid _1 . v4 ( ) } ` ;
const convertedValue = utils _1 . toCommandValue ( value ) ;
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if ( key . includes ( delimiter ) ) {
throw new Error ( ` Unexpected input: name should not contain the delimiter " ${ delimiter } " ` ) ;
}
if ( convertedValue . includes ( delimiter ) ) {
throw new Error ( ` Unexpected input: value should not contain the delimiter " ${ delimiter } " ` ) ;
}
return ` ${ key } << ${ delimiter } ${ os . EOL } ${ convertedValue } ${ os . EOL } ${ delimiter } ` ;
}
exports . prepareKeyValueMessage = prepareKeyValueMessage ;
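// Illustrative note (not part of the bundled source): prepareKeyValueMessage('MY_VAR', 'some value')
// produces a heredoc-style block such as
//   MY_VAR<<ghadelimiter_<uuid>
//   some value
//   ghadelimiter_<uuid>
// which issueFileCommand appends to the file named by GITHUB_<command> (e.g. GITHUB_ENV).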
//# sourceMappingURL=file-command.js.map
/***/ } ) ,
/***/ 8041 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . OidcClient = void 0 ;
const http _client _1 = _ _nccwpck _require _ _ ( 6255 ) ;
const auth _1 = _ _nccwpck _require _ _ ( 5526 ) ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
class OidcClient {
static createHttpClient ( allowRetry = true , maxRetry = 10 ) {
const requestOptions = {
allowRetries : allowRetry ,
maxRetries : maxRetry
} ;
return new http _client _1 . HttpClient ( 'actions/oidc-client' , [ new auth _1 . BearerCredentialHandler ( OidcClient . getRequestToken ( ) ) ] , requestOptions ) ;
}
static getRequestToken ( ) {
const token = process . env [ 'ACTIONS_ID_TOKEN_REQUEST_TOKEN' ] ;
if ( ! token ) {
throw new Error ( 'Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable' ) ;
}
return token ;
}
static getIDTokenUrl ( ) {
const runtimeUrl = process . env [ 'ACTIONS_ID_TOKEN_REQUEST_URL' ] ;
if ( ! runtimeUrl ) {
throw new Error ( 'Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable' ) ;
}
return runtimeUrl ;
}
static getCall ( id _token _url ) {
var _a ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpclient = OidcClient . createHttpClient ( ) ;
const res = yield httpclient
. getJson ( id _token _url )
. catch ( error => {
throw new Error ( ` Failed to get ID Token. \n
Error Code : $ { error . statusCode } \ n
Error Message : $ { error . result . message } ` );
} ) ;
const id _token = ( _a = res . result ) === null || _a === void 0 ? void 0 : _a . value ;
if ( ! id _token ) {
throw new Error ( 'Response json body does not have ID Token field' ) ;
}
return id _token ;
} ) ;
}
static getIDToken ( audience ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
// New ID Token is requested from action service
let id _token _url = OidcClient . getIDTokenUrl ( ) ;
if ( audience ) {
const encodedAudience = encodeURIComponent ( audience ) ;
id _token _url = ` ${ id _token _url } &audience= ${ encodedAudience } ` ;
}
core _1 . debug ( ` ID token url is ${ id _token _url } ` ) ;
const id _token = yield OidcClient . getCall ( id _token _url ) ;
core _1 . setSecret ( id _token ) ;
return id _token ;
}
catch ( error ) {
throw new Error ( ` Error message: ${ error . message } ` ) ;
}
} ) ;
}
}
exports . OidcClient = OidcClient ;
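// Illustrative note (not part of the bundled source): OidcClient backs core.getIDToken().
// It relies on the ACTIONS_ID_TOKEN_REQUEST_URL and ACTIONS_ID_TOKEN_REQUEST_TOKEN
// variables injected by the runner (i.e. the workflow must grant the id-token permission),
// optionally appends "&audience=<aud>" to the request URL, fetches the token over the
// bundled HTTP client, and masks it via setSecret before returning it.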
//# sourceMappingURL=oidc-utils.js.map
/***/ } ) ,
/***/ 2981 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . toPlatformPath = exports . toWin32Path = exports . toPosixPath = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
/ * *
* toPosixPath converts the given path to the posix form . On Windows , \ \ will be
* replaced with / .
*
* @ param pth . Path to transform .
* @ return string Posix path .
* /
function toPosixPath ( pth ) {
return pth . replace ( /[\\]/g , '/' ) ;
}
exports . toPosixPath = toPosixPath ;
/ * *
* toWin32Path converts the given path to the win32 form . On Linux , / will be
* replaced with \ \ .
*
* @ param pth . Path to transform .
* @ return string Win32 path .
* /
function toWin32Path ( pth ) {
return pth . replace ( /[/]/g , '\\' ) ;
}
exports . toWin32Path = toWin32Path ;
/ * *
* toPlatformPath converts the given path to a platform - specific path . It does
* this by replacing instances of / and \ with the platform - specific path
* separator .
*
* @ param pth The path to platformize .
* @ return string The platform - specific path .
* /
function toPlatformPath ( pth ) {
return pth . replace ( /[/\\]/g , path . sep ) ;
}
exports . toPlatformPath = toPlatformPath ;
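// Illustrative examples (not part of the bundled source):
//   toPosixPath('dir\\file.txt')    -> 'dir/file.txt'
//   toWin32Path('dir/file.txt')     -> 'dir\\file.txt'
//   toPlatformPath('dir/file.txt')  -> 'dir\\file.txt' on Windows, 'dir/file.txt' elsewhere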
//# sourceMappingURL=path-utils.js.map
/***/ } ) ,
/***/ 1327 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . summary = exports . markdownSummary = exports . SUMMARY _DOCS _URL = exports . SUMMARY _ENV _VAR = void 0 ;
const os _1 = _ _nccwpck _require _ _ ( 2037 ) ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
const { access , appendFile , writeFile } = fs _1 . promises ;
exports . SUMMARY _ENV _VAR = 'GITHUB_STEP_SUMMARY' ;
exports . SUMMARY _DOCS _URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary' ;
class Summary {
constructor ( ) {
this . _buffer = '' ;
}
/ * *
* Finds the summary file path from the environment , rejects if env var is not found or file does not exist
* Also checks r / w permissions .
*
* @ returns step summary file path
* /
filePath ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( this . _filePath ) {
return this . _filePath ;
}
const pathFromEnv = process . env [ exports . SUMMARY _ENV _VAR ] ;
if ( ! pathFromEnv ) {
throw new Error ( ` Unable to find environment variable for $ ${ exports . SUMMARY _ENV _VAR } . Check if your runtime environment supports job summaries. ` ) ;
}
try {
yield access ( pathFromEnv , fs _1 . constants . R _OK | fs _1 . constants . W _OK ) ;
}
catch ( _a ) {
throw new Error ( ` Unable to access summary file: ' ${ pathFromEnv } '. Check if the file has correct read/write permissions. ` ) ;
}
this . _filePath = pathFromEnv ;
return this . _filePath ;
} ) ;
}
/ * *
* Wraps content in an HTML tag , adding any HTML attributes
*
* @ param { string } tag HTML tag to wrap
* @ param { string | null } content content within the tag
* @ param { [ attribute : string ] : string } attrs key - value list of HTML attributes to add
*
* @ returns { string } content wrapped in HTML element
* /
wrap ( tag , content , attrs = { } ) {
const htmlAttrs = Object . entries ( attrs )
. map ( ( [ key , value ] ) => ` ${ key } =" ${ value } " ` )
. join ( '' ) ;
if ( ! content ) {
return ` < ${ tag } ${ htmlAttrs } > ` ;
}
return ` < ${ tag } ${ htmlAttrs } > ${ content } </ ${ tag } > ` ;
}
/ * *
* Writes text in the buffer to the summary buffer file and empties buffer . Will append by default .
*
* @ param { SummaryWriteOptions } [ options ] ( optional ) options for write operation
*
* @ returns { Promise < Summary > } summary instance
* /
write ( options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const overwrite = ! ! ( options === null || options === void 0 ? void 0 : options . overwrite ) ;
const filePath = yield this . filePath ( ) ;
const writeFunc = overwrite ? writeFile : appendFile ;
yield writeFunc ( filePath , this . _buffer , { encoding : 'utf8' } ) ;
return this . emptyBuffer ( ) ;
} ) ;
}
/ * *
* Clears the summary buffer and wipes the summary file
*
* @ returns { Summary } summary instance
* /
clear ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . emptyBuffer ( ) . write ( { overwrite : true } ) ;
} ) ;
}
/ * *
* Returns the current summary buffer as a string
*
* @ returns { string } string of summary buffer
* /
stringify ( ) {
return this . _buffer ;
}
/ * *
* If the summary buffer is empty
*
* @ returns { boolean } true if the buffer is empty
* /
isEmptyBuffer ( ) {
return this . _buffer . length === 0 ;
}
/ * *
* Resets the summary buffer without writing to summary file
*
* @ returns { Summary } summary instance
* /
emptyBuffer ( ) {
this . _buffer = '' ;
return this ;
}
/ * *
* Adds raw text to the summary buffer
*
* @ param { string } text content to add
* @ param { boolean } [ addEOL = false ] ( optional ) append an EOL to the raw text ( default : false )
*
* @ returns { Summary } summary instance
* /
addRaw ( text , addEOL = false ) {
this . _buffer += text ;
return addEOL ? this . addEOL ( ) : this ;
}
/ * *
* Adds the operating system - specific end - of - line marker to the buffer
*
* @ returns { Summary } summary instance
* /
addEOL ( ) {
return this . addRaw ( os _1 . EOL ) ;
}
/ * *
* Adds an HTML codeblock to the summary buffer
*
* @ param { string } code content to render within fenced code block
* @ param { string } lang ( optional ) language to syntax highlight code
*
* @ returns { Summary } summary instance
* /
addCodeBlock ( code , lang ) {
const attrs = Object . assign ( { } , ( lang && { lang } ) ) ;
const element = this . wrap ( 'pre' , this . wrap ( 'code' , code ) , attrs ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML list to the summary buffer
*
* @ param { string [ ] } items list of items to render
* @ param { boolean } [ ordered = false ] ( optional ) if the rendered list should be ordered or not ( default : false )
*
* @ returns { Summary } summary instance
* /
addList ( items , ordered = false ) {
const tag = ordered ? 'ol' : 'ul' ;
const listItems = items . map ( item => this . wrap ( 'li' , item ) ) . join ( '' ) ;
const element = this . wrap ( tag , listItems ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML table to the summary buffer
*
* @ param { SummaryTableCell [ ] } rows table rows
*
* @ returns { Summary } summary instance
* /
addTable ( rows ) {
const tableBody = rows
. map ( row => {
const cells = row
. map ( cell => {
if ( typeof cell === 'string' ) {
return this . wrap ( 'td' , cell ) ;
}
const { header , data , colspan , rowspan } = cell ;
const tag = header ? 'th' : 'td' ;
const attrs = Object . assign ( Object . assign ( { } , ( colspan && { colspan } ) ) , ( rowspan && { rowspan } ) ) ;
return this . wrap ( tag , data , attrs ) ;
} )
. join ( '' ) ;
return this . wrap ( 'tr' , cells ) ;
} )
. join ( '' ) ;
const element = this . wrap ( 'table' , tableBody ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds a collapsible HTML details element to the summary buffer
*
* @ param { string } label text for the closed state
* @ param { string } content collapsible content
*
* @ returns { Summary } summary instance
* /
addDetails ( label , content ) {
const element = this . wrap ( 'details' , this . wrap ( 'summary' , label ) + content ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML image tag to the summary buffer
*
* @ param { string } src path to the image you want to embed
* @ param { string } alt text description of the image
* @ param { SummaryImageOptions } options ( optional ) additional image attributes
*
* @ returns { Summary } summary instance
* /
addImage ( src , alt , options ) {
const { width , height } = options || { } ;
const attrs = Object . assign ( Object . assign ( { } , ( width && { width } ) ) , ( height && { height } ) ) ;
const element = this . wrap ( 'img' , null , Object . assign ( { src , alt } , attrs ) ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML section heading element
*
* @ param { string } text heading text
* @ param { number | string } [ level = 1 ] ( optional ) the heading level , default : 1
*
* @ returns { Summary } summary instance
* /
addHeading ( text , level ) {
const tag = ` h ${ level } ` ;
const allowedTag = [ 'h1' , 'h2' , 'h3' , 'h4' , 'h5' , 'h6' ] . includes ( tag )
? tag
: 'h1' ;
const element = this . wrap ( allowedTag , text ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML thematic break ( < hr > ) to the summary buffer
*
* @ returns { Summary } summary instance
* /
addSeparator ( ) {
const element = this . wrap ( 'hr' , null ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML line break ( < br > ) to the summary buffer
*
* @ returns { Summary } summary instance
* /
addBreak ( ) {
const element = this . wrap ( 'br' , null ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML blockquote to the summary buffer
*
* @ param { string } text quote text
* @ param { string } cite ( optional ) citation url
*
* @ returns { Summary } summary instance
* /
addQuote ( text , cite ) {
const attrs = Object . assign ( { } , ( cite && { cite } ) ) ;
const element = this . wrap ( 'blockquote' , text , attrs ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML anchor tag to the summary buffer
*
* @ param { string } text link text / content
* @ param { string } href hyperlink
*
* @ returns { Summary } summary instance
* /
addLink ( text , href ) {
const element = this . wrap ( 'a' , text , { href } ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
}
const _summary = new Summary ( ) ;
/ * *
* @ deprecated use ` core.summary `
* /
exports . markdownSummary = _summary ;
exports . summary = _summary ;
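// Illustrative usage sketch (not part of the bundled source), assuming GITHUB_STEP_SUMMARY
// points at a writable file:
//   await core.summary
//     .addHeading('Test results')
//     .addTable([[{ data: 'File', header: true }, { data: 'Result', header: true }],
//                ['foo.test.js', 'Pass']])
//     .write();
// Each add* call only appends to the in-memory buffer; write() flushes it to the file.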
//# sourceMappingURL=summary.js.map
/***/ } ) ,
/***/ 5278 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . toCommandProperties = exports . toCommandValue = void 0 ;
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
function toCommandValue ( input ) {
if ( input === null || input === undefined ) {
return '' ;
}
else if ( typeof input === 'string' || input instanceof String ) {
return input ;
}
return JSON . stringify ( input ) ;
}
exports . toCommandValue = toCommandValue ;
/ * *
*
* @ param annotationProperties
* @ returns The command properties to send with the actual annotation command
* See IssueCommandProperties : https : //github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
* /
function toCommandProperties ( annotationProperties ) {
if ( ! Object . keys ( annotationProperties ) . length ) {
return { } ;
}
return {
title : annotationProperties . title ,
file : annotationProperties . file ,
line : annotationProperties . startLine ,
endLine : annotationProperties . endLine ,
col : annotationProperties . startColumn ,
endColumn : annotationProperties . endColumn
} ;
}
exports . toCommandProperties = toCommandProperties ;
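// Illustrative note (not part of the bundled source): toCommandProperties renames the
// AnnotationProperties fields to the names the runner expects, e.g.
//   toCommandProperties({ title: 'Lint', file: 'app.js', startLine: 3, endLine: 3 })
//   -> { title: 'Lint', file: 'app.js', line: 3, endLine: 3, col: undefined, endColumn: undefined }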
//# sourceMappingURL=utils.js.map
/***/ } ) ,
/***/ 8090 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . hashFiles = exports . create = void 0 ;
const internal _globber _1 = _ _nccwpck _require _ _ ( 8298 ) ;
const internal _hash _files _1 = _ _nccwpck _require _ _ ( 2448 ) ;
/ * *
* Constructs a globber
*
* @ param patterns Patterns separated by newlines
* @ param options Glob options
* /
function create ( patterns , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield internal _globber _1 . DefaultGlobber . create ( patterns , options ) ;
} ) ;
}
exports . create = create ;
/ * *
* Computes the sha256 hash of a glob
*
* @ param patterns Patterns separated by newlines
* @ param currentWorkspace Workspace used when matching files
* @ param options Glob options
* @ param verbose Enables verbose logging
* /
function hashFiles ( patterns , currentWorkspace = '' , options , verbose = false ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let followSymbolicLinks = true ;
if ( options && typeof options . followSymbolicLinks === 'boolean' ) {
followSymbolicLinks = options . followSymbolicLinks ;
}
const globber = yield create ( patterns , { followSymbolicLinks } ) ;
return ( 0 , internal _hash _files _1 . hashFiles ) ( globber , currentWorkspace , verbose ) ;
} ) ;
}
exports . hashFiles = hashFiles ;
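// Illustrative usage sketch (not part of the bundled source):
//   const hash = await hashFiles('**/package-lock.json');
// hashes each matched file under GITHUB_WORKSPACE with SHA-256, feeds the per-file
// digests into one combined SHA-256, and returns its hex digest ('' when nothing matches).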
//# sourceMappingURL=glob.js.map
/***/ } ) ,
/***/ 1026 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getOptions = void 0 ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
/ * *
* Returns a copy with defaults filled in .
* /
function getOptions ( copy ) {
const result = {
followSymbolicLinks : true ,
implicitDescendants : true ,
matchDirectories : true ,
omitBrokenSymbolicLinks : true ,
excludeHiddenFiles : false
} ;
if ( copy ) {
if ( typeof copy . followSymbolicLinks === 'boolean' ) {
result . followSymbolicLinks = copy . followSymbolicLinks ;
core . debug ( ` followSymbolicLinks ' ${ result . followSymbolicLinks } ' ` ) ;
}
if ( typeof copy . implicitDescendants === 'boolean' ) {
result . implicitDescendants = copy . implicitDescendants ;
core . debug ( ` implicitDescendants ' ${ result . implicitDescendants } ' ` ) ;
}
if ( typeof copy . matchDirectories === 'boolean' ) {
result . matchDirectories = copy . matchDirectories ;
core . debug ( ` matchDirectories ' ${ result . matchDirectories } ' ` ) ;
}
if ( typeof copy . omitBrokenSymbolicLinks === 'boolean' ) {
result . omitBrokenSymbolicLinks = copy . omitBrokenSymbolicLinks ;
core . debug ( ` omitBrokenSymbolicLinks ' ${ result . omitBrokenSymbolicLinks } ' ` ) ;
}
if ( typeof copy . excludeHiddenFiles === 'boolean' ) {
result . excludeHiddenFiles = copy . excludeHiddenFiles ;
core . debug ( ` excludeHiddenFiles ' ${ result . excludeHiddenFiles } ' ` ) ;
}
}
return result ;
}
exports . getOptions = getOptions ;
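// Illustrative note (not part of the bundled source): getOptions fills in the defaults
// shown above, so getOptions({ followSymbolicLinks: false }) returns
//   { followSymbolicLinks: false, implicitDescendants: true, matchDirectories: true,
//     omitBrokenSymbolicLinks: true, excludeHiddenFiles: false }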
//# sourceMappingURL=internal-glob-options-helper.js.map
/***/ } ) ,
/***/ 8298 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _asyncValues = ( this && this . _ _asyncValues ) || function ( o ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
return m ? m . call ( o ) : ( o = typeof _ _values === "function" ? _ _values ( o ) : o [ Symbol . iterator ] ( ) , i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ) ;
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
var _ _await = ( this && this . _ _await ) || function ( v ) { return this instanceof _ _await ? ( this . v = v , this ) : new _ _await ( v ) ; }
var _ _asyncGenerator = ( this && this . _ _asyncGenerator ) || function ( thisArg , _arguments , generator ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var g = generator . apply ( thisArg , _arguments || [ ] ) , i , q = [ ] ;
return i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ;
function verb ( n ) { if ( g [ n ] ) i [ n ] = function ( v ) { return new Promise ( function ( a , b ) { q . push ( [ n , v , a , b ] ) > 1 || resume ( n , v ) ; } ) ; } ; }
function resume ( n , v ) { try { step ( g [ n ] ( v ) ) ; } catch ( e ) { settle ( q [ 0 ] [ 3 ] , e ) ; } }
function step ( r ) { r . value instanceof _ _await ? Promise . resolve ( r . value . v ) . then ( fulfill , reject ) : settle ( q [ 0 ] [ 2 ] , r ) ; }
function fulfill ( value ) { resume ( "next" , value ) ; }
function reject ( value ) { resume ( "throw" , value ) ; }
function settle ( f , v ) { if ( f ( v ) , q . shift ( ) , q . length ) resume ( q [ 0 ] [ 0 ] , q [ 0 ] [ 1 ] ) ; }
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . DefaultGlobber = void 0 ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const globOptionsHelper = _ _importStar ( _ _nccwpck _require _ _ ( 1026 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const patternHelper = _ _importStar ( _ _nccwpck _require _ _ ( 9005 ) ) ;
const internal _match _kind _1 = _ _nccwpck _require _ _ ( 1063 ) ;
const internal _pattern _1 = _ _nccwpck _require _ _ ( 4536 ) ;
const internal _search _state _1 = _ _nccwpck _require _ _ ( 9117 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
class DefaultGlobber {
constructor ( options ) {
this . patterns = [ ] ;
this . searchPaths = [ ] ;
this . options = globOptionsHelper . getOptions ( options ) ;
}
getSearchPaths ( ) {
// Return a copy
return this . searchPaths . slice ( ) ;
}
glob ( ) {
var _a , e _1 , _b , _c ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = [ ] ;
try {
for ( var _d = true , _e = _ _asyncValues ( this . globGenerator ( ) ) , _f ; _f = yield _e . next ( ) , _a = _f . done , ! _a ; _d = true ) {
_c = _f . value ;
_d = false ;
const itemPath = _c ;
result . push ( itemPath ) ;
}
}
catch ( e _1 _1 ) { e _1 = { error : e _1 _1 } ; }
finally {
try {
if ( ! _d && ! _a && ( _b = _e . return ) ) yield _b . call ( _e ) ;
}
finally { if ( e _1 ) throw e _1 . error ; }
}
return result ;
} ) ;
}
globGenerator ( ) {
return _ _asyncGenerator ( this , arguments , function * globGenerator _1 ( ) {
// Fill in defaults options
const options = globOptionsHelper . getOptions ( this . options ) ;
// Implicit descendants?
const patterns = [ ] ;
for ( const pattern of this . patterns ) {
patterns . push ( pattern ) ;
if ( options . implicitDescendants &&
( pattern . trailingSeparator ||
pattern . segments [ pattern . segments . length - 1 ] !== '**' ) ) {
patterns . push ( new internal _pattern _1 . Pattern ( pattern . negate , true , pattern . segments . concat ( '**' ) ) ) ;
}
}
// Push the search paths
const stack = [ ] ;
for ( const searchPath of patternHelper . getSearchPaths ( patterns ) ) {
core . debug ( ` Search path ' ${ searchPath } ' ` ) ;
// Exists?
try {
// Intentionally using lstat. Detection for broken symlink
// will be performed later (if following symlinks).
yield _ _await ( fs . promises . lstat ( searchPath ) ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
continue ;
}
throw err ;
}
stack . unshift ( new internal _search _state _1 . SearchState ( searchPath , 1 ) ) ;
}
// Search
const traversalChain = [ ] ; // used to detect cycles
while ( stack . length ) {
// Pop
const item = stack . pop ( ) ;
// Match?
const match = patternHelper . match ( patterns , item . path ) ;
const partialMatch = ! ! match || patternHelper . partialMatch ( patterns , item . path ) ;
if ( ! match && ! partialMatch ) {
continue ;
}
// Stat
const stats = yield _ _await ( DefaultGlobber . stat ( item , options , traversalChain )
// Broken symlink, or symlink cycle detected, or no longer exists
) ;
// Broken symlink, or symlink cycle detected, or no longer exists
if ( ! stats ) {
continue ;
}
// Hidden file or directory?
if ( options . excludeHiddenFiles && path . basename ( item . path ) . match ( /^\./ ) ) {
continue ;
}
// Directory
if ( stats . isDirectory ( ) ) {
// Matched
if ( match & internal _match _kind _1 . MatchKind . Directory && options . matchDirectories ) {
yield yield _ _await ( item . path ) ;
}
// Descend?
else if ( ! partialMatch ) {
continue ;
}
// Push the child items in reverse
const childLevel = item . level + 1 ;
const childItems = ( yield _ _await ( fs . promises . readdir ( item . path ) ) ) . map ( x => new internal _search _state _1 . SearchState ( path . join ( item . path , x ) , childLevel ) ) ;
stack . push ( ... childItems . reverse ( ) ) ;
}
// File
else if ( match & internal _match _kind _1 . MatchKind . File ) {
yield yield _ _await ( item . path ) ;
}
}
} ) ;
}
/ * *
* Constructs a DefaultGlobber
* /
static create ( patterns , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = new DefaultGlobber ( options ) ;
if ( IS _WINDOWS ) {
patterns = patterns . replace ( /\r\n/g , '\n' ) ;
patterns = patterns . replace ( /\r/g , '\n' ) ;
}
const lines = patterns . split ( '\n' ) . map ( x => x . trim ( ) ) ;
for ( const line of lines ) {
// Empty or comment
if ( ! line || line . startsWith ( '#' ) ) {
continue ;
}
// Pattern
else {
result . patterns . push ( new internal _pattern _1 . Pattern ( line ) ) ;
}
}
result . searchPaths . push ( ... patternHelper . getSearchPaths ( result . patterns ) ) ;
return result ;
} ) ;
}
static stat ( item , options , traversalChain ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Note:
// `stat` returns info about the target of a symlink (or symlink chain)
// `lstat` returns info about a symlink itself
let stats ;
if ( options . followSymbolicLinks ) {
try {
// Use `stat` (following symlinks)
stats = yield fs . promises . stat ( item . path ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
if ( options . omitBrokenSymbolicLinks ) {
core . debug ( ` Broken symlink ' ${ item . path } ' ` ) ;
return undefined ;
}
throw new Error ( ` No information found for the path ' ${ item . path } '. This may indicate a broken symbolic link. ` ) ;
}
throw err ;
}
}
else {
// Use `lstat` (not following symlinks)
stats = yield fs . promises . lstat ( item . path ) ;
}
// Note, isDirectory() returns false for the lstat of a symlink
if ( stats . isDirectory ( ) && options . followSymbolicLinks ) {
// Get the realpath
const realPath = yield fs . promises . realpath ( item . path ) ;
// Fixup the traversal chain to match the item level
while ( traversalChain . length >= item . level ) {
traversalChain . pop ( ) ;
}
// Test for a cycle
if ( traversalChain . some ( ( x ) => x === realPath ) ) {
core . debug ( ` Symlink cycle detected for path ' ${ item . path } ' and realpath ' ${ realPath } ' ` ) ;
return undefined ;
}
// Update the traversal chain
traversalChain . push ( realPath ) ;
}
return stats ;
} ) ;
}
}
exports . DefaultGlobber = DefaultGlobber ;
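// Illustrative usage sketch (not part of the bundled source): patterns are newline-separated,
// blank lines and lines starting with '#' are ignored, and CRLF is normalized on Windows:
//   const globber = await DefaultGlobber.create('**/*.js\n# comments are skipped');
//   const files = await globber.glob(); // or iterate globber.globGenerator() lazily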
//# sourceMappingURL=internal-globber.js.map
/***/ } ) ,
/***/ 2448 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _asyncValues = ( this && this . _ _asyncValues ) || function ( o ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
return m ? m . call ( o ) : ( o = typeof _ _values === "function" ? _ _values ( o ) : o [ Symbol . iterator ] ( ) , i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ) ;
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . hashFiles = void 0 ;
const crypto = _ _importStar ( _ _nccwpck _require _ _ ( 6113 ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const stream = _ _importStar ( _ _nccwpck _require _ _ ( 2781 ) ) ;
const util = _ _importStar ( _ _nccwpck _require _ _ ( 3837 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
function hashFiles ( globber , currentWorkspace , verbose = false ) {
var _a , e _1 , _b , _c ;
var _d ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const writeDelegate = verbose ? core . info : core . debug ;
let hasMatch = false ;
const githubWorkspace = currentWorkspace
? currentWorkspace
: ( _d = process . env [ 'GITHUB_WORKSPACE' ] ) !== null && _d !== void 0 ? _d : process . cwd ( ) ;
const result = crypto . createHash ( 'sha256' ) ;
let count = 0 ;
try {
for ( var _e = true , _f = _ _asyncValues ( globber . globGenerator ( ) ) , _g ; _g = yield _f . next ( ) , _a = _g . done , ! _a ; _e = true ) {
_c = _g . value ;
_e = false ;
const file = _c ;
writeDelegate ( file ) ;
if ( ! file . startsWith ( ` ${ githubWorkspace } ${ path . sep } ` ) ) {
writeDelegate ( ` Ignore ' ${ file } ' since it is not under GITHUB_WORKSPACE. ` ) ;
continue ;
}
if ( fs . statSync ( file ) . isDirectory ( ) ) {
writeDelegate ( ` Skip directory ' ${ file } '. ` ) ;
continue ;
}
const hash = crypto . createHash ( 'sha256' ) ;
const pipeline = util . promisify ( stream . pipeline ) ;
yield pipeline ( fs . createReadStream ( file ) , hash ) ;
result . write ( hash . digest ( ) ) ;
count ++ ;
if ( ! hasMatch ) {
hasMatch = true ;
}
}
}
catch ( e _1 _1 ) { e _1 = { error : e _1 _1 } ; }
finally {
try {
if ( ! _e && ! _a && ( _b = _f . return ) ) yield _b . call ( _f ) ;
}
finally { if ( e _1 ) throw e _1 . error ; }
}
result . end ( ) ;
if ( hasMatch ) {
writeDelegate ( ` Found ${ count } files to hash. ` ) ;
return result . digest ( 'hex' ) ;
}
else {
writeDelegate ( ` No matches found for glob ` ) ;
return '' ;
}
} ) ;
}
exports . hashFiles = hashFiles ;
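// A minimal sketch of the hashing scheme above (file names are hypothetical): every
// matched regular file under GITHUB_WORKSPACE is hashed with SHA-256, the raw per-file
// digests are streamed into an outer SHA-256, and the hex digest of the outer hash is
// returned (an empty string when nothing matched):
//
//   combined = sha256( sha256(bytes of a.txt) || sha256(bytes of b.txt) )
//   return hex(combined)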
//# sourceMappingURL=internal-hash-files.js.map
/***/ } ) ,
/***/ 1063 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . MatchKind = void 0 ;
/ * *
* Indicates whether a pattern matches a path
* /
var MatchKind ;
( function ( MatchKind ) {
/** Not matched */
MatchKind [ MatchKind [ "None" ] = 0 ] = "None" ;
/** Matched if the path is a directory */
MatchKind [ MatchKind [ "Directory" ] = 1 ] = "Directory" ;
/** Matched if the path is a regular file */
MatchKind [ MatchKind [ "File" ] = 2 ] = "File" ;
/** Matched */
MatchKind [ MatchKind [ "All" ] = 3 ] = "All" ;
} ) ( MatchKind || ( exports . MatchKind = MatchKind = { } ) ) ;
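// The enum values are bit flags, so match results can be combined and cleared with
// bitwise arithmetic (see `match` in the pattern helper module further below):
//
//   (MatchKind.Directory | MatchKind.File) === MatchKind.All   // 1 | 2 === 3
//   (MatchKind.All & ~MatchKind.File) === MatchKind.Directory  // 3 & ~2 === 1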
//# sourceMappingURL=internal-match-kind.js.map
/***/ } ) ,
/***/ 1849 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . safeTrimTrailingSeparator = exports . normalizeSeparators = exports . hasRoot = exports . hasAbsoluteRoot = exports . ensureAbsoluteRoot = exports . dirname = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const assert _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
*
* For example, on Linux/macOS:
* - `/ => /`
* - `/hello => /`
*
* For example, on Windows:
* - `C:\ => C:\`
* - `C:\hello => C:\`
* - `C: => C:`
* - `C:hello => C:`
* - `\ => \`
* - `\hello => \`
* - `\\hello => \\hello`
* - `\\hello\world => \\hello\world`
* /
function dirname ( p ) {
// Normalize slashes and trim unnecessary trailing slash
p = safeTrimTrailingSeparator ( p ) ;
// Windows UNC root, e.g. \\hello or \\hello\world
if ( IS _WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/ . test ( p ) ) {
return p ;
}
// Get dirname
let result = path . dirname ( p ) ;
// Trim trailing slash for Windows UNC root, e.g. \\hello\world\
if ( IS _WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/ . test ( result ) ) {
result = safeTrimTrailingSeparator ( result ) ;
}
return result ;
}
exports . dirname = dirname ;
/ * *
* Roots the path if not already rooted. On Windows, relative roots like `\`
* or `C:` are expanded based on the current working directory.
* /
function ensureAbsoluteRoot ( root , itemPath ) {
( 0 , assert _1 . default ) ( root , ` ensureAbsoluteRoot parameter 'root' must not be empty ` ) ;
( 0 , assert _1 . default ) ( itemPath , ` ensureAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Already rooted
if ( hasAbsoluteRoot ( itemPath ) ) {
return itemPath ;
}
// Windows
if ( IS _WINDOWS ) {
// Check for itemPath like C: or C:foo
if ( itemPath . match ( /^[A-Z]:[^\\/]|^[A-Z]:$/i ) ) {
let cwd = process . cwd ( ) ;
( 0 , assert _1 . default ) ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
// Drive letter matches cwd? Expand to cwd
if ( itemPath [ 0 ] . toUpperCase ( ) === cwd [ 0 ] . toUpperCase ( ) ) {
// Drive only, e.g. C:
if ( itemPath . length === 2 ) {
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ` ;
}
// Drive + path, e.g. C:foo
else {
if ( ! cwd . endsWith ( '\\' ) ) {
cwd += '\\' ;
}
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ${ itemPath . substr ( 2 ) } ` ;
}
}
// Different drive
else {
return ` ${ itemPath [ 0 ] } : \\ ${ itemPath . substr ( 2 ) } ` ;
}
}
// Check for itemPath like \ or \foo
else if ( normalizeSeparators ( itemPath ) . match ( /^\\$|^\\[^\\]/ ) ) {
const cwd = process . cwd ( ) ;
( 0 , assert _1 . default ) ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
return ` ${ cwd [ 0 ] } : \\ ${ itemPath . substr ( 1 ) } ` ;
}
}
( 0 , assert _1 . default ) ( hasAbsoluteRoot ( root ) , ` ensureAbsoluteRoot parameter 'root' must have an absolute root ` ) ;
// Otherwise ensure root ends with a separator
if ( root . endsWith ( '/' ) || ( IS _WINDOWS && root . endsWith ( '\\' ) ) ) {
// Intentionally empty
}
else {
// Append separator
root += path . sep ;
}
return root + itemPath ;
}
exports . ensureAbsoluteRoot = ensureAbsoluteRoot ;
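// Minimal usage sketch (paths are hypothetical). On Linux/macOS a relative item is
// simply appended to the root:
//
//   ensureAbsoluteRoot('/base', 'foo/bar')      // => '/base/foo/bar'
//
// On Windows, a drive-relative item such as `C:foo` is expanded from the current
// working directory when the drive letters match; assuming `process.cwd()` is
// 'C:\\work', ensureAbsoluteRoot('D:\\base', 'C:foo') would yield 'C:\\work\\foo'.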
/ * *
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
* `\\hello\share` and `C:\hello` (and using alternate separator).
* /
function hasAbsoluteRoot ( itemPath ) {
( 0 , assert _1 . default ) ( itemPath , ` hasAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \\hello\share or C:\hello
return itemPath . startsWith ( '\\\\' ) || /^[A-Z]:\\/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasAbsoluteRoot = hasAbsoluteRoot ;
/ * *
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
* /
function hasRoot ( itemPath ) {
( 0 , assert _1 . default ) ( itemPath , ` isRooted parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \ or \hello or \\hello
// E.g. C: or C:\hello
return itemPath . startsWith ( '\\' ) || /^[A-Z]:/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasRoot = hasRoot ;
/ * *
* Removes redundant slashes and converts `/` to `\` on Windows
* /
function normalizeSeparators ( p ) {
p = p || '' ;
// Windows
if ( IS _WINDOWS ) {
// Convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// Remove redundant slashes
const isUnc = /^\\\\+[^\\]/ . test ( p ) ; // e.g. \\hello
return ( isUnc ? '\\' : '' ) + p . replace ( /\\\\+/g , '\\' ) ; // preserve leading \\ for UNC
}
// Remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
exports . normalizeSeparators = normalizeSeparators ;
/ * *
* Normalizes the path separators and trims the trailing separator (when safe).
* For example, `/foo/ => /foo` but `/ => /`
* /
function safeTrimTrailingSeparator ( p ) {
// Short-circuit if empty
if ( ! p ) {
return '' ;
}
// Normalize separators
p = normalizeSeparators ( p ) ;
// No trailing slash
if ( ! p . endsWith ( path . sep ) ) {
return p ;
}
// Check '/' on Linux/macOS and '\' on Windows
if ( p === path . sep ) {
return p ;
}
// On Windows check if drive root. E.g. C:\
if ( IS _WINDOWS && /^[A-Z]:\\$/i . test ( p ) ) {
return p ;
}
// Otherwise trim trailing slash
return p . substr ( 0 , p . length - 1 ) ;
}
exports . safeTrimTrailingSeparator = safeTrimTrailingSeparator ;
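// A few illustrative calls for the helpers above (the Windows-flavored results assume
// process.platform === 'win32'):
//
//   normalizeSeparators('//foo//bar//')        // => '/foo/bar/'
//   normalizeSeparators('C:/hello//world')     // => 'C:\\hello\\world'   (Windows)
//   safeTrimTrailingSeparator('/foo/')         // => '/foo'
//   safeTrimTrailingSeparator('/')             // => '/'
//   safeTrimTrailingSeparator('C:\\')          // => 'C:\\'               (Windows)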
//# sourceMappingURL=internal-path-helper.js.map
/***/ } ) ,
/***/ 6836 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . Path = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const pathHelper = _ _importStar ( _ _nccwpck _require _ _ ( 1849 ) ) ;
const assert _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Helper class for parsing paths into segments
* /
class Path {
/ * *
* Constructs a Path
* @ param itemPath Path or array of segments
* /
constructor ( itemPath ) {
this . segments = [ ] ;
// String
if ( typeof itemPath === 'string' ) {
( 0 , assert _1 . default ) ( itemPath , ` Parameter 'itemPath' must not be empty ` ) ;
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// Not rooted
if ( ! pathHelper . hasRoot ( itemPath ) ) {
this . segments = itemPath . split ( path . sep ) ;
}
// Rooted
else {
// Add all segments, while not at the root
let remaining = itemPath ;
let dir = pathHelper . dirname ( remaining ) ;
while ( dir !== remaining ) {
// Add the segment
const basename = path . basename ( remaining ) ;
this . segments . unshift ( basename ) ;
// Truncate the last segment
remaining = dir ;
dir = pathHelper . dirname ( remaining ) ;
}
// Remainder is the root
this . segments . unshift ( remaining ) ;
}
}
// Array
else {
// Must not be empty
( 0 , assert _1 . default ) ( itemPath . length > 0 , ` Parameter 'itemPath' must not be an empty array ` ) ;
// Each segment
for ( let i = 0 ; i < itemPath . length ; i ++ ) {
let segment = itemPath [ i ] ;
// Must not be empty
( 0 , assert _1 . default ) ( segment , ` Parameter 'itemPath' must not contain any empty segments ` ) ;
// Normalize slashes
segment = pathHelper . normalizeSeparators ( itemPath [ i ] ) ;
// Root segment
if ( i === 0 && pathHelper . hasRoot ( segment ) ) {
segment = pathHelper . safeTrimTrailingSeparator ( segment ) ;
( 0 , assert _1 . default ) ( segment === pathHelper . dirname ( segment ) , ` Parameter 'itemPath' root segment contains information for multiple segments ` ) ;
this . segments . push ( segment ) ;
}
// All other segments
else {
// Must not contain slash
( 0 , assert _1 . default ) ( ! segment . includes ( path . sep ) , ` Parameter 'itemPath' contains unexpected path separators ` ) ;
this . segments . push ( segment ) ;
}
}
}
}
/ * *
* Converts the path to its string representation
* /
toString ( ) {
// First segment
let result = this . segments [ 0 ] ;
// All others
let skipSlash = result . endsWith ( path . sep ) || ( IS _WINDOWS && /^[A-Z]:$/i . test ( result ) ) ;
for ( let i = 1 ; i < this . segments . length ; i ++ ) {
if ( skipSlash ) {
skipSlash = false ;
}
else {
result += path . sep ;
}
result += this . segments [ i ] ;
}
return result ;
}
}
exports . Path = Path ;
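// Minimal usage sketch (POSIX paths, hypothetical values): a rooted path is split into
// a root segment plus one segment per component, and toString() re-joins them without
// doubling the root separator:
//
//   new Path('/foo/bar/baz').segments        // => ['/', 'foo', 'bar', 'baz']
//   new Path('/foo/bar/baz').toString()      // => '/foo/bar/baz'
//   new Path(['/', 'foo', 'bar']).toString() // => '/foo/bar'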
//# sourceMappingURL=internal-path.js.map
/***/ } ) ,
/***/ 9005 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . partialMatch = exports . match = exports . getSearchPaths = void 0 ;
const pathHelper = _ _importStar ( _ _nccwpck _require _ _ ( 1849 ) ) ;
const internal _match _kind _1 = _ _nccwpck _require _ _ ( 1063 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Given an array of patterns , returns an array of paths to search .
* Duplicates and paths under other included paths are filtered out .
* /
function getSearchPaths ( patterns ) {
// Ignore negate patterns
patterns = patterns . filter ( x => ! x . negate ) ;
// Create a map of all search paths
const searchPathMap = { } ;
for ( const pattern of patterns ) {
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
searchPathMap [ key ] = 'candidate' ;
}
const result = [ ] ;
for ( const pattern of patterns ) {
// Check if already included
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
if ( searchPathMap [ key ] === 'included' ) {
continue ;
}
// Check for an ancestor search path
let foundAncestor = false ;
let tempKey = key ;
let parent = pathHelper . dirname ( tempKey ) ;
while ( parent !== tempKey ) {
if ( searchPathMap [ parent ] ) {
foundAncestor = true ;
break ;
}
tempKey = parent ;
parent = pathHelper . dirname ( tempKey ) ;
}
// Include the search pattern in the result
if ( ! foundAncestor ) {
result . push ( pattern . searchPath ) ;
searchPathMap [ key ] = 'included' ;
}
}
return result ;
}
exports . getSearchPaths = getSearchPaths ;
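// Illustrative sketch (hypothetical POSIX patterns, using the Pattern class defined in a
// later module of this bundle): ancestor search paths absorb their descendants, so two
// include patterns rooted under the same directory yield a single search path:
//
//   getSearchPaths([new Pattern('/foo/**'), new Pattern('/foo/bar/**')])   // => ['/foo']
//
// Negate patterns are ignored here; they only subtract matches, they never add roots.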
/ * *
* Matches the patterns against the path
* /
function match ( patterns , itemPath ) {
let result = internal _match _kind _1 . MatchKind . None ;
for ( const pattern of patterns ) {
if ( pattern . negate ) {
result &= ~ pattern . match ( itemPath ) ;
}
else {
result |= pattern . match ( itemPath ) ;
}
}
return result ;
}
exports . match = match ;
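// Because MatchKind values are bit flags, a later negate pattern clears whatever an
// earlier include set (`|=` for includes, `&= ~` for negates), so order matters.
// Sketch with hypothetical POSIX patterns:
//
//   const patterns = [new Pattern('/foo/**'), new Pattern('!/foo/bar')];
//   match(patterns, '/foo/bar')   // => MatchKind.None (the negate cleared the include)
//   match(patterns, '/foo/baz')   // => MatchKind.All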
/ * *
* Checks whether to descend further into the directory
* /
function partialMatch ( patterns , itemPath ) {
return patterns . some ( x => ! x . negate && x . partialMatch ( itemPath ) ) ;
}
exports . partialMatch = partialMatch ;
//# sourceMappingURL=internal-pattern-helper.js.map
/***/ } ) ,
/***/ 4536 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . Pattern = void 0 ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const pathHelper = _ _importStar ( _ _nccwpck _require _ _ ( 1849 ) ) ;
const assert _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const minimatch _1 = _ _nccwpck _require _ _ ( 3973 ) ;
const internal _match _kind _1 = _ _nccwpck _require _ _ ( 1063 ) ;
const internal _path _1 = _ _nccwpck _require _ _ ( 6836 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
class Pattern {
constructor ( patternOrNegate , isImplicitPattern = false , segments , homedir ) {
/ * *
* Indicates whether matches should be excluded from the result set
* /
this . negate = false ;
// Pattern overload
let pattern ;
if ( typeof patternOrNegate === 'string' ) {
pattern = patternOrNegate . trim ( ) ;
}
// Segments overload
else {
// Convert to pattern
segments = segments || [ ] ;
( 0 , assert _1 . default ) ( segments . length , ` Parameter 'segments' must not be empty ` ) ;
const root = Pattern . getLiteral ( segments [ 0 ] ) ;
( 0 , assert _1 . default ) ( root && pathHelper . hasAbsoluteRoot ( root ) , ` Parameter 'segments' first element must be a root path ` ) ;
pattern = new internal _path _1 . Path ( segments ) . toString ( ) . trim ( ) ;
if ( patternOrNegate ) {
pattern = ` ! ${ pattern } ` ;
}
}
// Negate
while ( pattern . startsWith ( '!' ) ) {
this . negate = ! this . negate ;
pattern = pattern . substr ( 1 ) . trim ( ) ;
}
// Normalize slashes and ensures absolute root
pattern = Pattern . fixupPattern ( pattern , homedir ) ;
// Segments
this . segments = new internal _path _1 . Path ( pattern ) . segments ;
// Trailing slash indicates the pattern should only match directories, not regular files
this . trailingSeparator = pathHelper
. normalizeSeparators ( pattern )
. endsWith ( path . sep ) ;
pattern = pathHelper . safeTrimTrailingSeparator ( pattern ) ;
// Search path (literal path prior to the first glob segment)
let foundGlob = false ;
const searchSegments = this . segments
. map ( x => Pattern . getLiteral ( x ) )
. filter ( x => ! foundGlob && ! ( foundGlob = x === '' ) ) ;
this . searchPath = new internal _path _1 . Path ( searchSegments ) . toString ( ) ;
// Root RegExp (required when determining partial match)
this . rootRegExp = new RegExp ( Pattern . regExpEscape ( searchSegments [ 0 ] ) , IS _WINDOWS ? 'i' : '' ) ;
this . isImplicitPattern = isImplicitPattern ;
// Create minimatch
const minimatchOptions = {
dot : true ,
nobrace : true ,
nocase : IS _WINDOWS ,
nocomment : true ,
noext : true ,
nonegate : true
} ;
pattern = IS _WINDOWS ? pattern . replace ( /\\/g , '/' ) : pattern ;
this . minimatch = new minimatch _1 . Minimatch ( pattern , minimatchOptions ) ;
}
/ * *
* Matches the pattern against the specified path
* /
match ( itemPath ) {
// Last segment is globstar?
if ( this . segments [ this . segments . length - 1 ] === '**' ) {
// Normalize slashes
itemPath = pathHelper . normalizeSeparators ( itemPath ) ;
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
if ( ! itemPath . endsWith ( path . sep ) && this . isImplicitPattern === false ) {
// Note, this is safe because the constructor ensures the pattern has an absolute root.
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
itemPath = ` ${ itemPath } ${ path . sep } ` ;
}
}
else {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
}
// Match
if ( this . minimatch . match ( itemPath ) ) {
return this . trailingSeparator ? internal _match _kind _1 . MatchKind . Directory : internal _match _kind _1 . MatchKind . All ;
}
return internal _match _kind _1 . MatchKind . None ;
}
/ * *
* Indicates whether the pattern may match descendants of the specified path
* /
partialMatch ( itemPath ) {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// matchOne does not handle root path correctly
if ( pathHelper . dirname ( itemPath ) === itemPath ) {
return this . rootRegExp . test ( itemPath ) ;
}
return this . minimatch . matchOne ( itemPath . split ( IS _WINDOWS ? /\\+/ : /\/+/ ) , this . minimatch . set [ 0 ] , true ) ;
}
/ * *
* Escapes glob patterns within a path
* /
static globEscape ( s ) {
return ( IS _WINDOWS ? s : s . replace ( /\\/g , '\\\\' ) ) // escape '\' on Linux/macOS
. replace ( /(\[)(?=[^/]+\])/g , '[[]' ) // escape '[' when ']' follows within the path segment
. replace ( /\?/g , '[?]' ) // escape '?'
. replace ( /\*/g , '[*]' ) ; // escape '*'
}
/ * *
* Normalizes slashes and ensures absolute root
* /
static fixupPattern ( pattern , homedir ) {
// Empty
( 0 , assert _1 . default ) ( pattern , 'pattern cannot be empty' ) ;
// Must not contain `.` segment, unless first segment
// Must not contain `..` segment
const literalSegments = new internal _path _1 . Path ( pattern ) . segments . map ( x => Pattern . getLiteral ( x ) ) ;
( 0 , assert _1 . default ) ( literalSegments . every ( ( x , i ) => ( x !== '.' || i === 0 ) && x !== '..' ) , ` Invalid pattern ' ${ pattern } '. Relative pathing '.' and '..' is not allowed. ` ) ;
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
( 0 , assert _1 . default ) ( ! pathHelper . hasRoot ( pattern ) || literalSegments [ 0 ] , ` Invalid pattern ' ${ pattern } '. Root segment must not contain globs. ` ) ;
// Normalize slashes
pattern = pathHelper . normalizeSeparators ( pattern ) ;
// Replace leading `.` segment
if ( pattern === '.' || pattern . startsWith ( ` . ${ path . sep } ` ) ) {
pattern = Pattern . globEscape ( process . cwd ( ) ) + pattern . substr ( 1 ) ;
}
// Replace leading `~` segment
else if ( pattern === '~' || pattern . startsWith ( ` ~ ${ path . sep } ` ) ) {
homedir = homedir || os . homedir ( ) ;
( 0 , assert _1 . default ) ( homedir , 'Unable to determine HOME directory' ) ;
( 0 , assert _1 . default ) ( pathHelper . hasAbsoluteRoot ( homedir ) , ` Expected HOME directory to be a rooted path. Actual ' ${ homedir } ' ` ) ;
pattern = Pattern . globEscape ( homedir ) + pattern . substr ( 1 ) ;
}
// Replace relative drive root, e.g. pattern is C: or C:foo
else if ( IS _WINDOWS &&
( pattern . match ( /^[A-Z]:$/i ) || pattern . match ( /^[A-Z]:[^\\]/i ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , pattern . substr ( 0 , 2 ) ) ;
if ( pattern . length > 2 && ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 2 ) ;
}
// Replace relative root, e.g. pattern is \ or \foo
else if ( IS _WINDOWS && ( pattern === '\\' || pattern . match ( /^\\[^\\]/ ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , '\\' ) ;
if ( ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 1 ) ;
}
// Otherwise ensure absolute root
else {
pattern = pathHelper . ensureAbsoluteRoot ( Pattern . globEscape ( process . cwd ( ) ) , pattern ) ;
}
return pathHelper . normalizeSeparators ( pattern ) ;
}
/ * *
* Attempts to unescape a pattern segment to create a literal path segment .
* Otherwise returns empty string .
* /
static getLiteral ( segment ) {
let literal = '' ;
for ( let i = 0 ; i < segment . length ; i ++ ) {
const c = segment [ i ] ;
// Escape
if ( c === '\\' && ! IS _WINDOWS && i + 1 < segment . length ) {
literal += segment [ ++ i ] ;
continue ;
}
// Wildcard
else if ( c === '*' || c === '?' ) {
return '' ;
}
// Character set
else if ( c === '[' && i + 1 < segment . length ) {
let set = '' ;
let closed = - 1 ;
for ( let i2 = i + 1 ; i2 < segment . length ; i2 ++ ) {
const c2 = segment [ i2 ] ;
// Escape
if ( c2 === '\\' && ! IS _WINDOWS && i2 + 1 < segment . length ) {
set += segment [ ++ i2 ] ;
continue ;
}
// Closed
else if ( c2 === ']' ) {
closed = i2 ;
break ;
}
// Otherwise
else {
set += c2 ;
}
}
// Closed?
if ( closed >= 0 ) {
// Cannot convert
if ( set . length > 1 ) {
return '' ;
}
// Convert to literal
if ( set ) {
literal += set ;
i = closed ;
continue ;
}
}
// Otherwise fall thru
}
// Append
literal += c ;
}
return literal ;
}
/ * *
* Escapes regexp special characters
* https://javascript.info/regexp-escaping
* /
static regExpEscape ( s ) {
return s . replace ( /[[\\^$.|?*+()]/g , '\\$&' ) ;
}
}
exports . Pattern = Pattern ;
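// A small sketch of the globstar quirk handled in match() above (POSIX paths,
// hypothetical values): for a pattern ending in `**`, the directory immediately
// preceding the globstar also matches because a trailing slash is appended before
// calling minimatch, while a pattern written with a trailing slash only matches
// directories:
//
//   new Pattern('/foo/**').match('/foo')        // => MatchKind.All
//   new Pattern('/foo/**').match('/foo/a.txt')  // => MatchKind.All
//   new Pattern('/foo/').match('/foo')          // => MatchKind.Directory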
//# sourceMappingURL=internal-pattern.js.map
/***/ } ) ,
/***/ 9117 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . SearchState = void 0 ;
class SearchState {
constructor ( path , level ) {
this . path = path ;
this . level = level ;
}
}
exports . SearchState = SearchState ;
//# sourceMappingURL=internal-search-state.js.map
/***/ } ) ,
/***/ 5526 :
/***/ ( function ( _ _unused _webpack _module , exports ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . PersonalAccessTokenCredentialHandler = exports . BearerCredentialHandler = exports . BasicCredentialHandler = void 0 ;
class BasicCredentialHandler {
constructor ( username , password ) {
this . username = username ;
this . password = password ;
}
prepareRequest ( options ) {
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Basic ${ Buffer . from ( ` ${ this . username } : ${ this . password } ` ) . toString ( 'base64' ) } ` ;
}
// This handler cannot handle 401
canHandleAuthentication ( ) {
return false ;
}
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
}
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
class BearerCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Bearer ${ this . token } ` ;
}
// This handler cannot handle 401
canHandleAuthentication ( ) {
return false ;
}
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
}
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
class PersonalAccessTokenCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Basic ${ Buffer . from ( ` PAT: ${ this . token } ` ) . toString ( 'base64' ) } ` ;
}
// This handler cannot handle 401
canHandleAuthentication ( ) {
return false ;
}
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
}
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
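// Minimal usage sketch (the token value is hypothetical): each handler only mutates the
// outgoing request headers in prepareRequest(); none of them can answer a 401 challenge
// (canHandleAuthentication() returns false). A handler is passed to the HttpClient
// defined further below, e.g.:
//
//   const client = new HttpClient('my-user-agent', [new BearerCredentialHandler('xyz')]);
//   // every request then carries:  Authorization: Bearer xyz
//
// PersonalAccessTokenCredentialHandler instead sends Authorization: Basic base64("PAT:<token>").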
//# sourceMappingURL=auth.js.map
/***/ } ) ,
/***/ 6255 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
/* eslint-disable @typescript-eslint/no-explicit-any */
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . HttpClient = exports . isHttps = exports . HttpClientResponse = exports . HttpClientError = exports . getProxyUrl = exports . MediaTypes = exports . Headers = exports . HttpCodes = void 0 ;
const http = _ _importStar ( _ _nccwpck _require _ _ ( 3685 ) ) ;
const https = _ _importStar ( _ _nccwpck _require _ _ ( 5687 ) ) ;
const pm = _ _importStar ( _ _nccwpck _require _ _ ( 9835 ) ) ;
const tunnel = _ _importStar ( _ _nccwpck _require _ _ ( 4294 ) ) ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/ * *
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
* /
function getProxyUrl ( serverUrl ) {
const proxyUrl = pm . getProxyUrl ( new URL ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientError extends Error {
constructor ( message , statusCode ) {
super ( message ) ;
this . name = 'HttpClientError' ;
this . statusCode = statusCode ;
Object . setPrototypeOf ( this , HttpClientError . prototype ) ;
}
}
exports . HttpClientError = HttpClientError ;
class HttpClientResponse {
constructor ( message ) {
this . message = message ;
}
readBody ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let output = Buffer . alloc ( 0 ) ;
this . message . on ( 'data' , ( chunk ) => {
output = Buffer . concat ( [ output , chunk ] ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( output . toString ( ) ) ;
} ) ;
} ) ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
const parsedUrl = new URL ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
get ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
del ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
head ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
} ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that "not found" (404) resolves with a null result. Other errors (4xx, 5xx) reject the promise.
* /
getJson ( requestUrl , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
const res = yield this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
postJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
putJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
patchJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
request ( verb , requestUrl , data , headers ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
const parsedUrl = new URL ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
const maxTries = this . _allowRetries && RetryableHttpVerbs . includes ( verb )
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
do {
response = yield this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( const handler of this . handlers ) {
if ( handler . canHandleAuthentication ( response ) ) {
authenticationHandler = handler ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( response . message . statusCode &&
HttpRedirectCodes . includes ( response . message . statusCode ) &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
const parsedRedirectUrl = new URL ( redirectUrl ) ;
if ( parsedUrl . protocol === 'https:' &&
parsedUrl . protocol !== parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning it,
// otherwise the open socket would be leaked.
yield response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( const header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = yield this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( ! response . message . statusCode ||
! HttpResponseRetryCodes . includes ( response . message . statusCode ) ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
yield response . readBody ( ) ;
yield this . _performExponentialBackoff ( numTries ) ;
}
} while ( numTries < maxTries ) ;
return response ;
} ) ;
}
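// A short summary of the retry/redirect policy implemented by request() above:
// - Retries apply only when `allowRetries` is set and the verb is OPTIONS, GET, DELETE
//   or HEAD; the total number of attempts is `maxRetries + 1`, and only 502/503/504
//   responses are retried, with exponential backoff between attempts.
// - Redirects (301/302/303/307/308) are followed up to `maxRedirects` (default 50);
//   the Authorization header is dropped when the redirect changes hostname, and an
//   HTTPS -> HTTP downgrade throws unless `allowRedirectDowngrade` is true.
// For example (hypothetical options), a client built with
// new HttpClient('agent', [], { allowRetries: true, maxRetries: 2 }) will attempt a GET
// that keeps returning 502 up to 3 times.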
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve , reject ) => {
function callbackForResult ( err , res ) {
if ( err ) {
reject ( err ) ;
}
else if ( ! res ) {
// If `err` is not passed, then `res` must be passed.
reject ( new Error ( 'Unknown error' ) ) ;
}
else {
resolve ( res ) ;
}
}
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
if ( typeof data === 'string' ) {
if ( ! info . options . headers ) {
info . options . headers = { } ;
}
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
function handleResult ( err , res ) {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
}
const req = info . httpModule . request ( info . options , ( msg ) => {
const res = new HttpClientResponse ( msg ) ;
handleResult ( undefined , res ) ;
} ) ;
let socket ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult ( new Error ( ` Request timeout: ${ info . options . path } ` ) ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
* /
getAgent ( serverUrl ) {
const parsedUrl = new URL ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
for ( const handler of this . handlers ) {
handler . prepareRequest ( info . options ) ;
}
}
return info ;
}
_mergeHeaders ( headers ) {
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers || { } ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
const proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
const useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
// This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
if ( proxyUrl && proxyUrl . hostname ) {
const agentOptions = {
maxSockets ,
keepAlive : this . _keepAlive ,
proxy : Object . assign ( Object . assign ( { } , ( ( proxyUrl . username || proxyUrl . password ) && {
proxyAuth : ` ${ proxyUrl . username } : ${ proxyUrl . password } `
} ) ) , { host : proxyUrl . hostname , port : proxyUrl . port } )
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect requests for the entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
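// Retry delay: ExponentialBackoffTimeSlice * 2^retryNumber milliseconds, with
// retryNumber capped at ExponentialBackoffCeiling (both constants are defined
// elsewhere in this bundle).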
_performExponentialBackoff ( retryNumber ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
} ) ;
}
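// Reads the body and parses it as JSON, optionally reviving ISO date strings
// via dateTimeDeserializer. A 404 resolves rather than rejects; other status
// codes above 299 reject with an HttpClientError carrying the parsed result.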
_processResponse ( res , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve , reject ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const statusCode = res . message . statusCode || 0 ;
const response = {
statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode === HttpCodes . NotFound ) {
resolve ( response ) ;
}
// get the result from the body
function dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
const a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
let obj ;
let contents ;
try {
contents = yield res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = ` Failed request: ( ${ statusCode } ) ` ;
}
const err = new HttpClientError ( msg , statusCode ) ;
err . result = response . result ;
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ) ;
} ) ;
}
}
exports . HttpClient = HttpClient ;
const lowercaseKeys = ( obj ) => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 9835 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . checkBypass = exports . getProxyUrl = void 0 ;
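// getProxyUrl: https requests use https_proxy/HTTPS_PROXY, http requests use
// http_proxy/HTTP_PROXY, and undefined is returned when no variable is set or
// the host is bypassed (see checkBypass below).
// Illustrative example (assuming HTTPS_PROXY=http://proxy:8080 is exported):
//   getProxyUrl(new URL('https://api.github.com')) -> new URL('http://proxy:8080')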
function getProxyUrl ( reqUrl ) {
const usingSsl = reqUrl . protocol === 'https:' ;
if ( checkBypass ( reqUrl ) ) {
return undefined ;
}
const proxyVar = ( ( ) => {
if ( usingSsl ) {
return process . env [ 'https_proxy' ] || process . env [ 'HTTPS_PROXY' ] ;
}
else {
return process . env [ 'http_proxy' ] || process . env [ 'HTTP_PROXY' ] ;
}
} ) ( ) ;
if ( proxyVar ) {
return new URL ( proxyVar ) ;
}
else {
return undefined ;
}
}
exports . getProxyUrl = getProxyUrl ;
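// checkBypass compares the uppercased request host (and host:port, where the
// port comes from the URL or defaults to 80/443 by protocol) against the
// comma-separated entries of no_proxy/NO_PROXY. Matching is exact, not
// suffix-based.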
function checkBypass ( reqUrl ) {
if ( ! reqUrl . hostname ) {
return false ;
}
const noProxy = process . env [ 'no_proxy' ] || process . env [ 'NO_PROXY' ] || '' ;
if ( ! noProxy ) {
return false ;
}
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
const upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( const upperNoProxyItem of noProxy
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
//# sourceMappingURL=proxy.js.map
/***/ } ) ,
/***/ 9417 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = balanced ;
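// balanced(a, b, str) finds the first balanced a...b pair and splits the
// string around it. Illustrative call (not part of this file):
//   balanced('{', '}', 'a{b,c}d') -> { start: 1, end: 5, pre: 'a', body: 'b,c', post: 'd' }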
function balanced ( a , b , str ) {
if ( a instanceof RegExp ) a = maybeMatch ( a , str ) ;
if ( b instanceof RegExp ) b = maybeMatch ( b , str ) ;
var r = range ( a , b , str ) ;
return r && {
start : r [ 0 ] ,
end : r [ 1 ] ,
pre : str . slice ( 0 , r [ 0 ] ) ,
body : str . slice ( r [ 0 ] + a . length , r [ 1 ] ) ,
post : str . slice ( r [ 1 ] + b . length )
} ;
}
function maybeMatch ( reg , str ) {
var m = str . match ( reg ) ;
return m ? m [ 0 ] : null ;
}
balanced . range = range ;
function range ( a , b , str ) {
var begs , beg , left , right , result ;
var ai = str . indexOf ( a ) ;
var bi = str . indexOf ( b , ai + 1 ) ;
var i = ai ;
if ( ai >= 0 && bi > 0 ) {
begs = [ ] ;
left = str . length ;
while ( i >= 0 && ! result ) {
if ( i == ai ) {
begs . push ( i ) ;
ai = str . indexOf ( a , i + 1 ) ;
} else if ( begs . length == 1 ) {
result = [ begs . pop ( ) , bi ] ;
} else {
beg = begs . pop ( ) ;
if ( beg < left ) {
left = beg ;
right = bi ;
}
bi = str . indexOf ( b , i + 1 ) ;
}
i = ai < bi && ai >= 0 ? ai : bi ;
}
if ( begs . length ) {
result = [ left , right ] ;
}
}
return result ;
}
/***/ } ) ,
/***/ 3717 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var concatMap = _ _nccwpck _require _ _ ( 6891 ) ;
var balanced = _ _nccwpck _require _ _ ( 9417 ) ;
module . exports = expandTop ;
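// Brace-expansion entry point. Backslash-escaped metacharacters are replaced
// with random sentinel strings before expanding and restored afterwards, so
// escaped braces, commas and dots stay literal.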
var escSlash = '\0SLASH' + Math . random ( ) + '\0' ;
var escOpen = '\0OPEN' + Math . random ( ) + '\0' ;
var escClose = '\0CLOSE' + Math . random ( ) + '\0' ;
var escComma = '\0COMMA' + Math . random ( ) + '\0' ;
var escPeriod = '\0PERIOD' + Math . random ( ) + '\0' ;
function numeric ( str ) {
return parseInt ( str , 10 ) == str
? parseInt ( str , 10 )
: str . charCodeAt ( 0 ) ;
}
function escapeBraces ( str ) {
return str . split ( '\\\\' ) . join ( escSlash )
. split ( '\\{' ) . join ( escOpen )
. split ( '\\}' ) . join ( escClose )
. split ( '\\,' ) . join ( escComma )
. split ( '\\.' ) . join ( escPeriod ) ;
}
function unescapeBraces ( str ) {
return str . split ( escSlash ) . join ( '\\' )
. split ( escOpen ) . join ( '{' )
. split ( escClose ) . join ( '}' )
. split ( escComma ) . join ( ',' )
. split ( escPeriod ) . join ( '.' ) ;
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts ( str ) {
if ( ! str )
return [ '' ] ;
var parts = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m )
return str . split ( ',' ) ;
var pre = m . pre ;
var body = m . body ;
var post = m . post ;
var p = pre . split ( ',' ) ;
p [ p . length - 1 ] += '{' + body + '}' ;
var postParts = parseCommaParts ( post ) ;
if ( post . length ) {
p [ p . length - 1 ] += postParts . shift ( ) ;
p . push . apply ( p , postParts ) ;
}
parts . push . apply ( parts , p ) ;
return parts ;
}
function expandTop ( str ) {
if ( ! str )
return [ ] ;
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if ( str . substr ( 0 , 2 ) === '{}' ) {
str = '\\{\\}' + str . substr ( 2 ) ;
}
return expand ( escapeBraces ( str ) , true ) . map ( unescapeBraces ) ;
}
function identity ( e ) {
return e ;
}
function embrace ( str ) {
return '{' + str + '}' ;
}
function isPadded ( el ) {
return /^-?0\d/ . test ( el ) ;
}
function lte ( i , y ) {
return i <= y ;
}
function gte ( i , y ) {
return i >= y ;
}
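// expand() locates the first balanced {...} group, decides whether it is a
// numeric/alphabetic sequence ({1..3}, {a..c}, optionally with an increment)
// or a comma-separated option list, then combines the prefix, each expansion
// and the recursively expanded suffix into the result set.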
function expand ( str , isTop ) {
var expansions = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m || /\$$/ . test ( m . pre ) ) return [ str ] ;
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isSequence = isNumericSequence || isAlphaSequence ;
var isOptions = m . body . indexOf ( ',' ) >= 0 ;
if ( ! isSequence && ! isOptions ) {
// {a},b}
if ( m . post . match ( /,.*\}/ ) ) {
str = m . pre + '{' + m . body + escClose + m . post ;
return expand ( str ) ;
}
return [ str ] ;
}
var n ;
if ( isSequence ) {
n = m . body . split ( /\.\./ ) ;
} else {
n = parseCommaParts ( m . body ) ;
if ( n . length === 1 ) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand ( n [ 0 ] , false ) . map ( embrace ) ;
if ( n . length === 1 ) {
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
return post . map ( function ( p ) {
return m . pre + n [ 0 ] + p ;
} ) ;
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m . pre ;
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
var N ;
if ( isSequence ) {
var x = numeric ( n [ 0 ] ) ;
var y = numeric ( n [ 1 ] ) ;
var width = Math . max ( n [ 0 ] . length , n [ 1 ] . length )
var incr = n . length == 3
? Math . abs ( numeric ( n [ 2 ] ) )
: 1 ;
var test = lte ;
var reverse = y < x ;
if ( reverse ) {
incr *= - 1 ;
test = gte ;
}
var pad = n . some ( isPadded ) ;
N = [ ] ;
for ( var i = x ; test ( i , y ) ; i += incr ) {
var c ;
if ( isAlphaSequence ) {
c = String . fromCharCode ( i ) ;
if ( c === '\\' )
c = '' ;
} else {
c = String ( i ) ;
if ( pad ) {
var need = width - c . length ;
if ( need > 0 ) {
var z = new Array ( need + 1 ) . join ( '0' ) ;
if ( i < 0 )
c = '-' + z + c . slice ( 1 ) ;
else
c = z + c ;
}
}
}
N . push ( c ) ;
}
} else {
N = concatMap ( n , function ( el ) { return expand ( el , false ) } ) ;
}
for ( var j = 0 ; j < N . length ; j ++ ) {
for ( var k = 0 ; k < post . length ; k ++ ) {
var expansion = pre + N [ j ] + post [ k ] ;
if ( ! isTop || isSequence || expansion )
expansions . push ( expansion ) ;
}
}
return expansions ;
}
/***/ } ) ,
/***/ 6891 :
/***/ ( ( module ) => {
module . exports = function ( xs , fn ) {
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
var x = fn ( xs [ i ] , i ) ;
if ( isArray ( x ) ) res . push . apply ( res , x ) ;
else res . push ( x ) ;
}
return res ;
} ;
var isArray = Array . isArray || function ( xs ) {
return Object . prototype . toString . call ( xs ) === '[object Array]' ;
} ;
/***/ } ) ,
/***/ 6863 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = realpath
realpath . realpath = realpath
realpath . sync = realpathSync
realpath . realpathSync = realpathSync
realpath . monkeypatch = monkeypatch
realpath . unmonkeypatch = unmonkeypatch
var fs = _ _nccwpck _require _ _ ( 7147 )
var origRealpath = fs . realpath
var origRealpathSync = fs . realpathSync
var version = process . version
var ok = /^v[0-5]\./ . test ( version )
var old = _ _nccwpck _require _ _ ( 1734 )
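// On Node 0.x-5.x the native fs.realpath is used directly; on newer versions
// the pure-JS implementation below is only consulted when the native call
// fails with ELOOP, ENOMEM or ENAMETOOLONG.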
function newError ( er ) {
return er && er . syscall === 'realpath' && (
er . code === 'ELOOP' ||
er . code === 'ENOMEM' ||
er . code === 'ENAMETOOLONG'
)
}
function realpath ( p , cache , cb ) {
if ( ok ) {
return origRealpath ( p , cache , cb )
}
if ( typeof cache === 'function' ) {
cb = cache
cache = null
}
origRealpath ( p , cache , function ( er , result ) {
if ( newError ( er ) ) {
old . realpath ( p , cache , cb )
} else {
cb ( er , result )
}
} )
}
function realpathSync ( p , cache ) {
if ( ok ) {
return origRealpathSync ( p , cache )
}
try {
return origRealpathSync ( p , cache )
} catch ( er ) {
if ( newError ( er ) ) {
return old . realpathSync ( p , cache )
} else {
throw er
}
}
}
function monkeypatch ( ) {
fs . realpath = realpath
fs . realpathSync = realpathSync
}
function unmonkeypatch ( ) {
fs . realpath = origRealpath
fs . realpathSync = origRealpathSync
}
/***/ } ) ,
/***/ 1734 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var pathModule = _ _nccwpck _require _ _ ( 1017 ) ;
var isWindows = process . platform === 'win32' ;
var fs = _ _nccwpck _require _ _ ( 7147 ) ;
// JavaScript implementation of realpath, ported from node pre-v6
var DEBUG = process . env . NODE _DEBUG && /fs/ . test ( process . env . NODE _DEBUG ) ;
function rethrow ( ) {
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
// is fairly slow to generate.
var callback ;
if ( DEBUG ) {
var backtrace = new Error ;
callback = debugCallback ;
} else
callback = missingCallback ;
return callback ;
function debugCallback ( err ) {
if ( err ) {
backtrace . message = err . message ;
err = backtrace ;
missingCallback ( err ) ;
}
}
function missingCallback ( err ) {
if ( err ) {
if ( process . throwDeprecation )
throw err ; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
else if ( ! process . noDeprecation ) {
var msg = 'fs: missing callback ' + ( err . stack || err . message ) ;
if ( process . traceDeprecation )
console . trace ( msg ) ;
else
console . error ( msg ) ;
}
}
}
}
function maybeCallback ( cb ) {
return typeof cb === 'function' ? cb : rethrow ( ) ;
}
var normalize = pathModule . normalize ;
// Regexp that finds the next portion of a (partial) path
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
if ( isWindows ) {
var nextPartRe = /(.*?)(?:[\/\\]+|$)/g ;
} else {
var nextPartRe = /(.*?)(?:[\/]+|$)/g ;
}
// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
if ( isWindows ) {
var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/ ;
} else {
var splitRootRe = /^[\/]*/ ;
}
exports . realpathSync = function realpathSync ( p , cache ) {
// make sure p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return cache [ p ] ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstatSync ( base ) ;
knownHard [ base ] = true ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
// NB: p.length changes.
while ( pos < p . length ) {
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
continue ;
}
var resolvedLink ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// some known symbolic link. no need to stat again.
resolvedLink = cache [ base ] ;
} else {
var stat = fs . lstatSync ( base ) ;
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
continue ;
}
// read the link if it wasn't read before
// dev/ino always return 0 on windows, so skip the check.
var linkTarget = null ;
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
linkTarget = seenLinks [ id ] ;
}
}
if ( linkTarget === null ) {
fs . statSync ( base ) ;
linkTarget = fs . readlinkSync ( base ) ;
}
resolvedLink = pathModule . resolve ( previous , linkTarget ) ;
// track this, if given a cache.
if ( cache ) cache [ base ] = resolvedLink ;
if ( ! isWindows ) seenLinks [ id ] = linkTarget ;
}
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
if ( cache ) cache [ original ] = p ;
return p ;
} ;
exports . realpath = function realpath ( p , cache , cb ) {
if ( typeof cb !== 'function' ) {
cb = maybeCallback ( cache ) ;
cache = null ;
}
// make sure p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return process . nextTick ( cb . bind ( null , null , cache [ p ] ) ) ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
knownHard [ base ] = true ;
LOOP ( ) ;
} ) ;
} else {
process . nextTick ( LOOP ) ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
function LOOP ( ) {
// stop if scanned past end of path
if ( pos >= p . length ) {
if ( cache ) cache [ original ] = p ;
return cb ( null , p ) ;
}
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
return process . nextTick ( LOOP ) ;
}
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// known symbolic link. no need to stat again.
return gotResolvedLink ( cache [ base ] ) ;
}
return fs . lstat ( base , gotStat ) ;
}
function gotStat ( err , stat ) {
if ( err ) return cb ( err ) ;
// if not a symlink, skip to the next path part
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
return process . nextTick ( LOOP ) ;
}
// stat & read the link if not read before
// call gotTarget as soon as the link target is known
// dev/ino always return 0 on windows, so skip the check.
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
return gotTarget ( null , seenLinks [ id ] , base ) ;
}
}
fs . stat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
fs . readlink ( base , function ( err , target ) {
if ( ! isWindows ) seenLinks [ id ] = target ;
gotTarget ( err , target ) ;
} ) ;
} ) ;
}
function gotTarget ( err , target , base ) {
if ( err ) return cb ( err ) ;
var resolvedLink = pathModule . resolve ( previous , target ) ;
if ( cache ) cache [ base ] = resolvedLink ;
gotResolvedLink ( resolvedLink ) ;
}
function gotResolvedLink ( resolvedLink ) {
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
} ;
/***/ } ) ,
/***/ 2492 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var wrappy = _ _nccwpck _require _ _ ( 2940 )
var reqs = Object . create ( null )
var once = _ _nccwpck _require _ _ ( 1223 )
module . exports = wrappy ( inflight )
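// inflight(key, cb) deduplicates concurrent async work: the first caller for a
// key receives a wrapped callback, later callers are queued under the same key,
// and all queued callbacks fire when that single callback is invoked.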
function inflight ( key , cb ) {
if ( reqs [ key ] ) {
reqs [ key ] . push ( cb )
return null
} else {
reqs [ key ] = [ cb ]
return makeres ( key )
}
}
function makeres ( key ) {
return once ( function RES ( ) {
var cbs = reqs [ key ]
var len = cbs . length
var args = slice ( arguments )
// XXX It's somewhat ambiguous whether a new callback added in this
// pass should be queued for later execution if something in the
// list of callbacks throws, or if it should just be discarded.
// However, it's such an edge case that it hardly matters, and either
// choice is likely as surprising as the other.
// As it happens, we do go ahead and schedule it for later execution.
try {
for ( var i = 0 ; i < len ; i ++ ) {
cbs [ i ] . apply ( null , args )
}
} finally {
if ( cbs . length > len ) {
// added more in the interim.
// de-zalgo, just in case, but don't call again.
cbs . splice ( 0 , len )
process . nextTick ( function ( ) {
RES . apply ( null , args )
} )
} else {
delete reqs [ key ]
}
}
} )
}
function slice ( args ) {
var length = args . length
var array = [ ]
for ( var i = 0 ; i < length ; i ++ ) array [ i ] = args [ i ]
return array
}
/***/ } ) ,
/***/ 4124 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
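// Prefer util.inherits from Node core, falling back to the standalone shim in
// module 8544 below when it is unavailable.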
try {
var util = _ _nccwpck _require _ _ ( 3837 ) ;
/* istanbul ignore next */
if ( typeof util . inherits !== 'function' ) throw '' ;
module . exports = util . inherits ;
} catch ( e ) {
/* istanbul ignore next */
module . exports = _ _nccwpck _require _ _ ( 8544 ) ;
}
/***/ } ) ,
/***/ 8544 :
/***/ ( ( module ) => {
if ( typeof Object . create === 'function' ) {
// implementation from standard node.js 'util' module
module . exports = function inherits ( ctor , superCtor ) {
if ( superCtor ) {
ctor . super _ = superCtor
ctor . prototype = Object . create ( superCtor . prototype , {
constructor : {
value : ctor ,
enumerable : false ,
writable : true ,
configurable : true
}
} )
}
} ;
} else {
// old school shim for old browsers
module . exports = function inherits ( ctor , superCtor ) {
if ( superCtor ) {
ctor . super _ = superCtor
var TempCtor = function ( ) { }
TempCtor . prototype = superCtor . prototype
ctor . prototype = new TempCtor ( )
ctor . prototype . constructor = ctor
}
}
}
/***/ } ) ,
/***/ 3973 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = minimatch
minimatch . Minimatch = Minimatch
var path = ( function ( ) { try { return _ _nccwpck _require _ _ ( 1017 ) } catch ( e ) { } } ( ) ) || {
sep : '/'
}
minimatch . sep = path . sep
var GLOBSTAR = minimatch . GLOBSTAR = Minimatch . GLOBSTAR = { }
var expand = _ _nccwpck _require _ _ ( 3717 )
var plTypes = {
'!' : { open : '(?:(?!(?:' , close : '))[^/]*?)' } ,
'?' : { open : '(?:' , close : ')?' } ,
'+' : { open : '(?:' , close : ')+' } ,
'*' : { open : '(?:' , close : ')*' } ,
'@' : { open : '(?:' , close : ')' }
}
// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'
// * => any number of characters
var star = qmark + '*?'
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
// characters that need to be escaped in RegExp.
var reSpecials = charSet ( '().*{}+?[]^$\\!' )
// "abc" -> { a:true, b:true, c:true }
function charSet ( s ) {
return s . split ( '' ) . reduce ( function ( set , c ) {
set [ c ] = true
return set
} , { } )
}
// normalizes slashes.
var slashSplit = /\/+/
minimatch . filter = filter
function filter ( pattern , options ) {
options = options || { }
return function ( p , i , list ) {
return minimatch ( p , pattern , options )
}
}
function ext ( a , b ) {
b = b || { }
var t = { }
Object . keys ( a ) . forEach ( function ( k ) {
t [ k ] = a [ k ]
} )
Object . keys ( b ) . forEach ( function ( k ) {
t [ k ] = b [ k ]
} )
return t
}
minimatch . defaults = function ( def ) {
if ( ! def || typeof def !== 'object' || ! Object . keys ( def ) . length ) {
return minimatch
}
var orig = minimatch
var m = function minimatch ( p , pattern , options ) {
return orig ( p , pattern , ext ( def , options ) )
}
m . Minimatch = function Minimatch ( pattern , options ) {
return new orig . Minimatch ( pattern , ext ( def , options ) )
}
m . Minimatch . defaults = function defaults ( options ) {
return orig . defaults ( ext ( def , options ) ) . Minimatch
}
m . filter = function filter ( pattern , options ) {
return orig . filter ( pattern , ext ( def , options ) )
}
m . defaults = function defaults ( options ) {
return orig . defaults ( ext ( def , options ) )
}
m . makeRe = function makeRe ( pattern , options ) {
return orig . makeRe ( pattern , ext ( def , options ) )
}
m . braceExpand = function braceExpand ( pattern , options ) {
return orig . braceExpand ( pattern , ext ( def , options ) )
}
m . match = function ( list , pattern , options ) {
return orig . match ( list , pattern , ext ( def , options ) )
}
return m
}
Minimatch . defaults = function ( def ) {
return minimatch . defaults ( def ) . Minimatch
}
function minimatch ( p , pattern , options ) {
assertValidPattern ( pattern )
if ( ! options ) options = { }
// shortcut: comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
return false
}
return new Minimatch ( pattern , options ) . match ( p )
}
function Minimatch ( pattern , options ) {
if ( ! ( this instanceof Minimatch ) ) {
return new Minimatch ( pattern , options )
}
assertValidPattern ( pattern )
if ( ! options ) options = { }
pattern = pattern . trim ( )
// windows support: need to use /, not \
if ( ! options . allowWindowsEscape && path . sep !== '/' ) {
pattern = pattern . split ( path . sep ) . join ( '/' )
}
this . options = options
this . set = [ ]
this . pattern = pattern
this . regexp = null
this . negate = false
this . comment = false
this . empty = false
this . partial = ! ! options . partial
// make the set of regexps etc.
this . make ( )
}
Minimatch . prototype . debug = function ( ) { }
Minimatch . prototype . make = make
function make ( ) {
var pattern = this . pattern
var options = this . options
// empty patterns and comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
this . comment = true
return
}
if ( ! pattern ) {
this . empty = true
return
}
// step 1: figure out negation, etc.
this . parseNegate ( )
// step 2: expand braces
var set = this . globSet = this . braceExpand ( )
if ( options . debug ) this . debug = function debug ( ) { console . error . apply ( console , arguments ) }
this . debug ( this . pattern , set )
// step 3: now we have a set, so turn each one into a series of path-portion
// matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
set = this . globParts = set . map ( function ( s ) {
return s . split ( slashSplit )
} )
this . debug ( this . pattern , set )
// glob --> regexps
set = set . map ( function ( s , si , set ) {
return s . map ( this . parse , this )
} , this )
this . debug ( this . pattern , set )
// filter out everything that didn't compile properly.
set = set . filter ( function ( s ) {
return s . indexOf ( false ) === - 1
} )
this . debug ( this . pattern , set )
this . set = set
}
Minimatch . prototype . parseNegate = parseNegate
function parseNegate ( ) {
var pattern = this . pattern
var negate = false
var options = this . options
var negateOffset = 0
if ( options . nonegate ) return
for ( var i = 0 , l = pattern . length
; i < l && pattern . charAt ( i ) === '!'
; i ++ ) {
negate = ! negate
negateOffset ++
}
if ( negateOffset ) this . pattern = pattern . substr ( negateOffset )
this . negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch . braceExpand = function ( pattern , options ) {
return braceExpand ( pattern , options )
}
Minimatch . prototype . braceExpand = braceExpand
function braceExpand ( pattern , options ) {
if ( ! options ) {
if ( this instanceof Minimatch ) {
options = this . options
} else {
options = { }
}
}
pattern = typeof pattern === 'undefined'
? this . pattern : pattern
assertValidPattern ( pattern )
// Thanks to Yeting Li <https://github.com/yetingli> for
// improving this regexp to avoid a ReDOS vulnerability.
if ( options . nobrace || ! /\{(?:(?!\{).)*\}/ . test ( pattern ) ) {
// shortcut. no need to expand.
return [ pattern ]
}
return expand ( pattern )
}
var MAX _PATTERN _LENGTH = 1024 * 64
var assertValidPattern = function ( pattern ) {
if ( typeof pattern !== 'string' ) {
throw new TypeError ( 'invalid pattern' )
}
if ( pattern . length > MAX _PATTERN _LENGTH ) {
throw new TypeError ( 'pattern is too long' )
}
}
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch . prototype . parse = parse
var SUBPARSE = { }
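// parse() converts one path portion into a RegExp, the GLOBSTAR marker for a
// bare '**', or a plain string for non-magic portions. As a rough illustration,
// parsing '*.js' with default options yields a regexp equivalent to
// /^(?!\.)(?=.)[^/]*?\.js$/.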
function parse ( pattern , isSub ) {
assertValidPattern ( pattern )
var options = this . options
// shortcuts
if ( pattern === '**' ) {
if ( ! options . noglobstar )
return GLOBSTAR
else
pattern = '*'
}
if ( pattern === '' ) return ''
var re = ''
var hasMagic = ! ! options . nocase
var escaping = false
// ? => one single character
var patternListStack = [ ]
var negativeLists = [ ]
var stateChar
var inClass = false
var reClassStart = - 1
var classStart = - 1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern . charAt ( 0 ) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options . dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar ( ) {
if ( stateChar ) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch ( stateChar ) {
case '*' :
re += star
hasMagic = true
break
case '?' :
re += qmark
hasMagic = true
break
default :
re += '\\' + stateChar
break
}
self . debug ( 'clearStateChar %j %j' , stateChar , re )
stateChar = false
}
}
for ( var i = 0 , len = pattern . length , c
; ( i < len ) && ( c = pattern . charAt ( i ) )
; i ++ ) {
this . debug ( '%s\t%s %s %j' , pattern , i , re , c )
// skip over any that are escaped.
if ( escaping && reSpecials [ c ] ) {
re += '\\' + c
escaping = false
continue
}
switch ( c ) {
/* istanbul ignore next */
case '/' : {
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
}
case '\\' :
clearStateChar ( )
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?' :
case '*' :
case '+' :
case '@' :
case '!' :
this . debug ( '%s\t%s %s %j <-- stateChar' , pattern , i , re , c )
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if ( inClass ) {
this . debug ( ' in class' )
if ( c === '!' && i === classStart + 1 ) c = '^'
re += c
continue
}
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self . debug ( 'call clearStateChar %j' , stateChar )
clearStateChar ( )
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if ( options . noext ) clearStateChar ( )
continue
case '(' :
if ( inClass ) {
re += '('
continue
}
if ( ! stateChar ) {
re += '\\('
continue
}
patternListStack . push ( {
type : stateChar ,
start : i - 1 ,
reStart : re . length ,
open : plTypes [ stateChar ] . open ,
close : plTypes [ stateChar ] . close
} )
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this . debug ( 'plType %j %j' , stateChar , re )
stateChar = false
continue
case ')' :
if ( inClass || ! patternListStack . length ) {
re += '\\)'
continue
}
clearStateChar ( )
hasMagic = true
var pl = patternListStack . pop ( )
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl . close
if ( pl . type === '!' ) {
negativeLists . push ( pl )
}
pl . reEnd = re . length
continue
case '|' :
if ( inClass || ! patternListStack . length || escaping ) {
re += '\\|'
escaping = false
continue
}
clearStateChar ( )
re += '|'
continue
// these are mostly the same in regexp and glob
case '[' :
// swallow any state-tracking char before the [
clearStateChar ( )
if ( inClass ) {
re += '\\' + c
continue
}
inClass = true
classStart = i
reClassStart = re . length
re += c
continue
case ']' :
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if ( i === classStart + 1 || ! inClass ) {
re += '\\' + c
escaping = false
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern . substring ( classStart + 1 , i )
try {
RegExp ( '[' + cs + ']' )
} catch ( er ) {
// not a valid class!
var sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ] + '\\]'
hasMagic = hasMagic || sp [ 1 ]
inClass = false
continue
}
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
default :
// swallow any state char that wasn't consumed
clearStateChar ( )
if ( escaping ) {
// no need
escaping = false
} else if ( reSpecials [ c ]
&& ! ( c === '^' && inClass ) ) {
re += '\\'
}
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if ( inClass ) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern . substr ( classStart + 1 )
sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ]
hasMagic = hasMagic || sp [ 1 ]
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for ( pl = patternListStack . pop ( ) ; pl ; pl = patternListStack . pop ( ) ) {
var tail = re . slice ( pl . reStart + pl . open . length )
this . debug ( 'setting tail' , re , pl )
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail . replace ( /((?:\\{2}){0,64})(\\?)\|/g , function ( _ , $1 , $2 ) {
if ( ! $2 ) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
} )
this . debug ( 'tail=%j\n %s' , tail , tail , pl , re )
var t = pl . type === '*' ? star
: pl . type === '?' ? qmark
: '\\' + pl . type
hasMagic = true
re = re . slice ( 0 , pl . reStart ) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar ( )
if ( escaping ) {
// trailing \\
re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch ( re . charAt ( 0 ) ) {
case '[' : case '.' : case '(' : addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for ( var n = negativeLists . length - 1 ; n > - 1 ; n -- ) {
var nl = negativeLists [ n ]
var nlBefore = re . slice ( 0 , nl . reStart )
var nlFirst = re . slice ( nl . reStart , nl . reEnd - 8 )
var nlLast = re . slice ( nl . reEnd - 8 , nl . reEnd )
var nlAfter = re . slice ( nl . reEnd )
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore . split ( '(' ) . length - 1
var cleanAfter = nlAfter
for ( i = 0 ; i < openParensBefore ; i ++ ) {
cleanAfter = cleanAfter . replace ( /\)[+*?]?/ , '' )
}
nlAfter = cleanAfter
var dollar = ''
if ( nlAfter === '' && isSub !== SUBPARSE ) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if ( re !== '' && hasMagic ) {
re = '(?=.)' + re
}
if ( addPatternStart ) {
re = patternStart + re
}
// parsing just a piece of a larger pattern.
if ( isSub === SUBPARSE ) {
return [ re , hasMagic ]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if ( ! hasMagic ) {
return globUnescape ( pattern )
}
var flags = options . nocase ? 'i' : ''
try {
var regExp = new RegExp ( '^' + re + '$' , flags )
} catch ( er ) /* istanbul ignore next - should be impossible */ {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp ( '$.' )
}
regExp . _glob = pattern
regExp . _src = re
return regExp
}
minimatch . makeRe = function ( pattern , options ) {
return new Minimatch ( pattern , options || { } ) . makeRe ( )
}
Minimatch . prototype . makeRe = makeRe
function makeRe ( ) {
if ( this . regexp || this . regexp === false ) return this . regexp
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
var set = this . set
if ( ! set . length ) {
this . regexp = false
return this . regexp
}
var options = this . options
var twoStar = options . noglobstar ? star
: options . dot ? twoStarDot
: twoStarNoDot
var flags = options . nocase ? 'i' : ''
var re = set . map ( function ( pattern ) {
return pattern . map ( function ( p ) {
return ( p === GLOBSTAR ) ? twoStar
: ( typeof p === 'string' ) ? regExpEscape ( p )
: p . _src
} ) . join ( '\\\/' )
} ) . join ( '|' )
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
if ( this . negate ) re = '^(?!' + re + ').*$'
try {
this . regexp = new RegExp ( re , flags )
} catch ( ex ) /* istanbul ignore next - should be impossible */ {
this . regexp = false
}
return this . regexp
}
minimatch . match = function ( list , pattern , options ) {
options = options || { }
var mm = new Minimatch ( pattern , options )
list = list . filter ( function ( f ) {
return mm . match ( f )
} )
if ( mm . options . nonull && ! list . length ) {
list . push ( pattern )
}
return list
}
Minimatch . prototype . match = function match ( f , partial ) {
if ( typeof partial === 'undefined' ) partial = this . partial
this . debug ( 'match' , f , this . pattern )
// short-circuit in the case of busted things.
// comments, etc.
if ( this . comment ) return false
if ( this . empty ) return f === ''
if ( f === '/' && partial ) return true
var options = this . options
// windows: need to use /, not \
if ( path . sep !== '/' ) {
f = f . split ( path . sep ) . join ( '/' )
}
// treat the test path as a set of pathparts.
f = f . split ( slashSplit )
this . debug ( this . pattern , 'split' , f )
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
var set = this . set
this . debug ( this . pattern , 'set' , set )
// Find the basename of the path by looking for the last non-empty segment
var filename
var i
for ( i = f . length - 1 ; i >= 0 ; i -- ) {
filename = f [ i ]
if ( filename ) break
}
for ( i = 0 ; i < set . length ; i ++ ) {
var pattern = set [ i ]
var file = f
if ( options . matchBase && pattern . length === 1 ) {
file = [ filename ]
}
var hit = this . matchOne ( file , pattern , partial )
if ( hit ) {
if ( options . flipNegate ) return true
return ! this . negate
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if ( options . flipNegate ) return false
return this . negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
Minimatch . prototype . matchOne = function ( file , pattern , partial ) {
var options = this . options
this . debug ( 'matchOne' ,
{ 'this' : this , file : file , pattern : pattern } )
this . debug ( 'matchOne' , file . length , pattern . length )
for ( var fi = 0 ,
pi = 0 ,
fl = file . length ,
pl = pattern . length
; ( fi < fl ) && ( pi < pl )
; fi ++ , pi ++ ) {
this . debug ( 'matchOne loop' )
var p = pattern [ pi ]
var f = file [ fi ]
this . debug ( pattern , p , f )
// should be impossible.
// some invalid regexp stuff in the set.
/* istanbul ignore if */
if ( p === false ) return false
if ( p === GLOBSTAR ) {
this . debug ( 'GLOBSTAR' , [ pattern , p , f ] )
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
var pr = pi + 1
if ( pr === pl ) {
this . debug ( '** at the end' )
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for ( ; fi < fl ; fi ++ ) {
if ( file [ fi ] === '.' || file [ fi ] === '..' ||
( ! options . dot && file [ fi ] . charAt ( 0 ) === '.' ) ) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
while ( fr < fl ) {
var swallowee = file [ fr ]
this . debug ( '\nglobstar while' , file , fr , pattern , pr , swallowee )
// XXX remove this slice. Just pass the start index.
if ( this . matchOne ( file . slice ( fr ) , pattern . slice ( pr ) , partial ) ) {
this . debug ( 'globstar found match!' , fr , fl , swallowee )
// found a match.
return true
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if ( swallowee === '.' || swallowee === '..' ||
( ! options . dot && swallowee . charAt ( 0 ) === '.' ) ) {
this . debug ( 'dot detected!' , file , fr , pattern , pr )
break
}
// ** swallows a segment, and continue.
this . debug ( 'globstar swallow a segment, and continue' )
fr ++
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
/* istanbul ignore if */
if ( partial ) {
// ran out of file
this . debug ( '\n>>> no match, partial?' , file , fr , pattern , pr )
if ( fr === fl ) return true
}
return false
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
if ( typeof p === 'string' ) {
hit = f === p
this . debug ( 'string match' , p , f , hit )
} else {
hit = f . match ( p )
this . debug ( 'pattern match' , p , f , hit )
}
if ( ! hit ) return false
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if ( fi === fl && pi === pl ) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true
} else if ( fi === fl ) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else /* istanbul ignore else */ if ( pi === pl ) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
return ( fi === fl - 1 ) && ( file [ fi ] === '' )
}
// should be unreachable.
/* istanbul ignore next */
throw new Error ( 'wtf?' )
}
// replace stuff like \* with *
function globUnescape ( s ) {
return s . replace ( /\\(.)/g , '$1' )
}
function regExpEscape ( s ) {
return s . replace ( /[-[\]{}()*+?.,\\^$|#\s]/g , '\\$&' )
}
/***/ } ) ,
/***/ 1223 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var wrappy = _ _nccwpck _require _ _ ( 2940 )
module . exports = wrappy ( once )
module . exports . strict = wrappy ( onceStrict )
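// once(fn) returns a wrapper that runs fn at most once and caches its return
// value; onceStrict additionally throws if the wrapper is called a second time.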
once . proto = once ( function ( ) {
Object . defineProperty ( Function . prototype , 'once' , {
value : function ( ) {
return once ( this )
} ,
configurable : true
} )
Object . defineProperty ( Function . prototype , 'onceStrict' , {
value : function ( ) {
return onceStrict ( this )
} ,
configurable : true
} )
} )
function once ( fn ) {
var f = function ( ) {
if ( f . called ) return f . value
f . called = true
return f . value = fn . apply ( this , arguments )
}
f . called = false
return f
}
function onceStrict ( fn ) {
var f = function ( ) {
if ( f . called )
throw new Error ( f . onceError )
f . called = true
return f . value = fn . apply ( this , arguments )
}
var name = fn . name || 'Function wrapped with `once`'
f . onceError = name + " shouldn't be called more than once"
f . called = false
return f
}
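// Editorial usage sketch (not part of the original module, never invoked): shows
// the once() and onceStrict() wrappers defined above; the wrapped functions are
// assumptions.
/* istanbul ignore next */
function _onceUsageExample () {
  var load = once(function (value) { return value * 2 })
  load(21)   // 42 - the wrapped function runs
  load(100)  // still 42 - later calls return the cached value
  var strict = onceStrict(function () { return 'ok' })
  strict()   // 'ok'; a second strict() call would throw f.onceError
  return load(1)
}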
/***/ } ) ,
/***/ 8714 :
/***/ ( ( module ) => {
"use strict" ;
function posix ( path ) {
return path . charAt ( 0 ) === '/' ;
}
function win32 ( path ) {
// https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/ ;
var result = splitDeviceRe . exec ( path ) ;
var device = result [ 1 ] || '' ;
var isUnc = Boolean ( device && device . charAt ( 1 ) !== ':' ) ;
// UNC paths are always absolute
return Boolean ( result [ 2 ] || isUnc ) ;
}
module . exports = process . platform === 'win32' ? win32 : posix ;
module . exports . posix = posix ;
module . exports . win32 = win32 ;
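// Editorial usage sketch (never invoked): the export above picks win32 or posix
// by platform and also exposes both variants explicitly. Sample paths are assumptions.
/* istanbul ignore next */
function _isAbsoluteExample () {
  var isAbsolute = module.exports
  isAbsolute.posix('/home/foo')          // true
  isAbsolute.posix('foo/bar')            // false
  isAbsolute.win32('C:\\projects')       // true - drive-letter path
  isAbsolute.win32('\\\\server\\share')  // true - UNC paths are always absolute
  return isAbsolute('/tmp')
}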
/***/ } ) ,
/***/ 5888 :
/***/ ( ( __unused_webpack_module , exports , __nccwpck_require__ ) => {
exports . setopts = setopts
exports . ownProp = ownProp
exports . makeAbs = makeAbs
exports . finish = finish
exports . mark = mark
exports . isIgnored = isIgnored
exports . childrenIgnored = childrenIgnored
function ownProp ( obj , field ) {
return Object . prototype . hasOwnProperty . call ( obj , field )
}
var fs = __nccwpck_require__ ( 7147 )
var path = __nccwpck_require__ ( 1017 )
var minimatch = __nccwpck_require__ ( 3973 )
var isAbsolute = __nccwpck_require__ ( 8714 )
var Minimatch = minimatch . Minimatch
function alphasort ( a , b ) {
return a . localeCompare ( b , 'en' )
}
function setupIgnores ( self , options ) {
self . ignore = options . ignore || [ ]
if ( ! Array . isArray ( self . ignore ) )
self . ignore = [ self . ignore ]
if ( self . ignore . length ) {
self . ignore = self . ignore . map ( ignoreMap )
}
}
// ignore patterns are always in dot:true mode.
function ignoreMap ( pattern ) {
var gmatcher = null
if ( pattern . slice ( - 3 ) === '/**' ) {
var gpattern = pattern . replace ( /(\/\*\*)+$/ , '' )
gmatcher = new Minimatch ( gpattern , { dot : true } )
}
return {
matcher : new Minimatch ( pattern , { dot : true } ) ,
gmatcher : gmatcher
}
}
function setopts ( self , pattern , options ) {
if ( ! options )
options = { }
// base-matching: just use globstar for that.
if ( options . matchBase && - 1 === pattern . indexOf ( "/" ) ) {
if ( options . noglobstar ) {
throw new Error ( "base matching requires globstar" )
}
pattern = "**/" + pattern
}
self . silent = ! ! options . silent
self . pattern = pattern
self . strict = options . strict !== false
self . realpath = ! ! options . realpath
self . realpathCache = options . realpathCache || Object . create ( null )
self . follow = ! ! options . follow
self . dot = ! ! options . dot
self . mark = ! ! options . mark
self . nodir = ! ! options . nodir
if ( self . nodir )
self . mark = true
self . sync = ! ! options . sync
self . nounique = ! ! options . nounique
self . nonull = ! ! options . nonull
self . nosort = ! ! options . nosort
self . nocase = ! ! options . nocase
self . stat = ! ! options . stat
self . noprocess = ! ! options . noprocess
self . absolute = ! ! options . absolute
self . fs = options . fs || fs
self . maxLength = options . maxLength || Infinity
self . cache = options . cache || Object . create ( null )
self . statCache = options . statCache || Object . create ( null )
self . symlinks = options . symlinks || Object . create ( null )
setupIgnores ( self , options )
self . changedCwd = false
var cwd = process . cwd ( )
if ( ! ownProp ( options , "cwd" ) )
self . cwd = cwd
else {
self . cwd = path . resolve ( options . cwd )
self . changedCwd = self . cwd !== cwd
}
self . root = options . root || path . resolve ( self . cwd , "/" )
self . root = path . resolve ( self . root )
if ( process . platform === "win32" )
self . root = self . root . replace ( /\\/g , "/" )
// TODO: is an absolute `cwd` supposed to be resolved against `root`?
// e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
self . cwdAbs = isAbsolute ( self . cwd ) ? self . cwd : makeAbs ( self , self . cwd )
if ( process . platform === "win32" )
self . cwdAbs = self . cwdAbs . replace ( /\\/g , "/" )
self . nomount = ! ! options . nomount
// disable comments and negation in Minimatch.
// Note that they are not supported in Glob itself anyway.
options . nonegate = true
options . nocomment = true
// always treat \ in patterns as escapes, not path separators
options . allowWindowsEscape = false
self . minimatch = new Minimatch ( pattern , options )
self . options = self . minimatch . options
}
function finish ( self ) {
var nou = self . nounique
var all = nou ? [ ] : Object . create ( null )
for ( var i = 0 , l = self . matches . length ; i < l ; i ++ ) {
var matches = self . matches [ i ]
if ( ! matches || Object . keys ( matches ) . length === 0 ) {
if ( self . nonull ) {
// do like the shell, and spit out the literal glob
var literal = self . minimatch . globSet [ i ]
if ( nou )
all . push ( literal )
else
all [ literal ] = true
}
} else {
// had matches
var m = Object . keys ( matches )
if ( nou )
all . push . apply ( all , m )
else
m . forEach ( function ( m ) {
all [ m ] = true
} )
}
}
if ( ! nou )
all = Object . keys ( all )
if ( ! self . nosort )
all = all . sort ( alphasort )
// at *some* point we statted all of these
if ( self . mark ) {
for ( var i = 0 ; i < all . length ; i ++ ) {
all [ i ] = self . _mark ( all [ i ] )
}
if ( self . nodir ) {
all = all . filter ( function ( e ) {
var notDir = ! ( /\/$/ . test ( e ) )
var c = self . cache [ e ] || self . cache [ makeAbs ( self , e ) ]
if ( notDir && c )
notDir = c !== 'DIR' && ! Array . isArray ( c )
return notDir
} )
}
}
if ( self . ignore . length )
all = all . filter ( function ( m ) {
return ! isIgnored ( self , m )
} )
self . found = all
}
function mark ( self , p ) {
var abs = makeAbs ( self , p )
var c = self . cache [ abs ]
var m = p
if ( c ) {
var isDir = c === 'DIR' || Array . isArray ( c )
var slash = p . slice ( - 1 ) === '/'
if ( isDir && ! slash )
m += '/'
else if ( ! isDir && slash )
m = m . slice ( 0 , - 1 )
if ( m !== p ) {
var mabs = makeAbs ( self , m )
self . statCache [ mabs ] = self . statCache [ abs ]
self . cache [ mabs ] = self . cache [ abs ]
}
}
return m
}
// lotta situps...
function makeAbs ( self , f ) {
var abs = f
if ( f . charAt ( 0 ) === '/' ) {
abs = path . join ( self . root , f )
} else if ( isAbsolute ( f ) || f === '' ) {
abs = f
} else if ( self . changedCwd ) {
abs = path . resolve ( self . cwd , f )
} else {
abs = path . resolve ( f )
}
if ( process . platform === 'win32' )
abs = abs . replace ( /\\/g , '/' )
return abs
}
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with its contents
function isIgnored ( self , path ) {
if ( ! self . ignore . length )
return false
return self . ignore . some ( function ( item ) {
return item . matcher . match ( path ) || ! ! ( item . gmatcher && item . gmatcher . match ( path ) )
} )
}
function childrenIgnored ( self , path ) {
if ( ! self . ignore . length )
return false
return self . ignore . some ( function ( item ) {
return ! ! ( item . gmatcher && item . gmatcher . match ( path ) )
} )
}
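// Editorial sketch (never invoked): how the ignore helpers above behave once a
// Glob-like object has an `ignore` list. The bare object literal stands in for
// `this` after setupIgnores(); the patterns are assumptions.
/* istanbul ignore next */
function _ignoreHandlingExample () {
  var self = { ignore: ['node_modules/**'].map(ignoreMap) }
  isIgnored(self, 'node_modules/foo/index.js')  // true - matched by the '/**' pattern
  isIgnored(self, 'src/index.js')               // false
  // childrenIgnored() consults only the gmatcher, so whole subtrees can be pruned
  return childrenIgnored(self, 'node_modules')  // true
}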
/***/ } ) ,
/***/ 6968 :
/***/ ( ( module , __unused_webpack_exports , __nccwpck_require__ ) => {
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module . exports = glob
var rp = __nccwpck_require__ ( 6863 )
var minimatch = __nccwpck_require__ ( 3973 )
var Minimatch = minimatch . Minimatch
var inherits = __nccwpck_require__ ( 4124 )
var EE = ( __nccwpck_require__ ( 2361 ) . EventEmitter )
var path = __nccwpck_require__ ( 1017 )
var assert = __nccwpck_require__ ( 9491 )
var isAbsolute = __nccwpck_require__ ( 8714 )
var globSync = __nccwpck_require__ ( 7967 )
var common = __nccwpck_require__ ( 5888 )
var setopts = common . setopts
var ownProp = common . ownProp
var inflight = __nccwpck_require__ ( 2492 )
var util = __nccwpck_require__ ( 3837 )
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
var once = __nccwpck_require__ ( 1223 )
function glob ( pattern , options , cb ) {
if ( typeof options === 'function' ) cb = options , options = { }
if ( ! options ) options = { }
if ( options . sync ) {
if ( cb )
throw new TypeError ( 'callback provided to sync glob' )
return globSync ( pattern , options )
}
return new Glob ( pattern , options , cb )
}
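// Editorial usage sketch (never invoked): how the exported glob() above is
// typically driven. The pattern and the `nodir` option are assumptions, not
// values used anywhere in this bundle.
/* istanbul ignore next */
function _globUsageExample () {
  // async form: the callback receives (error, matches)
  glob('**/*.js', { nodir: true }, function (er, files) {
    if (er) throw er
    console.error('matched', files.length, 'files')
  })
  // sync form (glob.sync, assigned just below) returns the matches directly
  return glob.sync('**/*.js', { nodir: true })
}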
glob . sync = globSync
var GlobSync = glob . GlobSync = globSync . GlobSync
// old api surface
glob . glob = glob
function extend ( origin , add ) {
if ( add === null || typeof add !== 'object' ) {
return origin
}
var keys = Object . keys ( add )
var i = keys . length
while ( i -- ) {
origin [ keys [ i ] ] = add [ keys [ i ] ]
}
return origin
}

glob . hasMagic = function ( pattern , options_ ) {
var options = extend ( { } , options_ )
options . noprocess = true

var g = new Glob ( pattern , options )
var set = g . minimatch . set

if ( ! pattern )
return false

if ( set . length > 1 )
return true

for ( var j = 0 ; j < set [ 0 ] . length ; j ++ ) {
if ( typeof set [ 0 ] [ j ] !== 'string' )
return true
}

return false
}
glob . Glob = Glob
inherits ( Glob , EE )
function Glob ( pattern , options , cb ) {
if ( typeof options === 'function' ) {
cb = options
options = null
}
if ( options && options . sync ) {
if ( cb )
throw new TypeError ( 'callback provided to sync glob' )
return new GlobSync ( pattern , options )
}
if ( ! ( this instanceof Glob ) )
return new Glob ( pattern , options , cb )

setopts ( this , pattern , options )
this . _didRealPath = false

// process each pattern in the minimatch set
var n = this . minimatch . set . length
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this . matches = new Array ( n )
if ( typeof cb === 'function' ) {
cb = once ( cb )
this . on ( 'error' , cb )
this . on ( 'end' , function ( matches ) {
cb ( null , matches )
} )
}
var self = this
this . _processing = 0

this . _emitQueue = [ ]
this . _processQueue = [ ]
this . paused = false

if ( this . noprocess )
return this

if ( n === 0 )
return done ( )
var sync = true
for ( var i = 0 ; i < n ; i ++ ) {
this . _process ( this . minimatch . set [ i ] , i , false , done )
}
sync = false
function done ( ) {
-- self . _processing
if ( self . _processing <= 0 ) {
if ( sync ) {
process . nextTick ( function ( ) {
self . _finish ( )
} )
} else {
self . _finish ( )
}
}
}
}
Glob . prototype . _finish = function ( ) {
assert ( this instanceof Glob )
if ( this . aborted )
return
if ( this . realpath && ! this . _didRealpath )
return this . _realpath ( )

common . finish ( this )
this . emit ( 'end' , this . found )
}

Glob . prototype . _realpath = function ( ) {
if ( this . _didRealpath )
return

this . _didRealpath = true
var n = this . matches . length
if ( n === 0 )
return this . _finish ( )
var self = this
for ( var i = 0 ; i < this . matches . length ; i ++ )
this . _realpathSet ( i , next )
function next ( ) {
if ( -- n === 0 )
self . _finish ( )
}
}
Glob . prototype . _realpathSet = function ( index , cb ) {
var matchset = this . matches [ index ]
if ( ! matchset )
return cb ( )
var found = Object . keys ( matchset )
var self = this
var n = found . length

if ( n === 0 )
return cb ( )
var set = this . matches [ index ] = Object . create ( null )
found . forEach ( function ( p , i ) {
// If there's a problem with the stat, then it means that
// one or more of the links in the realpath couldn't be
// resolved. just return the abs value in that case.
p = self . _makeAbs ( p )
rp . realpath ( p , self . realpathCache , function ( er , real ) {
if ( ! er )
set [ real ] = true
else if ( er . syscall === 'stat' )
set [ p ] = true
else
self . emit ( 'error' , er ) // srsly wtf right here
if ( -- n === 0 ) {
self . matches [ index ] = set
cb ( )
}
} )
} )
}

Glob . prototype . _mark = function ( p ) {
return common . mark ( this , p )
}
Glob . prototype . _makeAbs = function ( f ) {
return common . makeAbs ( this , f )
}
Glob . prototype . abort = function ( ) {
this . aborted = true
this . emit ( 'abort' )
}
Glob . prototype . pause = function ( ) {
if ( ! this . paused ) {
this . paused = true
this . emit ( 'pause' )
}
}
Glob . prototype . resume = function ( ) {
if ( this . paused ) {
this . emit ( 'resume' )
this . paused = false
if ( this . _emitQueue . length ) {
var eq = this . _emitQueue . slice ( 0 )
this . _emitQueue . length = 0
for ( var i = 0 ; i < eq . length ; i ++ ) {
var e = eq [ i ]
this . _emitMatch ( e [ 0 ] , e [ 1 ] )
}
}
if ( this . _processQueue . length ) {
var pq = this . _processQueue . slice ( 0 )
this . _processQueue . length = 0
for ( var i = 0 ; i < pq . length ; i ++ ) {
var p = pq [ i ]
this . _processing --
this . _process ( p [ 0 ] , p [ 1 ] , p [ 2 ] , p [ 3 ] )
}
}
}
}
Glob . prototype . _process = function ( pattern , index , inGlobStar , cb ) {
assert ( this instanceof Glob )
assert ( typeof cb === 'function' )
if ( this . aborted )
return
this . _processing ++
if ( this . paused ) {
this . _processQueue . push ( [ pattern , index , inGlobStar , cb ] )
return
}
//console.error('PROCESS %d', this._processing, pattern)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while ( typeof pattern [ n ] === 'string' ) {
n ++
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix
switch ( n ) {
// if not, then this is rather simple
case pattern . length :
this . _processSimple ( pattern . join ( '/' ) , index , cb )
return
case 0 :
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default :
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern . slice ( 0 , n ) . join ( '/' )
break
}
var remain = pattern . slice ( n )
// get the list of entries.
var read
if ( prefix === null )
read = '.'
else if ( isAbsolute ( prefix ) ||
isAbsolute ( pattern . map ( function ( p ) {
return typeof p === 'string' ? p : '[*]'
} ) . join ( '/' ) ) ) {
if ( ! prefix || ! isAbsolute ( prefix ) )
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this . _makeAbs ( read )
//if ignored, skip _processing
if ( childrenIgnored ( this , read ) )
return cb ( )
var isGlobStar = remain [ 0 ] === minimatch . GLOBSTAR
if ( isGlobStar )
this . _processGlobStar ( prefix , read , abs , remain , index , inGlobStar , cb )
else
this . _processReaddir ( prefix , read , abs , remain , index , inGlobStar , cb )
}
Glob . prototype . _processReaddir = function ( prefix , read , abs , remain , index , inGlobStar , cb ) {
var self = this
this . _readdir ( abs , inGlobStar , function ( er , entries ) {
return self . _processReaddir2 ( prefix , read , abs , remain , index , inGlobStar , entries , cb )
} )
}
Glob . prototype . _processReaddir2 = function ( prefix , read , abs , remain , index , inGlobStar , entries , cb ) {
// if the abs isn't a dir, then nothing can match!
if ( ! entries )
return cb ( )
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain [ 0 ]
var negate = ! ! this . minimatch . negate
var rawGlob = pn . _glob
var dotOk = this . dot || rawGlob . charAt ( 0 ) === '.'
var matchedEntries = [ ]
for ( var i = 0 ; i < entries . length ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) !== '.' || dotOk ) {
var m
if ( negate && ! prefix ) {
m = ! e . match ( pn )
} else {
m = e . match ( pn )
}
if ( m )
matchedEntries . push ( e )
}
}
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
var len = matchedEntries . length
// If there are no matched entries, then nothing matches.
if ( len === 0 )
return cb ( )
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if ( remain . length === 1 && ! this . mark && ! this . stat ) {
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
for ( var i = 0 ; i < len ; i ++ ) {
var e = matchedEntries [ i ]
if ( prefix ) {
if ( prefix !== '/' )
e = prefix + '/' + e
else
e = prefix + e
}
if ( e . charAt ( 0 ) === '/' && ! this . nomount ) {
e = path . join ( this . root , e )
}
this . _emitMatch ( index , e )
}
// This was the last one, and no stats were needed
return cb ( )
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain . shift ( )
for ( var i = 0 ; i < len ; i ++ ) {
var e = matchedEntries [ i ]
var newPattern
if ( prefix ) {
if ( prefix !== '/' )
e = prefix + '/' + e
else
e = prefix + e
}
this . _process ( [ e ] . concat ( remain ) , index , inGlobStar , cb )
}
cb ( )
}
Glob . prototype . _emitMatch = function ( index , e ) {
if ( this . aborted )
return
if ( isIgnored ( this , e ) )
return
if ( this . paused ) {
this . _emitQueue . push ( [ index , e ] )
return
}
var abs = isAbsolute ( e ) ? e : this . _makeAbs ( e )
if ( this . mark )
e = this . _mark ( e )
if ( this . absolute )
e = abs
if ( this . matches [ index ] [ e ] )
return
if ( this . nodir ) {
var c = this . cache [ abs ]
if ( c === 'DIR' || Array . isArray ( c ) )
return
}
this . matches [ index ] [ e ] = true
var st = this . statCache [ abs ]
if ( st )
this . emit ( 'stat' , e , st )
this . emit ( 'match' , e )
}
Glob . prototype . _readdirInGlobStar = function ( abs , cb ) {
if ( this . aborted )
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if ( this . follow )
return this . _readdir ( abs , false , cb )
var lstatkey = 'lstat\0' + abs
var self = this
var lstatcb = inflight ( lstatkey , lstatcb_ )
if ( lstatcb )
self . fs . lstat ( abs , lstatcb )
function lstatcb_ ( er , lstat ) {
if ( er && er . code === 'ENOENT' )
return cb ( )
var isSym = lstat && lstat . isSymbolicLink ( )
self . symlinks [ abs ] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if ( ! isSym && lstat && ! lstat . isDirectory ( ) ) {
self . cache [ abs ] = 'FILE'
cb ( )
} else
self . _readdir ( abs , false , cb )
}
}
Glob . prototype . _readdir = function ( abs , inGlobStar , cb ) {
if ( this . aborted )
return
cb = inflight ( 'readdir\0' + abs + '\0' + inGlobStar , cb )
if ( ! cb )
return
//console.error('RD %j %j', +inGlobStar, abs)
if ( inGlobStar && ! ownProp ( this . symlinks , abs ) )
return this . _readdirInGlobStar ( abs , cb )
if ( ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
if ( ! c || c === 'FILE' )
return cb ( )
if ( Array . isArray ( c ) )
return cb ( null , c )
}
var self = this
self . fs . readdir ( abs , readdirCb ( this , abs , cb ) )
}
function readdirCb ( self , abs , cb ) {
return function ( er , entries ) {
if ( er )
self . _readdirError ( abs , er , cb )
else
self . _readdirEntries ( abs , entries , cb )
}
}
Glob . prototype . _readdirEntries = function ( abs , entries , cb ) {
if ( this . aborted )
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if ( ! this . mark && ! this . stat ) {
for ( var i = 0 ; i < entries . length ; i ++ ) {
var e = entries [ i ]
if ( abs === '/' )
e = abs + e
else
e = abs + '/' + e
this . cache [ e ] = true
}
}
this . cache [ abs ] = entries
return cb ( null , entries )
}
Glob . prototype . _readdirError = function ( f , er , cb ) {
if ( this . aborted )
return
// handle errors, and cache the information
switch ( er . code ) {
case 'ENOTSUP' : // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR' : // totally normal. means it *does* exist.
var abs = this . _makeAbs ( f )
this . cache [ abs ] = 'FILE'
if ( abs === this . cwdAbs ) {
var error = new Error ( er . code + ' invalid cwd ' + this . cwd )
error . path = this . cwd
error . code = er . code
this . emit ( 'error' , error )
this . abort ( )
}
break
case 'ENOENT' : // not terribly unusual
case 'ELOOP' :
case 'ENAMETOOLONG' :
case 'UNKNOWN' :
this . cache [ this . _makeAbs ( f ) ] = false
break
default : // some unusual error. Treat as failure.
this . cache [ this . _makeAbs ( f ) ] = false
if ( this . strict ) {
this . emit ( 'error' , er )
// If the error is handled, then we abort
// if not, we threw out of here
this . abort ( )
}
if ( ! this . silent )
console . error ( 'glob error' , er )
break
}
return cb ( )
}
Glob . prototype . _processGlobStar = function ( prefix , read , abs , remain , index , inGlobStar , cb ) {
var self = this
this . _readdir ( abs , inGlobStar , function ( er , entries ) {
self . _processGlobStar2 ( prefix , read , abs , remain , index , inGlobStar , entries , cb )
} )
}
Glob . prototype . _processGlobStar2 = function ( prefix , read , abs , remain , index , inGlobStar , entries , cb ) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if ( ! entries )
return cb ( )
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain . slice ( 1 )
var gspref = prefix ? [ prefix ] : [ ]
var noGlobStar = gspref . concat ( remainWithoutGlobStar )
// the noGlobStar pattern exits the inGlobStar state
this . _process ( noGlobStar , index , false , cb )
var isSym = this . symlinks [ abs ]
var len = entries . length
// If it's a symlink, and we're in a globstar, then stop
if ( isSym && inGlobStar )
return cb ( )
for ( var i = 0 ; i < len ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) === '.' && ! this . dot )
continue
// these two cases enter the inGlobStar state
var instead = gspref . concat ( entries [ i ] , remainWithoutGlobStar )
this . _process ( instead , index , true , cb )
var below = gspref . concat ( entries [ i ] , remain )
this . _process ( below , index , true , cb )
}
cb ( )
}
Glob . prototype . _processSimple = function ( prefix , index , cb ) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this . _stat ( prefix , function ( er , exists ) {
self . _processSimple2 ( prefix , index , er , exists , cb )
} )
}
Glob . prototype . _processSimple2 = function ( prefix , index , er , exists , cb ) {
//console.error('ps2', prefix, exists)
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
// If it doesn't exist, then just mark the lack of results
if ( ! exists )
return cb ( )
if ( prefix && isAbsolute ( prefix ) && ! this . nomount ) {
var trail = /[\/\\]$/ . test ( prefix )
if ( prefix . charAt ( 0 ) === '/' ) {
prefix = path . join ( this . root , prefix )
} else {
prefix = path . resolve ( this . root , prefix )
if ( trail )
prefix += '/'
}
}
if ( process . platform === 'win32' )
prefix = prefix . replace ( /\\/g , '/' )
// Mark this as a match
this . _emitMatch ( index , prefix )
cb ( )
}
// Returns either 'DIR', 'FILE', or false
Glob . prototype . _stat = function ( f , cb ) {
var abs = this . _makeAbs ( f )
var needDir = f . slice ( - 1 ) === '/'
if ( f . length > this . maxLength )
return cb ( )
if ( ! this . stat && ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
if ( Array . isArray ( c ) )
c = 'DIR'
// It exists, but maybe not how we need it
if ( ! needDir || c === 'DIR' )
return cb ( null , c )
if ( needDir && c === 'FILE' )
return cb ( )
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this . statCache [ abs ]
if ( stat !== undefined ) {
if ( stat === false )
return cb ( null , stat )
else {
var type = stat . isDirectory ( ) ? 'DIR' : 'FILE'
if ( needDir && type === 'FILE' )
return cb ( )
else
return cb ( null , type , stat )
}
}
var self = this
var statcb = inflight ( 'stat\0' + abs , lstatcb_ )
if ( statcb )
self . fs . lstat ( abs , statcb )
function lstatcb_ ( er , lstat ) {
if ( lstat && lstat . isSymbolicLink ( ) ) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return self . fs . stat ( abs , function ( er , stat ) {
if ( er )
self . _stat2 ( f , abs , null , lstat , cb )
else
self . _stat2 ( f , abs , er , stat , cb )
} )
} else {
self . _stat2 ( f , abs , er , lstat , cb )
}
}
}
Glob . prototype . _stat2 = function ( f , abs , er , stat , cb ) {
if ( er && ( er . code === 'ENOENT' || er . code === 'ENOTDIR' ) ) {
this . statCache [ abs ] = false
return cb ( )
}
var needDir = f . slice ( - 1 ) === '/'
this . statCache [ abs ] = stat
if ( abs . slice ( - 1 ) === '/' && stat && ! stat . isDirectory ( ) )
return cb ( null , false , stat )
var c = true
if ( stat )
c = stat . isDirectory ( ) ? 'DIR' : 'FILE'
this . cache [ abs ] = this . cache [ abs ] || c
if ( needDir && c === 'FILE' )
return cb ( )
return cb ( null , c , stat )
}
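// Editorial sketch (never invoked): Glob is an EventEmitter, so matches can be
// consumed as they stream in, and pause()/resume() queue and flush them. The
// pattern is an assumption.
/* istanbul ignore next */
function _globEventsExample () {
  var g = new Glob('**/*.md', { nodir: true })
  g.on('match', function (file) {
    console.error('match:', file)
  })
  g.on('end', function (files) {
    // the deduplicated (and, unless nosort, sorted) result set
    console.error('done:', files.length)
  })
  g.pause()   // queues matches instead of emitting them
  g.resume()  // flushes the queue and continues processing
  return g
}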
/***/ } ) ,
/***/ 7967 :
/***/ ( ( module , __unused_webpack_exports , __nccwpck_require__ ) => {

module . exports = globSync
globSync . GlobSync = GlobSync
var rp = __nccwpck_require__ ( 6863 )
var minimatch = __nccwpck_require__ ( 3973 )
var Minimatch = minimatch . Minimatch
var Glob = ( __nccwpck_require__ ( 6968 ) . Glob )
var util = __nccwpck_require__ ( 3837 )
var path = __nccwpck_require__ ( 1017 )
var assert = __nccwpck_require__ ( 9491 )
var isAbsolute = __nccwpck_require__ ( 8714 )
var common = __nccwpck_require__ ( 5888 )
var setopts = common . setopts
var ownProp = common . ownProp
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
function globSync ( pattern , options ) {
if ( typeof options === 'function' || arguments . length === 3 )
throw new TypeError ( 'callback provided to sync glob\n' +
'See: https://github.com/isaacs/node-glob/issues/167' )
return new GlobSync ( pattern , options ) . found
}
function GlobSync ( pattern , options ) {
if ( ! pattern )
throw new Error ( 'must provide pattern' )
if ( typeof options === 'function' || arguments . length === 3 )
throw new TypeError ( 'callback provided to sync glob\n' +
'See: https://github.com/isaacs/node-glob/issues/167' )
if ( ! ( this instanceof GlobSync ) )
return new GlobSync ( pattern , options )
setopts ( this , pattern , options )
if ( this . noprocess )
return this
var n = this . minimatch . set . length
this . matches = new Array ( n )
for ( var i = 0 ; i < n ; i ++ ) {
this . _process ( this . minimatch . set [ i ] , i , false )
}
this . _finish ( )
}
GlobSync . prototype . _finish = function ( ) {
assert . ok ( this instanceof GlobSync )
if ( this . realpath ) {
var self = this
this . matches . forEach ( function ( matchset , index ) {
var set = self . matches [ index ] = Object . create ( null )
for ( var p in matchset ) {
try {
p = self . _makeAbs ( p )
var real = rp . realpathSync ( p , self . realpathCache )
set [ real ] = true
} catch ( er ) {
if ( er . syscall === 'stat' )
set [ self . _makeAbs ( p ) ] = true
else
throw er
}
}
} )
}
common . finish ( this )
}
GlobSync . prototype . _process = function ( pattern , index , inGlobStar ) {
assert . ok ( this instanceof GlobSync )
// Get the first [n] parts of pattern that are all strings.
var n = 0
while ( typeof pattern [ n ] === 'string' ) {
n ++
}
// now n is the index of the first one that is *not* a string.
// See if there's anything else
var prefix
switch ( n ) {
// if not, then this is rather simple
case pattern . length :
this . _processSimple ( pattern . join ( '/' ) , index )
return
case 0 :
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default :
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern . slice ( 0 , n ) . join ( '/' )
break
}
var remain = pattern . slice ( n )
// get the list of entries.
var read
if ( prefix === null )
read = '.'
else if ( isAbsolute ( prefix ) ||
isAbsolute ( pattern . map ( function ( p ) {
return typeof p === 'string' ? p : '[*]'
} ) . join ( '/' ) ) ) {
if ( ! prefix || ! isAbsolute ( prefix ) )
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this . _makeAbs ( read )
//if ignored, skip processing
if ( childrenIgnored ( this , read ) )
return
var isGlobStar = remain [ 0 ] === minimatch . GLOBSTAR
if ( isGlobStar )
this . _processGlobStar ( prefix , read , abs , remain , index , inGlobStar )
else
this . _processReaddir ( prefix , read , abs , remain , index , inGlobStar )
}
GlobSync . prototype . _processReaddir = function ( prefix , read , abs , remain , index , inGlobStar ) {
var entries = this . _readdir ( abs , inGlobStar )
// if the abs isn't a dir, then nothing can match!
if ( ! entries )
return
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain [ 0 ]
var negate = ! ! this . minimatch . negate
var rawGlob = pn . _glob
var dotOk = this . dot || rawGlob . charAt ( 0 ) === '.'
var matchedEntries = [ ]
for ( var i = 0 ; i < entries . length ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) !== '.' || dotOk ) {
var m
if ( negate && ! prefix ) {
m = ! e . match ( pn )
} else {
m = e . match ( pn )
}
if ( m )
matchedEntries . push ( e )
}
}
var len = matchedEntries . length
// If there are no matched entries, then nothing matches.
if ( len === 0 )
return
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if ( remain . length === 1 && ! this . mark && ! this . stat ) {
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
for ( var i = 0 ; i < len ; i ++ ) {
var e = matchedEntries [ i ]
if ( prefix ) {
if ( prefix . slice ( - 1 ) !== '/' )
e = prefix + '/' + e
else
e = prefix + e
}
if ( e . charAt ( 0 ) === '/' && ! this . nomount ) {
e = path . join ( this . root , e )
}
this . _emitMatch ( index , e )
}
// This was the last one, and no stats were needed
return
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain . shift ( )
for ( var i = 0 ; i < len ; i ++ ) {
var e = matchedEntries [ i ]
var newPattern
if ( prefix )
newPattern = [ prefix , e ]
else
newPattern = [ e ]
this . _process ( newPattern . concat ( remain ) , index , inGlobStar )
}
}
GlobSync . prototype . _emitMatch = function ( index , e ) {
if ( isIgnored ( this , e ) )
return
var abs = this . _makeAbs ( e )

if ( this . mark )
e = this . _mark ( e )
if ( this . absolute ) {
e = abs
}
if ( this . matches [ index ] [ e ] )
return
if ( this . nodir ) {
var c = this . cache [ abs ]
if ( c === 'DIR' || Array . isArray ( c ) )
return
}
this . matches [ index ] [ e ] = true

if ( this . stat )
this . _stat ( e )
}
GlobSync . prototype . _readdirInGlobStar = function ( abs ) {
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if ( this . follow )
return this . _readdir ( abs , false )
var entries
var lstat
var stat
try {
lstat = this . fs . lstatSync ( abs )
} catch ( er ) {
if ( er . code === 'ENOENT' ) {
// lstat failed, doesn't exist
return null
}
}
var isSym = lstat && lstat . isSymbolicLink ( )
this . symlinks [ abs ] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if ( ! isSym && lstat && ! lstat . isDirectory ( ) )
this . cache [ abs ] = 'FILE'
else
entries = this . _readdir ( abs , false )
return entries
}
GlobSync . prototype . _readdir = function ( abs , inGlobStar ) {
var entries
if ( inGlobStar && ! ownProp ( this . symlinks , abs ) )
return this . _readdirInGlobStar ( abs )
if ( ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
if ( ! c || c === 'FILE' )
return null
if ( Array . isArray ( c ) )
return c
}
try {
return this . _readdirEntries ( abs , this . fs . readdirSync ( abs ) )
} catch ( er ) {
this . _readdirError ( abs , er )
return null
}
}
GlobSync . prototype . _readdirEntries = function ( abs , entries ) {
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if ( ! this . mark && ! this . stat ) {
for ( var i = 0 ; i < entries . length ; i ++ ) {
var e = entries [ i ]
if ( abs === '/' )
e = abs + e
else
e = abs + '/' + e
this . cache [ e ] = true
}
}
this . cache [ abs ] = entries

// mark and cache dir-ness
return entries
}
GlobSync . prototype . _readdirError = function ( f , er ) {
// handle errors, and cache the information
switch ( er . code ) {
case 'ENOTSUP' : // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR' : // totally normal. means it *does* exist.
var abs = this . _makeAbs ( f )
this . cache [ abs ] = 'FILE'
if ( abs === this . cwdAbs ) {
var error = new Error ( er . code + ' invalid cwd ' + this . cwd )
error . path = this . cwd
error . code = er . code
throw error
}
break
case 'ENOENT' : // not terribly unusual
case 'ELOOP' :
case 'ENAMETOOLONG' :
case 'UNKNOWN' :
this . cache [ this . _makeAbs ( f ) ] = false
break
default : // some unusual error. Treat as failure.
this . cache [ this . _makeAbs ( f ) ] = false
if ( this . strict )
throw er
if ( ! this . silent )
console . error ( 'glob error' , er )
break
}
}

GlobSync . prototype . _processGlobStar = function ( prefix , read , abs , remain , index , inGlobStar ) {

var entries = this . _readdir ( abs , inGlobStar )
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if ( ! entries )
return
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain . slice ( 1 )
var gspref = prefix ? [ prefix ] : [ ]
var noGlobStar = gspref . concat ( remainWithoutGlobStar )
// the noGlobStar pattern exits the inGlobStar state
this . _process ( noGlobStar , index , false )
var len = entries . length
var isSym = this . symlinks [ abs ]
// If it's a symlink, and we're in a globstar, then stop
if ( isSym && inGlobStar )
return
for ( var i = 0 ; i < len ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) === '.' && ! this . dot )
continue
// these two cases enter the inGlobStar state
var instead = gspref . concat ( entries [ i ] , remainWithoutGlobStar )
this . _process ( instead , index , true )
var below = gspref . concat ( entries [ i ] , remain )
this . _process ( below , index , true )
}
}
GlobSync . prototype . _processSimple = function ( prefix , index ) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var exists = this . _stat ( prefix )
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
// If it doesn't exist, then just mark the lack of results
if ( ! exists )
return
if ( prefix && isAbsolute ( prefix ) && ! this . nomount ) {
var trail = /[\/\\]$/ . test ( prefix )
if ( prefix . charAt ( 0 ) === '/' ) {
prefix = path . join ( this . root , prefix )
} else {
prefix = path . resolve ( this . root , prefix )
if ( trail )
prefix += '/'
}
}
if ( process . platform === 'win32' )
prefix = prefix . replace ( /\\/g , '/' )
// Mark this as a match
this . _emitMatch ( index , prefix )
}
// Returns either 'DIR', 'FILE', or false
GlobSync . prototype . _stat = function ( f ) {
var abs = this . _makeAbs ( f )
var needDir = f . slice ( - 1 ) === '/'
if ( f . length > this . maxLength )
return false
if ( ! this . stat && ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
if ( Array . isArray ( c ) )
c = 'DIR'
// It exists, but maybe not how we need it
if ( ! needDir || c === 'DIR' )
return c
if ( needDir && c === 'FILE' )
return false
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this . statCache [ abs ]
if ( ! stat ) {
var lstat
try {
lstat = this . fs . lstatSync ( abs )
} catch ( er ) {
if ( er && ( er . code === 'ENOENT' || er . code === 'ENOTDIR' ) ) {
this . statCache [ abs ] = false
return false
}
}
if ( lstat && lstat . isSymbolicLink ( ) ) {
try {
stat = this . fs . statSync ( abs )
} catch ( er ) {
stat = lstat
}
} else {
stat = lstat
}
}
this . statCache [ abs ] = stat
var c = true
if ( stat )
c = stat . isDirectory ( ) ? 'DIR' : 'FILE'
this . cache [ abs ] = this . cache [ abs ] || c
if ( needDir && c === 'FILE' )
return false
return c
}
GlobSync . prototype . _mark = function ( p ) {
return common . mark ( this , p )
}
GlobSync . prototype . _makeAbs = function ( f ) {
return common . makeAbs ( this , f )
}
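// Editorial usage sketch (never invoked): the synchronous entry points defined
// in this module. The pattern and options are assumptions.
/* istanbul ignore next */
function _globSyncExample () {
  // globSync(pattern, options) walks eagerly and returns the match list
  var files = globSync('*.json', { dot: false })
  // new GlobSync(...) exposes the same result on its `found` property
  var found = new GlobSync('*.json', {}).found
  return files.concat(found)
}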
/***/ } ) ,
/***/ 4959 :
/***/ ( ( module , __unused_webpack_exports , __nccwpck_require__ ) => {
const assert = __nccwpck_require__ ( 9491 )
const path = __nccwpck_require__ ( 1017 )
const fs = __nccwpck_require__ ( 7147 )
let glob = undefined
try {
glob = __nccwpck_require__ ( 6968 )
} catch ( _err ) {
// treat glob as optional.
}
const defaultGlobOpts = {
nosort : true ,
silent : true
}
// for EMFILE handling
let timeout = 0
const isWindows = ( process . platform === "win32" )
const defaults = options => {
const methods = [
'unlink' ,
'chmod' ,
'stat' ,
'lstat' ,
'rmdir' ,
'readdir'
]
methods . forEach ( m => {
options [ m ] = options [ m ] || fs [ m ]
m = m + 'Sync'
options [ m ] = options [ m ] || fs [ m ]
} )
options . maxBusyTries = options . maxBusyTries || 3
options . emfileWait = options . emfileWait || 1000
if ( options . glob === false ) {
options . disableGlob = true
}
if ( options . disableGlob !== true && glob === undefined ) {
throw Error ( 'glob dependency not found, set `options.disableGlob = true` if intentional' )
}
options . disableGlob = options . disableGlob || false
options . glob = options . glob || defaultGlobOpts
}
const rimraf = ( p , options , cb ) => {
if ( typeof options === 'function' ) {
cb = options
options = { }
}
assert ( p , 'rimraf: missing path' )
assert . equal ( typeof p , 'string' , 'rimraf: path should be a string' )
assert . equal ( typeof cb , 'function' , 'rimraf: callback function required' )
assert ( options , 'rimraf: invalid options argument provided' )
assert . equal ( typeof options , 'object' , 'rimraf: options should be object' )
defaults ( options )
let busyTries = 0
let errState = null
let n = 0
const next = ( er ) => {
errState = errState || er
if ( -- n === 0 )
cb ( errState )
}
const afterGlob = ( er , results ) => {
if ( er )
return cb ( er )
n = results . length
if ( n === 0 )
return cb ( )
results . forEach ( p => {
const CB = ( er ) => {
if ( er ) {
if ( ( er . code === "EBUSY" || er . code === "ENOTEMPTY" || er . code === "EPERM" ) &&
busyTries < options . maxBusyTries ) {
busyTries ++
// try again, with the same exact callback as this one.
return setTimeout ( ( ) => rimraf_ ( p , options , CB ) , busyTries * 100 )
}

// this one won't happen if graceful-fs is used.
if ( er . code === "EMFILE" && timeout < options . emfileWait ) {
return setTimeout ( ( ) => rimraf_ ( p , options , CB ) , timeout ++ )
}

// already gone
if ( er . code === "ENOENT" ) er = null
}

timeout = 0
next ( er )
}
rimraf_ ( p , options , CB )
} )
}
if ( options . disableGlob || ! glob . hasMagic ( p ) )
return afterGlob ( null , [ p ] )
options . lstat ( p , ( er , stat ) => {
if ( ! er )
return afterGlob ( null , [ p ] )
glob ( p , options . glob , afterGlob )
} )
}
// Two possible strategies.
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong. However, there
// are likely far more normal files in the world than directories. This
// is based on the assumption that the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
const rimraf_ = ( p , options , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
// sunos lets the root user unlink directories, which is... weird.
// so we have to lstat here and make sure it's not a dir.
options . lstat ( p , ( er , st ) => {
if ( er && er . code === "ENOENT" )
return cb ( null )
// Windows can EPERM on stat. Life is suffering.
if ( er && er . code === "EPERM" && isWindows )
fixWinEPERM ( p , options , er , cb )
if ( st && st . isDirectory ( ) )
return rmdir ( p , options , er , cb )
options . unlink ( p , er => {
if ( er ) {
if ( er . code === "ENOENT" )
return cb ( null )
if ( er . code === "EPERM" )
return ( isWindows )
? fixWinEPERM ( p , options , er , cb )
: rmdir ( p , options , er , cb )
if ( er . code === "EISDIR" )
return rmdir ( p , options , er , cb )
}
return cb ( er )
} )
} )
}
const fixWinEPERM = ( p , options , er , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
options . chmod ( p , 0o666 , er2 => {
if ( er2 )
cb ( er2 . code === "ENOENT" ? null : er )
else
options . stat ( p , ( er3 , stats ) => {
if ( er3 )
cb ( er3 . code === "ENOENT" ? null : er )
else if ( stats . isDirectory ( ) )
rmdir ( p , options , er , cb )
else
options . unlink ( p , cb )
} )
} )
}
const fixWinEPERMSync = ( p , options , er ) => {
assert ( p )
assert ( options )
try {
options . chmodSync ( p , 0o666 )
} catch ( er2 ) {
if ( er2 . code === "ENOENT" )
return
else
throw er
}
let stats
try {
stats = options . statSync ( p )
} catch ( er3 ) {
if ( er3 . code === "ENOENT" )
return
else
throw er
}
if ( stats . isDirectory ( ) )
rmdirSync ( p , options , er )
else
options . unlinkSync ( p )
}
const rmdir = ( p , options , originalEr , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
// if we guessed wrong, and it's not a directory, then
// raise the original error.
options . rmdir ( p , er => {
if ( er && ( er . code === "ENOTEMPTY" || er . code === "EEXIST" || er . code === "EPERM" ) )
rmkids ( p , options , cb )
else if ( er && er . code === "ENOTDIR" )
cb ( originalEr )
else
cb ( er )
} )
}
const rmkids = ( p , options , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
options . readdir ( p , ( er , files ) => {
if ( er )
return cb ( er )
let n = files . length
if ( n === 0 )
return options . rmdir ( p , cb )
let errState
files . forEach ( f => {
rimraf ( path . join ( p , f ) , options , er => {
if ( errState )
return
if ( er )
return cb ( errState = er )
if ( -- n === 0 )
options . rmdir ( p , cb )
} )
} )
} )
}
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
const rimrafSync = ( p , options ) => {
options = options || { }
defaults ( options )
assert ( p , 'rimraf: missing path' )
assert . equal ( typeof p , 'string' , 'rimraf: path should be a string' )
assert ( options , 'rimraf: missing options' )
assert . equal ( typeof options , 'object' , 'rimraf: options should be object' )
let results
if ( options . disableGlob || ! glob . hasMagic ( p ) ) {
results = [ p ]
} else {
try {
options . lstatSync ( p )
results = [ p ]
} catch ( er ) {
results = glob . sync ( p , options . glob )
}
}
if ( ! results . length )
return
for ( let i = 0 ; i < results . length ; i ++ ) {
const p = results [ i ]
let st
try {
st = options . lstatSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
// Windows can EPERM on stat. Life is suffering.
if ( er . code === "EPERM" && isWindows )
fixWinEPERMSync ( p , options , er )
}
try {
// sunos lets the root user unlink directories, which is... weird.
if ( st && st . isDirectory ( ) )
rmdirSync ( p , options , null )
else
options . unlinkSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
if ( er . code === "EPERM" )
return isWindows ? fixWinEPERMSync ( p , options , er ) : rmdirSync ( p , options , er )
if ( er . code !== "EISDIR" )
throw er
rmdirSync ( p , options , er )
}
}
}
const rmdirSync = ( p , options , originalEr ) => {
assert ( p )
assert ( options )
try {
options . rmdirSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
if ( er . code === "ENOTDIR" )
throw originalEr
if ( er . code === "ENOTEMPTY" || er . code === "EEXIST" || er . code === "EPERM" )
rmkidsSync ( p , options )
}
}
const rmkidsSync = ( p , options ) => {
assert ( p )
assert ( options )
options . readdirSync ( p ) . forEach ( f => rimrafSync ( path . join ( p , f ) , options ) )
// We only end up here once we got ENOTEMPTY at least once, and
// at this point, we are guaranteed to have removed all the kids.
// So, we know that it won't be ENOENT or ENOTDIR or anything else.
// try really hard to delete stuff on windows, because it has a
// PROFOUNDLY annoying habit of not closing handles promptly when
// files are deleted, resulting in spurious ENOTEMPTY errors.
const retries = isWindows ? 100 : 1
let i = 0
do {
let threw = true
try {
const ret = options . rmdirSync ( p , options )
threw = false
return ret
} finally {
if ( ++ i < retries && threw )
continue
}
} while ( true )
}
module . exports = rimraf
rimraf . sync = rimrafSync
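// Editorial usage sketch (never invoked): how the exports above are typically
// called. The paths and options are assumptions.
/* istanbul ignore next */
const _rimrafExample = () => {
  // async form: glob patterns in the path are expanded unless disableGlob is set
  rimraf('build/tmp-*', { disableGlob: false }, er => {
    if (er) throw er
  })
  // sync form removes the tree before returning
  rimraf.sync('build/cache', { disableGlob: true })
}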
/***/ } ) ,
/***/ 8065 :
/***/ ( ( module , __unused_webpack_exports , __nccwpck_require__ ) => {
"use strict" ;
const { promisify } = __nccwpck_require__ ( 3837 ) ;
const tmp = __nccwpck_require__ ( 8517 ) ;
// file
module . exports . fileSync = tmp . fileSync ;
const fileWithOptions = promisify ( ( options , cb ) =>
tmp . file ( options , ( err , path , fd , cleanup ) =>
err ? cb ( err ) : cb ( undefined , { path , fd , cleanup : promisify ( cleanup ) } )
)
) ;
module . exports . file = async ( options ) => fileWithOptions ( options ) ;
module . exports . withFile = async function withFile ( fn , options ) {
const { path , fd , cleanup } = await module . exports . file ( options ) ;
try {
return await fn ( { path , fd } ) ;
} finally {
await cleanup ( ) ;
}
} ;
// directory
module . exports . dirSync = tmp . dirSync ;
const dirWithOptions = promisify ( ( options , cb ) =>
tmp . dir ( options , ( err , path , cleanup ) =>
err ? cb ( err ) : cb ( undefined , { path , cleanup : promisify ( cleanup ) } )
)
) ;
module . exports . dir = async ( options ) => dirWithOptions ( options ) ;
module . exports . withDir = async function withDir ( fn , options ) {
const { path , cleanup } = await module . exports . dir ( options ) ;
try {
return await fn ( { path } ) ;
} finally {
await cleanup ( ) ;
}
} ;
// name generation
module . exports . tmpNameSync = tmp . tmpNameSync ;
module . exports . tmpName = promisify ( tmp . tmpName ) ;
module . exports . tmpdir = tmp . tmpdir ;
module . exports . setGracefulCleanup = tmp . setGracefulCleanup ;
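// Usage sketch (comment only, not executed by this bundle): the promisified
// wrapper above mirrors the `tmp-promise` API; option values are illustrative
// and the calls are assumed to run inside an async function.
//
//   const tmp = require('tmp-promise');
//   const { path, fd, cleanup } = await tmp.file({ postfix: '.txt' });
//   // ... use path/fd ...
//   await cleanup();
//   // scoped form: the directory is removed once the callback settles
//   await tmp.withDir(async ({ path }) => { /* work inside path */ }, { unsafeCleanup: true });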
/***/ } ) ,
/***/ 8517 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
/ * !
* Tmp
*
* Copyright ( c ) 2011 - 2017 KARASZI Istvan < github @ spam . raszi . hu >
*
* MIT Licensed
* /
/ *
* Module dependencies .
* /
const fs = _ _nccwpck _require _ _ ( 7147 ) ;
const os = _ _nccwpck _require _ _ ( 2037 ) ;
const path = _ _nccwpck _require _ _ ( 1017 ) ;
const crypto = _ _nccwpck _require _ _ ( 6113 ) ;
const _c = { fs : fs . constants , os : os . constants } ;
const rimraf = _ _nccwpck _require _ _ ( 4959 ) ;
/ *
* The working inner variables .
* /
const
// the random characters to choose from
RANDOM _CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' ,
TEMPLATE _PATTERN = /XXXXXX/ ,
DEFAULT _TRIES = 3 ,
CREATE _FLAGS = ( _c . O _CREAT || _c . fs . O _CREAT ) | ( _c . O _EXCL || _c . fs . O _EXCL ) | ( _c . O _RDWR || _c . fs . O _RDWR ) ,
// constants are off on the windows platform and will not match the actual errno codes
IS _WIN32 = os . platform ( ) === 'win32' ,
EBADF = _c . EBADF || _c . os . errno . EBADF ,
ENOENT = _c . ENOENT || _c . os . errno . ENOENT ,
DIR _MODE = 0o700 /* 448 */ ,
FILE _MODE = 0o600 /* 384 */ ,
EXIT = 'exit' ,
// this will hold the objects need to be removed on exit
_removeObjects = [ ] ,
// API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
FN _RMDIR _SYNC = fs . rmdirSync . bind ( fs ) ,
FN _RIMRAF _SYNC = rimraf . sync ;
let
_gracefulCleanup = false ;
/ * *
* Gets a temporary file name .
*
* @ param { ( Options | tmpNameCallback ) } options options or callback
* @ param { ? tmpNameCallback } callback the callback function
* /
function tmpName ( options , callback ) {
const
args = _parseArguments ( options , callback ) ,
opts = args [ 0 ] ,
cb = args [ 1 ] ;
try {
_assertAndSanitizeOptions ( opts ) ;
} catch ( err ) {
return cb ( err ) ;
}
let tries = opts . tries ;
( function _getUniqueName ( ) {
try {
const name = _generateTmpName ( opts ) ;
// check whether the path exists then retry if needed
fs . stat ( name , function ( err ) {
/* istanbul ignore else */
if ( ! err ) {
/* istanbul ignore else */
if ( tries -- > 0 ) return _getUniqueName ( ) ;
return cb ( new Error ( 'Could not get a unique tmp filename, max tries reached ' + name ) ) ;
}
cb ( null , name ) ;
} ) ;
} catch ( err ) {
cb ( err ) ;
}
} ( ) ) ;
}
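// Usage sketch (comment only, not executed by this bundle): tmpName only reserves
// a unique pathname, it does not create anything on disk. The prefix is an
// illustrative assumption.
//
//   tmpName({ prefix: 'upload' }, (err, name) => {
//     if (err) throw err;
//     // name is a unique, not-yet-existing path under the configured tmpdir
//   });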
/ * *
* Synchronous version of tmpName .
*
* @ param { Object } options
* @ returns { string } the generated random name
* @ throws { Error } if the options are invalid or could not generate a filename
* /
function tmpNameSync ( options ) {
const
args = _parseArguments ( options ) ,
opts = args [ 0 ] ;
_assertAndSanitizeOptions ( opts ) ;
let tries = opts . tries ;
do {
const name = _generateTmpName ( opts ) ;
try {
fs . statSync ( name ) ;
} catch ( e ) {
return name ;
}
} while ( tries -- > 0 ) ;
throw new Error ( 'Could not get a unique tmp filename, max tries reached' ) ;
}
/ * *
* Creates and opens a temporary file .
*
* @ param { ( Options | null | undefined | fileCallback ) } options the config options or the callback function or null or undefined
* @ param { ? fileCallback } callback
* /
function file ( options , callback ) {
const
args = _parseArguments ( options , callback ) ,
opts = args [ 0 ] ,
cb = args [ 1 ] ;
// gets a temporary filename
tmpName ( opts , function _tmpNameCreated ( err , name ) {
/* istanbul ignore else */
if ( err ) return cb ( err ) ;
// create and open the file
fs . open ( name , CREATE _FLAGS , opts . mode || FILE _MODE , function _fileCreated ( err , fd ) {
/* istanbul ignore else */
if ( err ) return cb ( err ) ;
if ( opts . discardDescriptor ) {
return fs . close ( fd , function _discardCallback ( possibleErr ) {
// the chance of getting an error on close here is rather low and would occur only in the most extreme edge cases
return cb ( possibleErr , name , undefined , _prepareTmpFileRemoveCallback ( name , - 1 , opts , false ) ) ;
} ) ;
} else {
// detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care
// about the descriptor
const discardOrDetachDescriptor = opts . discardDescriptor || opts . detachDescriptor ;
cb ( null , name , fd , _prepareTmpFileRemoveCallback ( name , discardOrDetachDescriptor ? - 1 : fd , opts , false ) ) ;
}
} ) ;
} ) ;
}
/ * *
* Synchronous version of file .
*
* @ param { Options } options
* @ returns { FileSyncObject } object consists of name , fd and removeCallback
* @ throws { Error } if cannot create a file
* /
function fileSync ( options ) {
const
args = _parseArguments ( options ) ,
opts = args [ 0 ] ;
const discardOrDetachDescriptor = opts . discardDescriptor || opts . detachDescriptor ;
const name = tmpNameSync ( opts ) ;
var fd = fs . openSync ( name , CREATE _FLAGS , opts . mode || FILE _MODE ) ;
/* istanbul ignore else */
if ( opts . discardDescriptor ) {
fs . closeSync ( fd ) ;
fd = undefined ;
}
return {
name : name ,
fd : fd ,
removeCallback : _prepareTmpFileRemoveCallback ( name , discardOrDetachDescriptor ? - 1 : fd , opts , true )
} ;
}
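// Usage sketch (comment only, not executed by this bundle): with discardDescriptor
// the descriptor is closed immediately and fd is undefined; removeCallback deletes
// the file. The options shown are illustrative.
//
//   const { name, fd, removeCallback } = fileSync({ discardDescriptor: true, postfix: '.log' });
//   // ... write to name via fs APIs ...
//   removeCallback();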
/ * *
* Creates a temporary directory .
*
* @ param { ( Options | dirCallback ) } options the options or the callback function
* @ param { ? dirCallback } callback
* /
function dir ( options , callback ) {
const
args = _parseArguments ( options , callback ) ,
opts = args [ 0 ] ,
cb = args [ 1 ] ;
// gets a temporary filename
tmpName ( opts , function _tmpNameCreated ( err , name ) {
/* istanbul ignore else */
if ( err ) return cb ( err ) ;
// create the directory
fs . mkdir ( name , opts . mode || DIR _MODE , function _dirCreated ( err ) {
/* istanbul ignore else */
if ( err ) return cb ( err ) ;
cb ( null , name , _prepareTmpDirRemoveCallback ( name , opts , false ) ) ;
} ) ;
} ) ;
}
/ * *
* Synchronous version of dir .
*
* @ param { Options } options
* @ returns { DirSyncObject } object consists of name and removeCallback
* @ throws { Error } if it cannot create a directory
* /
function dirSync ( options ) {
const
args = _parseArguments ( options ) ,
opts = args [ 0 ] ;
const name = tmpNameSync ( opts ) ;
fs . mkdirSync ( name , opts . mode || DIR _MODE ) ;
return {
name : name ,
removeCallback : _prepareTmpDirRemoveCallback ( name , opts , true )
} ;
}
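// Usage sketch (comment only, not executed by this bundle): unsafeCleanup lets
// removeCallback delete a non-empty directory via rimraf; without it, rmdir is
// used and the directory must already be empty. The option is illustrative.
//
//   const { name, removeCallback } = dirSync({ unsafeCleanup: true });
//   // ... populate the directory ...
//   removeCallback();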
/ * *
* Removes files asynchronously .
*
* @ param { Object } fdPath
* @ param { Function } next
* @ private
* /
function _removeFileAsync ( fdPath , next ) {
const _handler = function ( err ) {
if ( err && ! _isENOENT ( err ) ) {
// reraise any unanticipated error
return next ( err ) ;
}
next ( ) ;
} ;
if ( 0 <= fdPath [ 0 ] )
fs . close ( fdPath [ 0 ] , function ( ) {
fs . unlink ( fdPath [ 1 ] , _handler ) ;
} ) ;
else fs . unlink ( fdPath [ 1 ] , _handler ) ;
}
/ * *
* Removes files synchronously .
*
* @ param { Object } fdPath
* @ private
* /
function _removeFileSync ( fdPath ) {
let rethrownException = null ;
try {
if ( 0 <= fdPath [ 0 ] ) fs . closeSync ( fdPath [ 0 ] ) ;
} catch ( e ) {
// reraise any unanticipated error
if ( ! _isEBADF ( e ) && ! _isENOENT ( e ) ) throw e ;
} finally {
try {
fs . unlinkSync ( fdPath [ 1 ] ) ;
}
catch ( e ) {
// reraise any unanticipated error
if ( ! _isENOENT ( e ) ) rethrownException = e ;
}
}
if ( rethrownException !== null ) {
throw rethrownException ;
}
}
/ * *
* Prepares the callback for removal of the temporary file .
*
* Returns either a sync callback or an async callback depending on whether
* fileSync or file was called , which is expressed by the sync parameter .
*
* @ param { string } name the path of the file
* @ param { number } fd file descriptor
* @ param { Object } opts
* @ param { boolean } sync
* @ returns { fileCallback | fileCallbackSync }
* @ private
* /
function _prepareTmpFileRemoveCallback ( name , fd , opts , sync ) {
const removeCallbackSync = _prepareRemoveCallback ( _removeFileSync , [ fd , name ] , sync ) ;
const removeCallback = _prepareRemoveCallback ( _removeFileAsync , [ fd , name ] , sync , removeCallbackSync ) ;
if ( ! opts . keep ) _removeObjects . unshift ( removeCallbackSync ) ;
return sync ? removeCallbackSync : removeCallback ;
}
/ * *
* Prepares the callback for removal of the temporary directory .
*
* Returns either a sync callback or an async callback depending on whether
* tmpFileSync or tmpFile was called , which is expressed by the sync parameter .
*
* @ param { string } name
* @ param { Object } opts
* @ param { boolean } sync
* @ returns { Function } the callback
* @ private
* /
function _prepareTmpDirRemoveCallback ( name , opts , sync ) {
const removeFunction = opts . unsafeCleanup ? rimraf : fs . rmdir . bind ( fs ) ;
const removeFunctionSync = opts . unsafeCleanup ? FN _RIMRAF _SYNC : FN _RMDIR _SYNC ;
const removeCallbackSync = _prepareRemoveCallback ( removeFunctionSync , name , sync ) ;
const removeCallback = _prepareRemoveCallback ( removeFunction , name , sync , removeCallbackSync ) ;
if ( ! opts . keep ) _removeObjects . unshift ( removeCallbackSync ) ;
return sync ? removeCallbackSync : removeCallback ;
}
/ * *
* Creates a guarded function wrapping the removeFunction call .
*
* The cleanup callback is safe to be called multiple times .
* Subsequent invocations will be ignored .
*
* @ param { Function } removeFunction
* @ param { string } fileOrDirName
* @ param { boolean } sync
* @ param { cleanupCallbackSync ? } cleanupCallbackSync
* @ returns { cleanupCallback | cleanupCallbackSync }
* @ private
* /
function _prepareRemoveCallback ( removeFunction , fileOrDirName , sync , cleanupCallbackSync ) {
let called = false ;
// if sync is true, the next parameter will be ignored
return function _cleanupCallback ( next ) {
/* istanbul ignore else */
if ( ! called ) {
// remove cleanupCallback from cache
const toRemove = cleanupCallbackSync || _cleanupCallback ;
const index = _removeObjects . indexOf ( toRemove ) ;
/* istanbul ignore else */
if ( index >= 0 ) _removeObjects . splice ( index , 1 ) ;
called = true ;
if ( sync || removeFunction === FN _RMDIR _SYNC || removeFunction === FN _RIMRAF _SYNC ) {
return removeFunction ( fileOrDirName ) ;
} else {
return removeFunction ( fileOrDirName , next || function ( ) { } ) ;
}
}
} ;
}
/ * *
* The garbage collector .
*
* @ private
* /
function _garbageCollector ( ) {
/* istanbul ignore else */
if ( ! _gracefulCleanup ) return ;
// the function being called removes itself from _removeObjects,
// loop until _removeObjects is empty
while ( _removeObjects . length ) {
try {
_removeObjects [ 0 ] ( ) ;
} catch ( e ) {
// already removed?
}
}
}
/ * *
* Random name generator based on crypto .
* Adapted from http : //blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
*
* @ param { number } howMany
* @ returns { string } the generated random name
* @ private
* /
function _randomChars ( howMany ) {
let
value = [ ] ,
rnd = null ;
// make sure that we do not fail because we ran out of entropy
try {
rnd = crypto . randomBytes ( howMany ) ;
} catch ( e ) {
rnd = crypto . pseudoRandomBytes ( howMany ) ;
}
for ( var i = 0 ; i < howMany ; i ++ ) {
value . push ( RANDOM _CHARS [ rnd [ i ] % RANDOM _CHARS . length ] ) ;
}
return value . join ( '' ) ;
}
/ * *
* Helper which determines whether a string s is blank , that is undefined , or empty or null .
*
* @ private
* @ param { string } s
* @ returns { Boolean } true whether the string s is blank , false otherwise
* /
function _isBlank ( s ) {
return s === null || _isUndefined ( s ) || ! s . trim ( ) ;
}
/ * *
* Checks whether the ` obj ` parameter is defined or not .
*
* @ param { Object } obj
* @ returns { boolean } true if the object is undefined
* @ private
* /
function _isUndefined ( obj ) {
return typeof obj === 'undefined' ;
}
/ * *
* Parses the function arguments .
*
* This function helps to have optional arguments .
*
* @ param { ( Options | null | undefined | Function ) } options
* @ param { ? Function } callback
* @ returns { Array } parsed arguments
* @ private
* /
function _parseArguments ( options , callback ) {
/* istanbul ignore else */
if ( typeof options === 'function' ) {
return [ { } , options ] ;
}
/* istanbul ignore else */
if ( _isUndefined ( options ) ) {
return [ { } , callback ] ;
}
// copy options so we do not leak the changes we make internally
const actualOptions = { } ;
for ( const key of Object . getOwnPropertyNames ( options ) ) {
actualOptions [ key ] = options [ key ] ;
}
return [ actualOptions , callback ] ;
}
/ * *
* Generates a new temporary name .
*
* @ param { Object } opts
* @ returns { string } the new random name according to opts
* @ private
* /
function _generateTmpName ( opts ) {
const tmpDir = opts . tmpdir ;
/* istanbul ignore else */
if ( ! _isUndefined ( opts . name ) )
return path . join ( tmpDir , opts . dir , opts . name ) ;
/* istanbul ignore else */
if ( ! _isUndefined ( opts . template ) )
return path . join ( tmpDir , opts . dir , opts . template ) . replace ( TEMPLATE _PATTERN , _randomChars ( 6 ) ) ;
// prefix and postfix
const name = [
opts . prefix ? opts . prefix : 'tmp' ,
'-' ,
process . pid ,
'-' ,
_randomChars ( 12 ) ,
opts . postfix ? '-' + opts . postfix : ''
] . join ( '' ) ;
return path . join ( tmpDir , opts . dir , name ) ;
}
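// Shape sketch (comment only): without `name` or `template`, the generated path is
// `<tmpdir>/<prefix>-<pid>-<12 random chars>`, with `-<postfix>` appended when a
// postfix is given; e.g. { prefix: 'build' } might yield something like
// /tmp/build-12345-AbCdEfGhIjKl (the exact values are illustrative, not deterministic).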
/ * *
* Asserts whether the specified options are valid , also sanitizes options and provides sane defaults for missing
* options .
*
* @ param { Options } options
* @ private
* /
function _assertAndSanitizeOptions ( options ) {
options . tmpdir = _getTmpDir ( options ) ;
const tmpDir = options . tmpdir ;
/* istanbul ignore else */
if ( ! _isUndefined ( options . name ) )
_assertIsRelative ( options . name , 'name' , tmpDir ) ;
/* istanbul ignore else */
if ( ! _isUndefined ( options . dir ) )
_assertIsRelative ( options . dir , 'dir' , tmpDir ) ;
/* istanbul ignore else */
if ( ! _isUndefined ( options . template ) ) {
_assertIsRelative ( options . template , 'template' , tmpDir ) ;
if ( ! options . template . match ( TEMPLATE _PATTERN ) )
throw new Error ( ` Invalid template, found " ${ options . template } ". ` ) ;
}
/* istanbul ignore else */
if ( ! _isUndefined ( options . tries ) && isNaN ( options . tries ) || options . tries < 0 )
throw new Error ( ` Invalid tries, found " ${ options . tries } ". ` ) ;
// if a name was specified we will try once
options . tries = _isUndefined ( options . name ) ? options . tries || DEFAULT _TRIES : 1 ;
options . keep = ! ! options . keep ;
options . detachDescriptor = ! ! options . detachDescriptor ;
options . discardDescriptor = ! ! options . discardDescriptor ;
options . unsafeCleanup = ! ! options . unsafeCleanup ;
// sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
options . dir = _isUndefined ( options . dir ) ? '' : path . relative ( tmpDir , _resolvePath ( options . dir , tmpDir ) ) ;
options . template = _isUndefined ( options . template ) ? undefined : path . relative ( tmpDir , _resolvePath ( options . template , tmpDir ) ) ;
// sanitize further if template is relative to options.dir
options . template = _isBlank ( options . template ) ? undefined : path . relative ( options . dir , options . template ) ;
// for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
options . name = _isUndefined ( options . name ) ? undefined : _sanitizeName ( options . name ) ;
options . prefix = _isUndefined ( options . prefix ) ? '' : options . prefix ;
options . postfix = _isUndefined ( options . postfix ) ? '' : options . postfix ;
}
/ * *
* Resolve the specified path name in respect to tmpDir .
*
* The specified name might include relative path components , e . g . . . /
* so we need to resolve in order to be sure that it is located inside tmpDir
*
* @ param name
* @ param tmpDir
* @ returns { string }
* @ private
* /
function _resolvePath ( name , tmpDir ) {
const sanitizedName = _sanitizeName ( name ) ;
if ( sanitizedName . startsWith ( tmpDir ) ) {
return path . resolve ( sanitizedName ) ;
} else {
return path . resolve ( path . join ( tmpDir , sanitizedName ) ) ;
}
}
/ * *
* Sanitize the specified path name by removing all quote characters .
*
* @ param name
* @ returns { string }
* @ private
* /
function _sanitizeName ( name ) {
if ( _isBlank ( name ) ) {
return name ;
}
return name . replace ( /["']/g , '' ) ;
}
/ * *
* Asserts whether specified name is relative to the specified tmpDir .
*
* @ param { string } name
* @ param { string } option
* @ param { string } tmpDir
* @ throws { Error }
* @ private
* /
function _assertIsRelative ( name , option , tmpDir ) {
if ( option === 'name' ) {
// assert that name is not absolute and does not contain a path
if ( path . isAbsolute ( name ) )
throw new Error ( ` ${ option } option must not contain an absolute path, found " ${ name } ". ` ) ;
// must not fail on valid .<name> or ..<name> or similar such constructs
let basename = path . basename ( name ) ;
if ( basename === '..' || basename === '.' || basename !== name )
throw new Error ( ` ${ option } option must not contain a path, found " ${ name } ". ` ) ;
}
else { // if (option === 'dir' || option === 'template') {
// assert that dir or template are relative to tmpDir
if ( path . isAbsolute ( name ) && ! name . startsWith ( tmpDir ) ) {
throw new Error ( ` ${ option } option must be relative to " ${ tmpDir } ", found " ${ name } ". ` ) ;
}
let resolvedPath = _resolvePath ( name , tmpDir ) ;
if ( ! resolvedPath . startsWith ( tmpDir ) )
throw new Error ( ` ${ option } option must be relative to " ${ tmpDir } ", found " ${ resolvedPath } ". ` ) ;
}
}
/ * *
* Helper for testing against EBADF to compensate changes made to Node 7. x under Windows .
*
* @ private
* /
function _isEBADF ( error ) {
return _isExpectedError ( error , - EBADF , 'EBADF' ) ;
}
/ * *
* Helper for testing against ENOENT to compensate changes made to Node 7. x under Windows .
*
* @ private
* /
function _isENOENT ( error ) {
return _isExpectedError ( error , - ENOENT , 'ENOENT' ) ;
}
/ * *
* Helper to determine whether the expected error code matches the actual code and errno ,
* which will differ between the supported node versions .
*
* - Node >= 7.0 :
* error . code { string }
* error . errno { number } any numerical value will be negated
*
* CAVEAT
*
* On windows , the errno for EBADF is - 4083 but os . constants . errno . EBADF is different and we must assume that ENOENT
* is no different here .
*
* @ param { SystemError } error
* @ param { number } errno
* @ param { string } code
* @ private
* /
function _isExpectedError ( error , errno , code ) {
return IS _WIN32 ? error . code === code : error . code === code && error . errno === errno ;
}
/ * *
* Sets the graceful cleanup .
*
* If graceful cleanup is set , tmp will remove all controlled temporary objects on process exit , otherwise the
* temporary objects will remain in place , waiting to be cleaned up on system restart or otherwise scheduled temporary
* object removals .
* /
function setGracefulCleanup ( ) {
_gracefulCleanup = true ;
}
/ * *
* Returns the currently configured tmp dir from os . tmpdir ( ) .
*
* @ private
* @ param { ? Options } options
* @ returns { string } the currently configured tmp dir
* /
function _getTmpDir ( options ) {
return path . resolve ( _sanitizeName ( options && options . tmpdir || os . tmpdir ( ) ) ) ;
}
// Install process exit listener
process . addListener ( EXIT , _garbageCollector ) ;
/ * *
* Configuration options .
*
* @ typedef { Object } Options
* @ property { ? boolean } keep the temporary object ( file or dir ) will not be garbage collected
* @ property { ? number } tries the number of tries before giving up the name generation
* @ property { ? number } mode the access mode , defaults are 0o700 for directories and 0o600 for files
* @ property { ? string } template the "mkstemp" like filename template
* @ property { ? string } name fixed name relative to tmpdir or the specified dir option
* @ property { ? string } dir tmp directory relative to the root tmp directory in use
* @ property { ? string } prefix prefix for the generated name
* @ property { ? string } postfix postfix for the generated name
* @ property { ? string } tmpdir the root tmp directory which overrides the os tmpdir
* @ property { ? boolean } unsafeCleanup recursively removes the created temporary directory , even when it ' s not empty
* @ property { ? boolean } detachDescriptor detaches the file descriptor , caller is responsible for closing the file , tmp will no longer try closing the file during garbage collection
* @ property { ? boolean } discardDescriptor discards the file descriptor ( closes file , fd is - 1 ) , tmp will no longer try closing the file during garbage collection
* /
/ * *
* @ typedef { Object } FileSyncObject
* @ property { string } name the name of the file
* @ property { string } fd the file descriptor or - 1 if the fd has been discarded
* @ property { fileCallback } removeCallback the callback function to remove the file
* /
/ * *
* @ typedef { Object } DirSyncObject
* @ property { string } name the name of the directory
* @ property { fileCallback } removeCallback the callback function to remove the directory
* /
/ * *
* @ callback tmpNameCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* /
/ * *
* @ callback fileCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
* @ param { cleanupCallback } fn the cleanup callback function
* /
/ * *
* @ callback fileCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
/ * *
* @ callback dirCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallback } fn the cleanup callback function
* /
/ * *
* @ callback dirCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallback
* @ param { simpleCallback } [ next ] function to call whenever the tmp object needs to be removed
* /
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallbackSync
* /
/ * *
* Callback function for function composition .
* @ see { @ link https : //github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
*
* @ callback simpleCallback
* /
// exporting all the needed methods
// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
// allow users to reconfigure the temporary directory
Object . defineProperty ( module . exports , "tmpdir" , ( {
enumerable : true ,
configurable : false ,
get : function ( ) {
return _getTmpDir ( ) ;
}
} ) ) ;
module . exports . dir = dir ;
module . exports . dirSync = dirSync ;
module . exports . file = file ;
module . exports . fileSync = fileSync ;
module . exports . tmpName = tmpName ;
module . exports . tmpNameSync = tmpNameSync ;
module . exports . setGracefulCleanup = setGracefulCleanup ;
/***/ } ) ,
/***/ 4294 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = _ _nccwpck _require _ _ ( 4219 ) ;
/***/ } ) ,
/***/ 4219 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
var net = _ _nccwpck _require _ _ ( 1808 ) ;
var tls = _ _nccwpck _require _ _ ( 4404 ) ;
var http = _ _nccwpck _require _ _ ( 3685 ) ;
var https = _ _nccwpck _require _ _ ( 5687 ) ;
var events = _ _nccwpck _require _ _ ( 2361 ) ;
var assert = _ _nccwpck _require _ _ ( 9491 ) ;
var util = _ _nccwpck _require _ _ ( 3837 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
function httpOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
return agent ;
}
function httpsOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function httpOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
return agent ;
}
function httpsOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
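// Usage sketch (comment only, not executed by this bundle): the four factories above
// match the published `tunnel` package. The proxy host/port, credentials and target
// host are illustrative assumptions.
//
//   const https = require('https');
//   const tunnel = require('tunnel');
//   const agent = tunnel.httpsOverHttp({
//     proxy: { host: 'proxy.example.com', port: 3128, proxyAuth: 'user:password' }
//   });
//   https.get({ host: 'example.com', port: 443, agent }, res => {
//     // the response arrives through the CONNECT tunnel established by the agent
//   });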
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
TunnelingAgent . prototype . createSocket = function createSocket ( options , cb ) {
var self = this ;
var placeholder = { } ;
self . sockets . push ( placeholder ) ;
var connectOptions = mergeOptions ( { } , self . proxyOptions , {
method : 'CONNECT' ,
path : options . host + ':' + options . port ,
agent : false ,
headers : {
host : options . host + ':' + options . port
}
} ) ;
if ( options . localAddress ) {
connectOptions . localAddress = options . localAddress ;
}
if ( connectOptions . proxyAuth ) {
connectOptions . headers = connectOptions . headers || { } ;
connectOptions . headers [ 'Proxy-Authorization' ] = 'Basic ' +
Buffer . from ( connectOptions . proxyAuth ) . toString ( 'base64' ) ;
}
debug ( 'making CONNECT request' ) ;
var connectReq = self . request ( connectOptions ) ;
connectReq . useChunkedEncodingByDefault = false ; // for v0.6
connectReq . once ( 'response' , onResponse ) ; // for v0.6
connectReq . once ( 'upgrade' , onUpgrade ) ; // for v0.6
connectReq . once ( 'connect' , onConnect ) ; // for v0.7 or later
connectReq . once ( 'error' , onError ) ;
connectReq . end ( ) ;
function onResponse ( res ) {
// Very hacky. This is necessary to avoid http-parser leaks.
res . upgrade = true ;
}
function onUpgrade ( res , socket , head ) {
// Hacky.
process . nextTick ( function ( ) {
onConnect ( res , socket , head ) ;
} ) ;
}
function onConnect ( res , socket , head ) {
connectReq . removeAllListeners ( ) ;
socket . removeAllListeners ( ) ;
if ( res . statusCode !== 200 ) {
debug ( 'tunneling socket could not be established, statusCode=%d' ,
res . statusCode ) ;
socket . destroy ( ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'statusCode=' + res . statusCode ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
if ( head . length > 0 ) {
debug ( 'got illegal response body from proxy' ) ;
socket . destroy ( ) ;
var error = new Error ( 'got illegal response body from proxy' ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
debug ( 'tunneling connection has been established' ) ;
self . sockets [ self . sockets . indexOf ( placeholder ) ] = socket ;
return cb ( socket ) ;
}
function onError ( cause ) {
connectReq . removeAllListeners ( ) ;
debug ( 'tunneling socket could not be established, cause=%s\n' ,
cause . message , cause . stack ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'cause=' + cause . message ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
}
} ;
TunnelingAgent . prototype . removeSocket = function removeSocket ( socket ) {
var pos = this . sockets . indexOf ( socket )
if ( pos === - 1 ) {
return ;
}
this . sockets . splice ( pos , 1 ) ;
var pending = this . requests . shift ( ) ;
if ( pending ) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
this . createSocket ( pending , function ( socket ) {
pending . request . onSocket ( socket ) ;
} ) ;
}
} ;
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
function toOptions ( host , port , localAddress ) {
if ( typeof host === 'string' ) { // since v0.10
return {
host : host ,
port : port ,
localAddress : localAddress
} ;
}
return host ; // for v0.11 or later
}
function mergeOptions ( target ) {
for ( var i = 1 , len = arguments . length ; i < len ; ++ i ) {
var overrides = arguments [ i ] ;
if ( typeof overrides === 'object' ) {
var keys = Object . keys ( overrides ) ;
for ( var j = 0 , keyLen = keys . length ; j < keyLen ; ++ j ) {
var k = keys [ j ] ;
if ( overrides [ k ] !== undefined ) {
target [ k ] = overrides [ k ] ;
}
}
}
}
return target ;
}
var debug ;
if ( process . env . NODE _DEBUG && /\btunnel\b/ . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments ) ;
if ( typeof args [ 0 ] === 'string' ) {
args [ 0 ] = 'TUNNEL: ' + args [ 0 ] ;
} else {
args . unshift ( 'TUNNEL:' ) ;
}
console . error . apply ( console , args ) ;
}
} else {
debug = function ( ) { } ;
}
exports . debug = debug ; // for test
/***/ } ) ,
/***/ 5840 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
Object . defineProperty ( exports , "v1" , ( {
enumerable : true ,
get : function ( ) {
return _v . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v3" , ( {
enumerable : true ,
get : function ( ) {
return _v2 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v4" , ( {
enumerable : true ,
get : function ( ) {
return _v3 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v5" , ( {
enumerable : true ,
get : function ( ) {
return _v4 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "NIL" , ( {
enumerable : true ,
get : function ( ) {
return _nil . default ;
}
} ) ) ;
Object . defineProperty ( exports , "version" , ( {
enumerable : true ,
get : function ( ) {
return _version . default ;
}
} ) ) ;
Object . defineProperty ( exports , "validate" , ( {
enumerable : true ,
get : function ( ) {
return _validate . default ;
}
} ) ) ;
Object . defineProperty ( exports , "stringify" , ( {
enumerable : true ,
get : function ( ) {
return _stringify . default ;
}
} ) ) ;
Object . defineProperty ( exports , "parse" , ( {
enumerable : true ,
get : function ( ) {
return _parse . default ;
}
} ) ) ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8628 ) ) ;
var _v2 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6409 ) ) ;
var _v3 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5122 ) ) ;
var _v4 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 9120 ) ) ;
var _nil = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5332 ) ) ;
var _version = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1595 ) ) ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2746 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
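// Usage sketch (comment only, not executed by this bundle): the re-exports above
// correspond to the public `uuid` package API.
//
//   const { v4: uuidv4, validate, version } = require('uuid');
//   const id = uuidv4();   // random (version 4) UUID string
//   validate(id);          // true
//   version(id);           // 4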
/***/ } ) ,
/***/ 4569 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function md5 ( bytes ) {
if ( Array . isArray ( bytes ) ) {
bytes = Buffer . from ( bytes ) ;
} else if ( typeof bytes === 'string' ) {
bytes = Buffer . from ( bytes , 'utf8' ) ;
}
return _crypto . default . createHash ( 'md5' ) . update ( bytes ) . digest ( ) ;
}
var _default = md5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 5332 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _default = '00000000-0000-0000-0000-000000000000' ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 2746 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function parse ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
let v ;
const arr = new Uint8Array ( 16 ) ; // Parse ########-....-....-....-............
arr [ 0 ] = ( v = parseInt ( uuid . slice ( 0 , 8 ) , 16 ) ) >>> 24 ;
arr [ 1 ] = v >>> 16 & 0xff ;
arr [ 2 ] = v >>> 8 & 0xff ;
arr [ 3 ] = v & 0xff ; // Parse ........-####-....-....-............
arr [ 4 ] = ( v = parseInt ( uuid . slice ( 9 , 13 ) , 16 ) ) >>> 8 ;
arr [ 5 ] = v & 0xff ; // Parse ........-....-####-....-............
arr [ 6 ] = ( v = parseInt ( uuid . slice ( 14 , 18 ) , 16 ) ) >>> 8 ;
arr [ 7 ] = v & 0xff ; // Parse ........-....-....-####-............
arr [ 8 ] = ( v = parseInt ( uuid . slice ( 19 , 23 ) , 16 ) ) >>> 8 ;
arr [ 9 ] = v & 0xff ; // Parse ........-....-....-....-############
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
arr [ 10 ] = ( v = parseInt ( uuid . slice ( 24 , 36 ) , 16 ) ) / 0x10000000000 & 0xff ;
arr [ 11 ] = v / 0x100000000 & 0xff ;
arr [ 12 ] = v >>> 24 & 0xff ;
arr [ 13 ] = v >>> 16 & 0xff ;
arr [ 14 ] = v >>> 8 & 0xff ;
arr [ 15 ] = v & 0xff ;
return arr ;
}
var _default = parse ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 814 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 807 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = rng ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const rnds8Pool = new Uint8Array ( 256 ) ; // # of random values to pre-allocate
let poolPtr = rnds8Pool . length ;
function rng ( ) {
if ( poolPtr > rnds8Pool . length - 16 ) {
_crypto . default . randomFillSync ( rnds8Pool ) ;
poolPtr = 0 ;
}
return rnds8Pool . slice ( poolPtr , poolPtr += 16 ) ;
}
/***/ } ) ,
/***/ 5274 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function sha1 ( bytes ) {
if ( Array . isArray ( bytes ) ) {
bytes = Buffer . from ( bytes ) ;
} else if ( typeof bytes === 'string' ) {
bytes = Buffer . from ( bytes , 'utf8' ) ;
}
return _crypto . default . createHash ( 'sha1' ) . update ( bytes ) . digest ( ) ;
}
var _default = sha1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 8950 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/ * *
* Convert array of 16 byte values to UUID string format of the form :
* XXXXXXXX - XXXX - XXXX - XXXX - XXXXXXXXXXXX
* /
const byteToHex = [ ] ;
for ( let i = 0 ; i < 256 ; ++ i ) {
byteToHex . push ( ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ) ;
}
function stringify ( arr , offset = 0 ) {
// Note: Be careful editing this code! It's been tuned for performance
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
const uuid = ( byteToHex [ arr [ offset + 0 ] ] + byteToHex [ arr [ offset + 1 ] ] + byteToHex [ arr [ offset + 2 ] ] + byteToHex [ arr [ offset + 3 ] ] + '-' + byteToHex [ arr [ offset + 4 ] ] + byteToHex [ arr [ offset + 5 ] ] + '-' + byteToHex [ arr [ offset + 6 ] ] + byteToHex [ arr [ offset + 7 ] ] + '-' + byteToHex [ arr [ offset + 8 ] ] + byteToHex [ arr [ offset + 9 ] ] + '-' + byteToHex [ arr [ offset + 10 ] ] + byteToHex [ arr [ offset + 11 ] ] + byteToHex [ arr [ offset + 12 ] ] + byteToHex [ arr [ offset + 13 ] ] + byteToHex [ arr [ offset + 14 ] ] + byteToHex [ arr [ offset + 15 ] ] ) . toLowerCase ( ) ; // Consistency check for valid UUID. If this throws, it's likely due to one
// of the following:
// - One or more input array values don't map to a hex octet (leading to
// "undefined" in the uuid)
// - Invalid input values for the RFC `version` or `variant` fields
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Stringified UUID is invalid' ) ;
}
return uuid ;
}
var _default = stringify ;
exports [ "default" ] = _default ;
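// Round-trip sketch (comment only, not executed by this bundle): via the public
// `uuid` exports, parse and stringify are inverses for any valid UUID string, e.g.
// using the DNS namespace UUID defined elsewhere in this bundle:
//
//   const bytes = parse('6ba7b810-9dad-11d1-80b4-00c04fd430c8'); // Uint8Array(16)
//   stringify(bytes); // '6ba7b810-9dad-11d1-80b4-00c04fd430c8'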
/***/ } ) ,
/***/ 8628 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 807 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
let _nodeId ;
let _clockseq ; // Previous uuid creation time
let _lastMSecs = 0 ;
let _lastNSecs = 0 ; // See https://github.com/uuidjs/uuid for API details
function v1 ( options , buf , offset ) {
let i = buf && offset || 0 ;
const b = buf || new Array ( 16 ) ;
options = options || { } ;
let node = options . node || _nodeId ;
let clockseq = options . clockseq !== undefined ? options . clockseq : _clockseq ; // node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if ( node == null || clockseq == null ) {
const seedBytes = options . random || ( options . rng || _rng . default ) ( ) ;
if ( node == null ) {
// Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
node = _nodeId = [ seedBytes [ 0 ] | 0x01 , seedBytes [ 1 ] , seedBytes [ 2 ] , seedBytes [ 3 ] , seedBytes [ 4 ] , seedBytes [ 5 ] ] ;
}
if ( clockseq == null ) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = ( seedBytes [ 6 ] << 8 | seedBytes [ 7 ] ) & 0x3fff ;
}
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
let msecs = options . msecs !== undefined ? options . msecs : Date . now ( ) ; // Per 4.2.1.2, use count of uuid's generated during the current clock
// cycle to simulate higher resolution clock
let nsecs = options . nsecs !== undefined ? options . nsecs : _lastNSecs + 1 ; // Time since last uuid creation (in msecs)
const dt = msecs - _lastMSecs + ( nsecs - _lastNSecs ) / 10000 ; // Per 4.2.1.2, Bump clockseq on clock regression
if ( dt < 0 && options . clockseq === undefined ) {
clockseq = clockseq + 1 & 0x3fff ;
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ( ( dt < 0 || msecs > _lastMSecs ) && options . nsecs === undefined ) {
nsecs = 0 ;
} // Per 4.2.1.2 Throw error if too many uuids are requested
if ( nsecs >= 10000 ) {
throw new Error ( "uuid.v1(): Can't create more than 10M uuids/sec" ) ;
}
_lastMSecs = msecs ;
_lastNSecs = nsecs ;
_clockseq = clockseq ; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000 ; // `time_low`
const tl = ( ( msecs & 0xfffffff ) * 10000 + nsecs ) % 0x100000000 ;
b [ i ++ ] = tl >>> 24 & 0xff ;
b [ i ++ ] = tl >>> 16 & 0xff ;
b [ i ++ ] = tl >>> 8 & 0xff ;
b [ i ++ ] = tl & 0xff ; // `time_mid`
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff ;
b [ i ++ ] = tmh >>> 8 & 0xff ;
b [ i ++ ] = tmh & 0xff ; // `time_high_and_version`
b [ i ++ ] = tmh >>> 24 & 0xf | 0x10 ; // include version
b [ i ++ ] = tmh >>> 16 & 0xff ; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b [ i ++ ] = clockseq >>> 8 | 0x80 ; // `clock_seq_low`
b [ i ++ ] = clockseq & 0xff ; // `node`
for ( let n = 0 ; n < 6 ; ++ n ) {
b [ i + n ] = node [ n ] ;
}
return buf || ( 0 , _stringify . default ) ( b ) ;
}
var _default = v1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6409 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5998 ) ) ;
var _md = _interopRequireDefault ( _ _nccwpck _require _ _ ( 4569 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v3 = ( 0 , _v . default ) ( 'v3' , 0x30 , _md . default ) ;
var _default = v3 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 5998 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = _default ;
exports . URL = exports . DNS = void 0 ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2746 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function stringToBytes ( str ) {
str = unescape ( encodeURIComponent ( str ) ) ; // UTF8 escape
const bytes = [ ] ;
for ( let i = 0 ; i < str . length ; ++ i ) {
bytes . push ( str . charCodeAt ( i ) ) ;
}
return bytes ;
}
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8' ;
exports . DNS = DNS ;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8' ;
exports . URL = URL ;
function _default ( name , version , hashfunc ) {
function generateUUID ( value , namespace , buf , offset ) {
if ( typeof value === 'string' ) {
value = stringToBytes ( value ) ;
}
if ( typeof namespace === 'string' ) {
namespace = ( 0 , _parse . default ) ( namespace ) ;
}
if ( namespace . length !== 16 ) {
throw TypeError ( 'Namespace must be array-like (16 iterable integer values, 0-255)' ) ;
} // Compute hash of namespace and value, Per 4.3
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
// hashfunc([...namespace, ... value])`
let bytes = new Uint8Array ( 16 + value . length ) ;
bytes . set ( namespace ) ;
bytes . set ( value , namespace . length ) ;
bytes = hashfunc ( bytes ) ;
bytes [ 6 ] = bytes [ 6 ] & 0x0f | version ;
bytes [ 8 ] = bytes [ 8 ] & 0x3f | 0x80 ;
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = bytes [ i ] ;
}
return buf ;
}
return ( 0 , _stringify . default ) ( bytes ) ;
} // Function#name is not settable on some platforms (#270)
try {
generateUUID . name = name ; // eslint-disable-next-line no-empty
} catch ( err ) { } // For CommonJS default export support
generateUUID . DNS = DNS ;
generateUUID . URL = URL ;
return generateUUID ;
}
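// Usage sketch (comment only, not executed by this bundle): the factory above backs
// the name-based v3/v5 generators; via the public `uuid` exports a v5 id is
// deterministic for a given name and namespace. The name is illustrative.
//
//   const { v5: uuidv5 } = require('uuid');
//   uuidv5('example.com', uuidv5.DNS); // same string on every call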
/***/ } ) ,
/***/ 5122 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 807 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function v4 ( options , buf , offset ) {
options = options || { } ;
const rnds = options . random || ( options . rng || _rng . default ) ( ) ; // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = rnds [ 6 ] & 0x0f | 0x40 ;
rnds [ 8 ] = rnds [ 8 ] & 0x3f | 0x80 ; // Copy bytes to buffer, if provided
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = rnds [ i ] ;
}
return buf ;
}
return ( 0 , _stringify . default ) ( rnds ) ;
}
var _default = v4 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 9120 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5998 ) ) ;
var _sha = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5274 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v5 = ( 0 , _v . default ) ( 'v5' , 0x50 , _sha . default ) ;
var _default = v5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6900 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _regex = _interopRequireDefault ( _ _nccwpck _require _ _ ( 814 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function validate ( uuid ) {
return typeof uuid === 'string' && _regex . default . test ( uuid ) ;
}
var _default = validate ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1595 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function version ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
return parseInt ( uuid . substr ( 14 , 1 ) , 16 ) ;
}
var _default = version ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 2940 :
/***/ ( ( module ) => {
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module . exports = wrappy
function wrappy ( fn , cb ) {
if ( fn && cb ) return wrappy ( fn ) ( cb )
if ( typeof fn !== 'function' )
throw new TypeError ( 'need wrapper function' )
Object . keys ( fn ) . forEach ( function ( k ) {
wrapper [ k ] = fn [ k ]
} )
return wrapper
function wrapper ( ) {
var args = new Array ( arguments . length )
for ( var i = 0 ; i < args . length ; i ++ ) {
args [ i ] = arguments [ i ]
}
var ret = fn . apply ( this , args )
var cb = args [ args . length - 1 ]
if ( typeof ret === 'function' && ret !== cb ) {
Object . keys ( cb ) . forEach ( function ( k ) {
ret [ k ] = cb [ k ]
} )
}
return ret
}
}
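// Usage sketch (comment only, not executed by this bundle): wrappy preserves own
// properties across wrapping, which is how callers like `once` use it. The wrapper
// below is an illustrative assumption, not part of this bundle.
//
//   const once = wrappy(function (cb) {
//     let called = false;
//     return function () {
//       if (called) return;
//       called = true;
//       return cb.apply(this, arguments);
//     };
//   });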
/***/ } ) ,
/***/ 9042 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . NoFileOptions = exports . Inputs = void 0 ;
/* eslint-disable no-unused-vars */
var Inputs ;
( function ( Inputs ) {
Inputs [ "Name" ] = "name" ;
Inputs [ "Path" ] = "path" ;
Inputs [ "IfNoFilesFound" ] = "if-no-files-found" ;
Inputs [ "RetentionDays" ] = "retention-days" ;
Inputs [ "IncludeHiddenFiles" ] = "include-hidden-files" ;
} ) ( Inputs = exports . Inputs || ( exports . Inputs = { } ) ) ;
var NoFileOptions ;
( function ( NoFileOptions ) {
/ * *
* Default . Output a warning but do not fail the action
* /
NoFileOptions [ "warn" ] = "warn" ;
/ * *
* Fail the action with an error message
* /
NoFileOptions [ "error" ] = "error" ;
/ * *
* Do not output any warnings or errors ; the action does not fail
* /
NoFileOptions [ "ignore" ] = "ignore" ;
} ) ( NoFileOptions = exports . NoFileOptions || ( exports . NoFileOptions = { } ) ) ;
/***/ } ) ,
/***/ 6455 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getInputs = void 0 ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const constants _1 = _ _nccwpck _require _ _ ( 9042 ) ;
/ * *
* Helper to get all the inputs for the action
* /
function getInputs ( ) {
const name = core . getInput ( constants _1 . Inputs . Name ) ;
const path = core . getInput ( constants _1 . Inputs . Path , { required : true } ) ;
const ifNoFilesFound = core . getInput ( constants _1 . Inputs . IfNoFilesFound ) ;
const noFileBehavior = constants _1 . NoFileOptions [ ifNoFilesFound ] ;
const includeHiddenFiles = core . getBooleanInput ( constants _1 . Inputs . IncludeHiddenFiles ) ;
if ( ! noFileBehavior ) {
core . setFailed ( ` Unrecognized ${ constants _1 . Inputs . IfNoFilesFound } input. Provided: ${ ifNoFilesFound } . Available options: ${ Object . keys ( constants _1 . NoFileOptions ) } ` ) ;
}
const inputs = {
artifactName : name ,
searchPath : path ,
ifNoFilesFound : noFileBehavior ,
includeHiddenFiles
} ;
const retentionDaysStr = core . getInput ( constants _1 . Inputs . RetentionDays ) ;
if ( retentionDaysStr ) {
inputs . retentionDays = parseInt ( retentionDaysStr ) ;
if ( isNaN ( inputs . retentionDays ) ) {
core . setFailed ( 'Invalid retention-days' ) ;
}
}
return inputs ;
}
exports . getInputs = getInputs ;
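// Sketch of the object getInputs() returns, under the assumption that the
// workflow supplied typical values (the literal values below are hypothetical):
//   {
//     artifactName: 'my-artifact',          // from the `name` input
//     searchPath: 'dist/**',                // from the required `path` input
//     ifNoFilesFound: NoFileOptions.warn,   // resolved from `if-no-files-found`
//     includeHiddenFiles: false,            // from `include-hidden-files`
//     retentionDays: 90                     // only present when `retention-days` is set
//   }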
/***/ } ) ,
/***/ 3930 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . findFilesToUpload = void 0 ;
const glob = _ _importStar ( _ _nccwpck _require _ _ ( 8090 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
const path _1 = _ _nccwpck _require _ _ ( 1017 ) ;
const util _1 = _ _nccwpck _require _ _ ( 3837 ) ;
const stats = ( 0 , util _1 . promisify ) ( fs _1 . stat ) ;
function getDefaultGlobOptions ( includeHiddenFiles ) {
return {
followSymbolicLinks : true ,
implicitDescendants : true ,
omitBrokenSymbolicLinks : true ,
excludeHiddenFiles : ! includeHiddenFiles
} ;
}
/ * *
* If multiple paths are specified , the least common ancestor ( LCA ) of the search paths is used as
* the delimiter to control the directory structure for the artifact . This function returns the LCA
* when given an array of search paths
*
* Example 1 : The patterns ` /foo/ ` and ` /bar/ ` return ` / `
*
* Example 2 : The patterns ` ~/foo/bar/* ` and ` ~/foo/voo/two/* ` and ` ~/foo/mo/ ` return ` ~/foo `
* /
function getMultiPathLCA ( searchPaths ) {
if ( searchPaths . length < 2 ) {
throw new Error ( 'At least two search paths must be provided' ) ;
}
const commonPaths = new Array ( ) ;
const splitPaths = new Array ( ) ;
let smallestPathLength = Number . MAX _SAFE _INTEGER ;
// split each of the search paths using the platform specific separator
for ( const searchPath of searchPaths ) {
( 0 , core _1 . debug ) ( ` Using search path ${ searchPath } ` ) ;
const splitSearchPath = path . normalize ( searchPath ) . split ( path . sep ) ;
// keep track of the smallest path length so that we don't accidentally later go out of bounds
smallestPathLength = Math . min ( smallestPathLength , splitSearchPath . length ) ;
splitPaths . push ( splitSearchPath ) ;
}
// on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
if ( searchPaths [ 0 ] . startsWith ( path . sep ) ) {
commonPaths . push ( path . sep ) ;
}
let splitIndex = 0 ;
// function to check if the paths are the same at a specific index
function isPathTheSame ( ) {
const compare = splitPaths [ 0 ] [ splitIndex ] ;
for ( let i = 1 ; i < splitPaths . length ; i ++ ) {
if ( compare !== splitPaths [ i ] [ splitIndex ] ) {
// a non-common index has been reached
return false ;
}
}
return true ;
}
// loop over all the search paths until there is a non-common ancestor or we go out of bounds
while ( splitIndex < smallestPathLength ) {
if ( ! isPathTheSame ( ) ) {
break ;
}
// if all are the same, add to the end result & increment the index
commonPaths . push ( splitPaths [ 0 ] [ splitIndex ] ) ;
splitIndex ++ ;
}
return path . join ( ... commonPaths ) ;
}
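// Worked example of getMultiPathLCA (hypothetical POSIX paths, illustration only):
//   getMultiPathLCA(['/foo/bar/baz', '/foo/bar/qux/file.txt'])
//   splits the paths into ['', 'foo', 'bar', 'baz'] and ['', 'foo', 'bar', 'qux', 'file.txt'],
//   seeds commonPaths with path.sep because the first search path is absolute,
//   copies segments while they match ('', 'foo', 'bar') and stops at 'baz' vs 'qux',
//   so path.join(...commonPaths) returns '/foo/bar', which becomes the artifact root directory.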
function findFilesToUpload ( searchPath , includeHiddenFiles ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const searchResults = [ ] ;
const globber = yield glob . create ( searchPath , getDefaultGlobOptions ( includeHiddenFiles || false ) ) ;
const rawSearchResults = yield globber . glob ( ) ;
/ *
Files are saved with case insensitivity. Uploading both a.txt and A.txt will cause files to be overwritten
Detect any files that could be overwritten for user awareness
* /
const set = new Set ( ) ;
/ *
Directories will be rejected if attempted to be uploaded . This includes just empty
directories so filter any directories out from the raw search results
* /
for ( const searchResult of rawSearchResults ) {
const fileStats = yield stats ( searchResult ) ;
// isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
if ( ! fileStats . isDirectory ( ) ) {
( 0 , core _1 . debug ) ( ` File: ${ searchResult } was found using the provided searchPath ` ) ;
searchResults . push ( searchResult ) ;
// detect any files that would be overwritten because of case insensitivity
if ( set . has ( searchResult . toLowerCase ( ) ) ) {
( 0 , core _1 . info ) ( ` Uploads are case insensitive: ${ searchResult } was detected that it will be overwritten by another file with the same path ` ) ;
}
else {
set . add ( searchResult . toLowerCase ( ) ) ;
}
}
else {
( 0 , core _1 . debug ) ( ` Removing ${ searchResult } from rawSearchResults because it is a directory ` ) ;
}
}
// Calculate the root directory for the artifact using the search paths that were utilized
const searchPaths = globber . getSearchPaths ( ) ;
if ( searchPaths . length > 1 ) {
( 0 , core _1 . info ) ( ` Multiple search paths detected. Calculating the least common ancestor of all paths ` ) ;
const lcaSearchPath = getMultiPathLCA ( searchPaths ) ;
( 0 , core _1 . info ) ( ` The least common ancestor is ${ lcaSearchPath } . This will be the root directory of the artifact ` ) ;
return {
filesToUpload : searchResults ,
rootDirectory : lcaSearchPath
} ;
}
/ *
Special case for a single file artifact that is uploaded without a directory or wildcard pattern . The directory structure is
not preserved and the root directory will be the single file's parent directory
* /
if ( searchResults . length === 1 && searchPaths [ 0 ] === searchResults [ 0 ] ) {
return {
filesToUpload : searchResults ,
rootDirectory : ( 0 , path _1 . dirname ) ( searchResults [ 0 ] )
} ;
}
return {
filesToUpload : searchResults ,
rootDirectory : searchPaths [ 0 ]
} ;
} ) ;
}
exports . findFilesToUpload = findFilesToUpload ;
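// Minimal usage sketch for findFilesToUpload (assumption: called from another
// module inside this bundle; the glob pattern is hypothetical):
//   const search = __nccwpck_require__(3930);
//   search.findFilesToUpload('dist/**', false).then(function (result) {
//     // result.filesToUpload: array of file paths matched by the glob
//     // result.rootDirectory: directory used as the artifact root
//   });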
/***/ } ) ,
/***/ 334 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const artifact _1 = _ _nccwpck _require _ _ ( 2605 ) ;
const search _1 = _ _nccwpck _require _ _ ( 3930 ) ;
const input _helper _1 = _ _nccwpck _require _ _ ( 6455 ) ;
const constants _1 = _ _nccwpck _require _ _ ( 9042 ) ;
function run ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
const inputs = ( 0 , input _helper _1 . getInputs ) ( ) ;
const searchResult = yield ( 0 , search _1 . findFilesToUpload ) ( inputs . searchPath , inputs . includeHiddenFiles ) ;
if ( searchResult . filesToUpload . length === 0 ) {
// No files were found, different use cases warrant different types of behavior if nothing is found
switch ( inputs . ifNoFilesFound ) {
case constants _1 . NoFileOptions . warn : {
core . warning ( ` No files were found with the provided path: ${ inputs . searchPath } . No artifacts will be uploaded. ` ) ;
break ;
}
case constants _1 . NoFileOptions . error : {
core . setFailed ( ` No files were found with the provided path: ${ inputs . searchPath } . No artifacts will be uploaded. ` ) ;
break ;
}
case constants _1 . NoFileOptions . ignore : {
core . info ( ` No files were found with the provided path: ${ inputs . searchPath } . No artifacts will be uploaded. ` ) ;
break ;
}
}
}
else {
const s = searchResult . filesToUpload . length === 1 ? '' : 's' ;
core . info ( ` With the provided path, there will be ${ searchResult . filesToUpload . length } file ${ s } uploaded ` ) ;
core . debug ( ` Root artifact directory is ${ searchResult . rootDirectory } ` ) ;
if ( searchResult . filesToUpload . length > 10000 ) {
core . warning ( ` There are over 10,000 files in this artifact, consider creating an archive before upload to improve the upload performance. ` ) ;
}
const artifactClient = ( 0 , artifact _1 . create ) ( ) ;
const options = {
continueOnError : false
} ;
if ( inputs . retentionDays ) {
options . retentionDays = inputs . retentionDays ;
}
const uploadResponse = yield artifactClient . uploadArtifact ( inputs . artifactName , searchResult . filesToUpload , searchResult . rootDirectory , options ) ;
if ( uploadResponse . failedItems . length > 0 ) {
core . setFailed ( ` An error was encountered when uploading ${ uploadResponse . artifactName } . There were ${ uploadResponse . failedItems . length } items that failed to upload. ` ) ;
}
else {
core . info ( ` Artifact ${ uploadResponse . artifactName } has been successfully uploaded! ` ) ;
}
}
}
catch ( error ) {
core . setFailed ( error . message ) ;
}
} ) ;
}
run ( ) ;
/***/ } ) ,
/***/ 9491 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 6113 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "crypto" ) ;
/***/ } ) ,
/***/ 2361 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 7147 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 3685 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 5687 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 1808 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 2037 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 1017 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 4074 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "perf_hooks" ) ;
/***/ } ) ,
/***/ 2781 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "stream" ) ;
/***/ } ) ,
/***/ 4404 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "tls" ) ;
/***/ } ) ,
/***/ 7310 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 3837 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 9796 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "zlib" ) ;
/***/ } )
/******/ } ) ;
/************************************************************************/
/******/ // The module cache
/******/ var _ _webpack _module _cache _ _ = { } ;
/******/
/******/ // The require function
/******/ function _ _nccwpck _require _ _ ( moduleId ) {
/******/ // Check if module is in cache
/******/ var cachedModule = _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ if ( cachedModule !== undefined ) {
/******/ return cachedModule . exports ;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = _ _webpack _module _cache _ _ [ moduleId ] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports : { }
/******/ } ;
/******/
/******/ // Execute the module function
/******/ var threw = true ;
/******/ try {
/******/ _ _webpack _modules _ _ [ moduleId ] . call ( module . exports , module , module . exports , _ _nccwpck _require _ _ ) ;
/******/ threw = false ;
/******/ } finally {
/******/ if ( threw ) delete _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module . exports ;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
/******/ if ( typeof _ _nccwpck _require _ _ !== 'undefined' ) _ _nccwpck _require _ _ . ab = _ _dirname + "/" ;
/******/
/************************************************************************/
/******/
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var _ _webpack _exports _ _ = _ _nccwpck _require _ _ ( 334 ) ;
/******/ module . exports = _ _webpack _exports _ _ ;
/******/
/******/ } ) ( )
;