diff --git a/README.md b/README.md
index 4af5dca..4782f03 100644
--- a/README.md
+++ b/README.md
@@ -618,15 +618,17 @@ credentials, while authenticated access is used only to push `myorg/myimage`.
 
 The following inputs can be used as `step.with` keys:
 
-| Name            | Type   | Default     | Description                                                                    |
-|-----------------|--------|-------------|--------------------------------------------------------------------------------|
-| `registry`      | String | `docker.io` | Server address of Docker registry. If not set then will default to Docker Hub |
-| `username`      | String |             | Username for authenticating to the Docker registry                            |
-| `password`      | String |             | Password or personal access token for authenticating the Docker registry      |
-| `scope`         | String |             | Scope for the authentication token                                            |
-| `ecr`           | String | `auto`      | Specifies whether the given registry is ECR (`auto`, `true` or `false`)       |
-| `logout`        | Bool   | `true`      | Log out from the Docker registry at the end of a job                          |
-| `registry-auth` | YAML   |             | Raw authentication to registries, defined as YAML objects                     |
+| Name             | Type   | Default     | Description                                                                   |
+|------------------|--------|-------------|-------------------------------------------------------------------------------|
+| `registry`       | String | `docker.io` | Server address of the Docker registry. If not set, defaults to Docker Hub    |
+| `username`       | String |             | Username for authenticating to the Docker registry                           |
+| `password`       | String |             | Password or personal access token for authenticating to the Docker registry  |
+| `scope`          | String |             | Scope for the authentication token                                           |
+| `ecr`            | String | `auto`      | Specifies whether the given registry is ECR (`auto`, `true` or `false`)      |
+| `logout`         | Bool   | `true`      | Log out from the Docker registry at the end of a job                         |
+| `registry-auth`  | YAML   |             | Raw authentication to registries, defined as YAML objects                    |
+| `retry-attempts` | Number | `0`         | Number of retry attempts for transient failures                              |
+| `retry-delay`    | Number | `5000`      | Delay between retries in milliseconds (uses exponential backoff)             |
 
 > [!NOTE]
 > The `registry-auth` input cannot be used with other inputs except `logout`.
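The new `retry-delay` description says the value is used with exponential backoff, but the backoff code itself is not part of this diff. For reference, here is a minimal TypeScript sketch of the conventional scheme that description implies (`backoffDelayMs` is an illustrative name, not a function from this action): the nth retry waits `retry-delay * 2^n` milliseconds.

```ts
// Illustrative sketch only -- not code from this repository.
// `baseDelayMs` plays the role of the `retry-delay` input; `attempt` is 0-based.
function backoffDelayMs(baseDelayMs: number, attempt: number): number {
  return baseDelayMs * 2 ** attempt; // with the default 5000: 5000, 10000, 20000, ...
}
```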
diff --git a/__tests__/docker.test.ts b/__tests__/docker.test.ts
index 99cd6ef..9596254 100644
--- a/__tests__/docker.test.ts
+++ b/__tests__/docker.test.ts
@@ -2,6 +2,7 @@ import {expect, jest, test} from '@jest/globals';
 import * as path from 'path';
 
 import {loginStandard, logout} from '../src/docker';
+import {RetryArgs} from '../src/context';
 import {Docker} from '@docker/actions-toolkit/lib/docker/docker';
 
@@ -62,3 +63,103 @@ test('logout calls exec', async () => {
     ignoreReturnCode: true
   });
 });
+
+test('loginStandard retries on failure', async () => {
+  jest.useFakeTimers();
+  let attemptCount = 0;
+
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
+  const execSpy = jest.spyOn(Docker, 'getExecOutput').mockImplementation(async () => {
+    attemptCount++;
+    if (attemptCount < 3) {
+      return {
+        exitCode: 1,
+        stdout: '',
+        stderr: 'Error: timeout exceeded'
+      };
+    }
+    return {
+      exitCode: 0,
+      stdout: 'Login Succeeded',
+      stderr: ''
+    };
+  });
+
+  const username = 'dbowie';
+  const password = 'groundcontrol';
+  const registry = 'https://ghcr.io';
+  const retryArgs: RetryArgs = {attempts: 3, delayMs: 100};
+
+  const loginPromise = loginStandard(registry, username, password, undefined, retryArgs);
+  await jest.runAllTimersAsync();
+  await loginPromise;
+
+  expect(execSpy).toHaveBeenCalledTimes(3);
+  expect(attemptCount).toBe(3);
+
+  jest.useRealTimers();
+});
+
+test('loginStandard does not retry when attempts is 0', async () => {
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
+  const execSpy = jest.spyOn(Docker, 'getExecOutput').mockImplementation(async () => {
+    return {
+      exitCode: 1,
+      stdout: '',
+      stderr: 'Error: timeout exceeded'
+    };
+  });
+
+  const username = 'dbowie';
+  const password = 'groundcontrol';
+  const registry = 'https://ghcr.io';
+  const retryArgs: RetryArgs = {attempts: 0, delayMs: 100};
+
+  await expect(loginStandard(registry, username, password, undefined, retryArgs)).rejects.toThrow('timeout exceeded');
+
+  expect(execSpy).toHaveBeenCalledTimes(1);
+});
+
+test('loginStandard fails after max retries', async () => {
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
+  const execSpy = jest.spyOn(Docker, 'getExecOutput').mockImplementation(async () => {
+    return {
+      exitCode: 1,
+      stdout: '',
+      stderr: 'Error: timeout exceeded'
+    };
+  });
+
+  const username = 'dbowie';
+  const password = 'groundcontrol';
+  const registry = 'https://ghcr.io';
+  const retryArgs: RetryArgs = {attempts: 2, delayMs: 10};
+
+  await expect(loginStandard(registry, username, password, undefined, retryArgs)).rejects.toThrow('timeout exceeded');
+
+  expect(execSpy).toHaveBeenCalledTimes(3);
+});
+
+test('loginStandard does not retry on 5xx errors', async () => {
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
+  const execSpy = jest.spyOn(Docker, 'getExecOutput').mockImplementation(async () => {
+    return {
+      exitCode: 1,
+      stdout: '',
+      stderr: 'Error: 500 Internal Server Error'
+    };
+  });
+
+  const username = 'dbowie';
+  const password = 'groundcontrol';
+  const registry = 'https://ghcr.io';
+  const retryArgs: RetryArgs = {attempts: 3, delayMs: 100};
+
+  await expect(loginStandard(registry, username, password, undefined, retryArgs)).rejects.toThrow('500 Internal Server Error');
+
+  expect(execSpy).toHaveBeenCalledTimes(1);
+});
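These tests pin down the retry contract without showing `src/docker.ts` itself: with `attempts: 0` the first failure propagates; a transient failure such as a timeout is retried up to `attempts` additional times with a delay between calls; and an error like `500 Internal Server Error` is treated as non-retryable and fails on the first attempt. The following is only a sketch of logic consistent with those expectations; `isTransient`, its pattern list, and the `loginWithRetry` name are assumptions, not the action's actual implementation.

```ts
import {Docker} from '@docker/actions-toolkit/lib/docker/docker';
import {RetryArgs} from './context';

// Assumed heuristic: only errors matching known-transient patterns are retried.
// The 5xx test implies server errors are deliberately excluded from this list.
function isTransient(stderr: string): boolean {
  return /timeout/i.test(stderr);
}

export async function loginWithRetry(args: string[], retry: RetryArgs): Promise<void> {
  for (let attempt = 0; ; attempt++) {
    const res = await Docker.getExecOutput(args, {ignoreReturnCode: true});
    if (res.exitCode === 0) {
      return;
    }
    const err = res.stderr.trim();
    // Give up once attempts are exhausted or the error is not transient.
    if (attempt >= retry.attempts || !isTransient(err)) {
      throw new Error(err);
    }
    // Exponential backoff: delayMs, 2*delayMs, 4*delayMs, ...
    await new Promise(resolve => setTimeout(resolve, retry.delayMs * 2 ** attempt));
  }
}
```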
diff --git a/action.yml b/action.yml
index 44c1adc..149c1fc 100644
--- a/action.yml
+++ b/action.yml
@@ -29,6 +29,14 @@ inputs:
   registry-auth:
     description: 'Raw authentication to registries, defined as YAML objects'
     required: false
+  retry-attempts:
+    description: 'Number of retry attempts for transient failures'
+    default: '0'
+    required: false
+  retry-delay:
+    description: 'Delay between retries in milliseconds (uses exponential backoff)'
+    default: '5000'
+    required: false
 
 runs:
   using: 'node20'

diff --git a/dist/index.js b/dist/index.js
index df75bd3..7a31df4 100644
--- a/dist/index.js
+++ b/dist/index.js
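action.yml exposes the new inputs as strings with string defaults, and the tests import a `RetryArgs` type from `src/context.ts`, which is not otherwise shown in this diff. A hedged sketch of what that module plausibly contains follows; the `getRetryArgs` name is an assumption, while `core.getInput` is the standard `@actions/core` API.

```ts
import * as core from '@actions/core';

export interface RetryArgs {
  attempts: number;
  delayMs: number;
}

// Assumed parsing of the action inputs; the fallbacks mirror the
// defaults declared in action.yml ('0' and '5000').
export function getRetryArgs(): RetryArgs {
  return {
    attempts: parseInt(core.getInput('retry-attempts') || '0', 10),
    delayMs: parseInt(core.getInput('retry-delay') || '5000', 10)
  };
}
```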
if(ie.kind==="SASConnString"){re=(0,fe.appendToURLPath)((0,fe.appendToURLPath)(ie.url,encodeURIComponent(V)),encodeURIComponent(se))+"?"+ie.accountSas;te=(0,oe.newPipeline)(new ne.AnonymousCredential,ee)}else{throw new Error("Connection string must be either an Account connection string or a SAS connection string")}}else{throw new Error("Expecting non-empty strings for containerName and blobName parameters")}super(re,te);this.blockBlobContext=this.storageClientContext.blockBlob;this._blobContext=this.storageClientContext.blob}withSnapshot(i){return new BlockBlobClient((0,fe.setURLParameter)(this.url,de.URLConstants.Parameters.SNAPSHOT,i.length===0?undefined:i),this.pipeline)}async query(i,d={}){(0,se.ensureCpkIfSpecified)(d.customerProvidedKey,this.isHttps);if(!K.isNodeLike){throw new Error("This operation currently is only supported in Node.js.")}return pe.tracingClient.withSpan("BlockBlobClient-query",d,(async p=>{const v=(0,fe.assertResponse)(await this._blobContext.query({abortSignal:d.abortSignal,queryRequest:{queryType:"SQL",expression:i,inputSerialization:(0,fe.toQuerySerialization)(d.inputTextConfiguration),outputSerialization:(0,fe.toQuerySerialization)(d.outputTextConfiguration)},leaseAccessConditions:d.conditions,modifiedAccessConditions:{...d.conditions,ifTags:d.conditions?.tagConditions},cpkInfo:d.customerProvidedKey,tracingOptions:p.tracingOptions}));return new re.BlobQueryResponse(v,{abortSignal:d.abortSignal,onProgress:d.onProgress,onError:d.onError})}))}async upload(i,d,p={}){p.conditions=p.conditions||{};(0,se.ensureCpkIfSpecified)(p.customerProvidedKey,this.isHttps);return pe.tracingClient.withSpan("BlockBlobClient-upload",p,(async v=>(0,fe.assertResponse)(await this.blockBlobContext.upload(d,i,{abortSignal:p.abortSignal,blobHttpHeaders:p.blobHTTPHeaders,leaseAccessConditions:p.conditions,metadata:p.metadata,modifiedAccessConditions:{...p.conditions,ifTags:p.conditions?.tagConditions},requestOptions:{onUploadProgress:p.onProgress},cpkInfo:p.customerProvidedKey,encryptionScope:p.encryptionScope,immutabilityPolicyExpiry:p.immutabilityPolicy?.expiriesOn,immutabilityPolicyMode:p.immutabilityPolicy?.policyMode,legalHold:p.legalHold,tier:(0,se.toAccessTier)(p.tier),blobTagsString:(0,fe.toBlobTagsString)(p.tags),tracingOptions:v.tracingOptions}))))}async syncUploadFromURL(i,d={}){d.conditions=d.conditions||{};(0,se.ensureCpkIfSpecified)(d.customerProvidedKey,this.isHttps);return pe.tracingClient.withSpan("BlockBlobClient-syncUploadFromURL",d,(async p=>(0,fe.assertResponse)(await this.blockBlobContext.putBlobFromUrl(0,i,{...d,blobHttpHeaders:d.blobHTTPHeaders,leaseAccessConditions:d.conditions,modifiedAccessConditions:{...d.conditions,ifTags:d.conditions?.tagConditions},sourceModifiedAccessConditions:{sourceIfMatch:d.sourceConditions?.ifMatch,sourceIfModifiedSince:d.sourceConditions?.ifModifiedSince,sourceIfNoneMatch:d.sourceConditions?.ifNoneMatch,sourceIfUnmodifiedSince:d.sourceConditions?.ifUnmodifiedSince,sourceIfTags:d.sourceConditions?.tagConditions},cpkInfo:d.customerProvidedKey,copySourceAuthorization:(0,fe.httpAuthorizationToString)(d.sourceAuthorization),tier:(0,se.toAccessTier)(d.tier),blobTagsString:(0,fe.toBlobTagsString)(d.tags),copySourceTags:d.copySourceTags,fileRequestIntent:d.sourceShareTokenIntent,tracingOptions:p.tracingOptions}))))}async stageBlock(i,d,p,v={}){(0,se.ensureCpkIfSpecified)(v.customerProvidedKey,this.isHttps);return pe.tracingClient.withSpan("BlockBlobClient-stageBlock",v,(async V=>(0,fe.assertResponse)(await 
this.blockBlobContext.stageBlock(i,p,d,{abortSignal:v.abortSignal,leaseAccessConditions:v.conditions,requestOptions:{onUploadProgress:v.onProgress},transactionalContentMD5:v.transactionalContentMD5,transactionalContentCrc64:v.transactionalContentCrc64,cpkInfo:v.customerProvidedKey,encryptionScope:v.encryptionScope,tracingOptions:V.tracingOptions}))))}async stageBlockFromURL(i,d,p=0,v,V={}){(0,se.ensureCpkIfSpecified)(V.customerProvidedKey,this.isHttps);return pe.tracingClient.withSpan("BlockBlobClient-stageBlockFromURL",V,(async K=>(0,fe.assertResponse)(await this.blockBlobContext.stageBlockFromURL(i,0,d,{abortSignal:V.abortSignal,leaseAccessConditions:V.conditions,sourceContentMD5:V.sourceContentMD5,sourceContentCrc64:V.sourceContentCrc64,sourceRange:p===0&&!v?undefined:(0,ce.rangeToString)({offset:p,count:v}),cpkInfo:V.customerProvidedKey,encryptionScope:V.encryptionScope,copySourceAuthorization:(0,fe.httpAuthorizationToString)(V.sourceAuthorization),fileRequestIntent:V.sourceShareTokenIntent,tracingOptions:K.tracingOptions}))))}async commitBlockList(i,d={}){d.conditions=d.conditions||{};(0,se.ensureCpkIfSpecified)(d.customerProvidedKey,this.isHttps);return pe.tracingClient.withSpan("BlockBlobClient-commitBlockList",d,(async p=>(0,fe.assertResponse)(await this.blockBlobContext.commitBlockList({latest:i},{abortSignal:d.abortSignal,blobHttpHeaders:d.blobHTTPHeaders,leaseAccessConditions:d.conditions,metadata:d.metadata,modifiedAccessConditions:{...d.conditions,ifTags:d.conditions?.tagConditions},cpkInfo:d.customerProvidedKey,encryptionScope:d.encryptionScope,immutabilityPolicyExpiry:d.immutabilityPolicy?.expiriesOn,immutabilityPolicyMode:d.immutabilityPolicy?.policyMode,legalHold:d.legalHold,tier:(0,se.toAccessTier)(d.tier),blobTagsString:(0,fe.toBlobTagsString)(d.tags),tracingOptions:p.tracingOptions}))))}async getBlockList(i,d={}){return pe.tracingClient.withSpan("BlockBlobClient-getBlockList",d,(async p=>{const v=(0,fe.assertResponse)(await this.blockBlobContext.getBlockList(i,{abortSignal:d.abortSignal,leaseAccessConditions:d.conditions,modifiedAccessConditions:{...d.conditions,ifTags:d.conditions?.tagConditions},tracingOptions:p.tracingOptions}));if(!v.committedBlocks){v.committedBlocks=[]}if(!v.uncommittedBlocks){v.uncommittedBlocks=[]}return v}))}async uploadData(i,d={}){return pe.tracingClient.withSpan("BlockBlobClient-uploadData",d,(async d=>{if(K.isNodeLike){let p;if(i instanceof Buffer){p=i}else if(i instanceof ArrayBuffer){p=Buffer.from(i)}else{i=i;p=Buffer.from(i.buffer,i.byteOffset,i.byteLength)}return this.uploadSeekableInternal(((i,d)=>p.slice(i,i+d)),p.byteLength,d)}else{const p=new Blob([i]);return this.uploadSeekableInternal(((i,d)=>p.slice(i,i+d)),p.size,d)}}))}async uploadBrowserData(i,d={}){return pe.tracingClient.withSpan("BlockBlobClient-uploadBrowserData",d,(async d=>{const p=new Blob([i]);return this.uploadSeekableInternal(((i,d)=>p.slice(i,i+d)),p.size,d)}))}async uploadSeekableInternal(i,d,p={}){let v=p.blockSize??0;if(v<0||v>de.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES){throw new RangeError(`blockSize option must be >= 0 and <= ${de.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`)}const V=p.maxSingleShotSize??de.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;if(V<0||V>de.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES){throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${de.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`)}if(v===0){if(d>de.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES*de.BLOCK_BLOB_MAX_BLOCKS){throw new RangeError(`${d} is too larger to upload to a block 
blob.`)}if(d>V){v=Math.ceil(d/de.BLOCK_BLOB_MAX_BLOCKS);if(v
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandValue = toCommandValue;\nexports.toCommandProperties = toCommandProperties;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\n//# sourceMappingURL=utils.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"1.0.4\";\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction requestLog(octokit) {\n octokit.hook.wrap(\"request\", (request, options) => {\n octokit.log.debug(\"request\", options);\n const start = Date.now();\n const requestOptions = octokit.request.endpoint.parse(options);\n const path = requestOptions.url.replace(options.baseUrl, \"\");\n return request(options).then(response => {\n octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`);\n return response;\n }).catch(error => {\n octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`);\n throw error;\n });\n });\n}\nrequestLog.VERSION = VERSION;\n\nexports.requestLog = requestLog;\n//# sourceMappingURL=index.js.map\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FinalizeCacheError = exports.ReserveCacheError = exports.ValidationError = void 0;\nexports.isFeatureAvailable = isFeatureAvailable;\nexports.restoreCache = restoreCache;\nexports.saveCache = saveCache;\nconst core = __importStar(require(\"@actions/core\"));\nconst path = __importStar(require(\"path\"));\nconst utils = __importStar(require(\"./internal/cacheUtils\"));\nconst cacheHttpClient = __importStar(require(\"./internal/cacheHttpClient\"));\nconst cacheTwirpClient = __importStar(require(\"./internal/shared/cacheTwirpClient\"));\nconst config_1 = require(\"./internal/config\");\nconst tar_1 = require(\"./internal/tar\");\nconst http_client_1 = require(\"@actions/http-client\");\nclass ValidationError extends Error {\n constructor(message) {\n super(message);\n this.name = 'ValidationError';\n Object.setPrototypeOf(this, ValidationError.prototype);\n }\n}\nexports.ValidationError = ValidationError;\nclass ReserveCacheError extends Error {\n constructor(message) {\n super(message);\n this.name = 'ReserveCacheError';\n Object.setPrototypeOf(this, ReserveCacheError.prototype);\n }\n}\nexports.ReserveCacheError = ReserveCacheError;\nclass FinalizeCacheError extends Error {\n constructor(message) {\n super(message);\n this.name = 'FinalizeCacheError';\n Object.setPrototypeOf(this, FinalizeCacheError.prototype);\n }\n}\nexports.FinalizeCacheError = FinalizeCacheError;\nfunction checkPaths(paths) {\n if (!paths || paths.length === 0) {\n throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);\n }\n}\nfunction checkKey(key) {\n if (key.length > 512) {\n throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`);\n }\n const regex = /^[^,]*$/;\n if (!regex.test(key)) 
{\n throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);\n }\n}\n/**\n * isFeatureAvailable to check the presence of Actions cache service\n *\n * @returns boolean return true if Actions cache service feature is available, otherwise false\n */\nfunction isFeatureAvailable() {\n const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();\n // Check availability based on cache service version\n switch (cacheServiceVersion) {\n case 'v2':\n // For v2, we need ACTIONS_RESULTS_URL\n return !!process.env['ACTIONS_RESULTS_URL'];\n case 'v1':\n default:\n // For v1, we only need ACTIONS_CACHE_URL\n return !!process.env['ACTIONS_CACHE_URL'];\n }\n}\n/**\n * Restores cache from keys\n *\n * @param paths a list of file paths to restore from the cache\n * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.\n * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey\n * @param downloadOptions cache download options\n * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform\n * @returns string returns the key for the cache hit, otherwise returns undefined\n */\nfunction restoreCache(paths_1, primaryKey_1, restoreKeys_1, options_1) {\n return __awaiter(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {\n const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();\n core.debug(`Cache service version: ${cacheServiceVersion}`);\n checkPaths(paths);\n switch (cacheServiceVersion) {\n case 'v2':\n return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);\n case 'v1':\n default:\n return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);\n }\n });\n}\n/**\n * Restores cache using the legacy Cache Service\n *\n * @param paths a list of file paths to restore from the cache\n * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.\n * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey\n * @param options cache download options\n * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform\n * @returns string returns the key for the cache hit, otherwise returns undefined\n */\nfunction restoreCacheV1(paths_1, primaryKey_1, restoreKeys_1, options_1) {\n return __awaiter(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {\n restoreKeys = restoreKeys || [];\n const keys = [primaryKey, ...restoreKeys];\n core.debug('Resolved Keys:');\n core.debug(JSON.stringify(keys));\n if (keys.length > 10) {\n throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);\n }\n for (const key of keys) {\n checkKey(key);\n }\n const compressionMethod = yield utils.getCompressionMethod();\n let archivePath = '';\n try {\n // path are needed to compute version\n const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {\n compressionMethod,\n enableCrossOsArchive\n });\n if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {\n // Cache not found\n return undefined;\n }\n if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) {\n core.info('Lookup only - skipping download');\n return cacheEntry.cacheKey;\n }\n archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));\n core.debug(`Archive Path: ${archivePath}`);\n // Download the cache from the cache entry\n yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);\n if (core.isDebug()) {\n yield (0, tar_1.listTar)(archivePath, compressionMethod);\n }\n const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);\n core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);\n yield (0, tar_1.extractTar)(archivePath, compressionMethod);\n core.info('Cache restored successfully');\n return cacheEntry.cacheKey;\n }\n catch (error) {\n const typedError = error;\n if (typedError.name === ValidationError.name) {\n throw error;\n }\n else {\n // warn on cache restore failure and continue build\n // Log server errors (5xx) as errors, all other errors as warnings\n if (typedError instanceof http_client_1.HttpClientError &&\n typeof typedError.statusCode === 'number' &&\n typedError.statusCode >= 500) {\n core.error(`Failed to restore: ${error.message}`);\n }\n else {\n core.warning(`Failed to restore: ${error.message}`);\n }\n }\n }\n finally {\n // Try to delete the archive to save space\n try {\n yield utils.unlinkFile(archivePath);\n }\n catch (error) {\n core.debug(`Failed to delete archive: ${error}`);\n }\n }\n return undefined;\n });\n}\n/**\n * Restores cache using Cache Service v2\n *\n * @param paths a list of file paths to restore from the cache\n * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching\n * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey\n * @param downloadOptions cache download options\n * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform\n * @returns string returns the key for the cache hit, otherwise returns undefined\n */\nfunction restoreCacheV2(paths_1, primaryKey_1, restoreKeys_1, options_1) {\n return __awaiter(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {\n // Override UploadOptions to force the use of Azure\n options = Object.assign(Object.assign({}, options), { useAzureSdk: true });\n restoreKeys = restoreKeys || [];\n const keys = [primaryKey, ...restoreKeys];\n core.debug('Resolved Keys:');\n core.debug(JSON.stringify(keys));\n if (keys.length > 10) {\n throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);\n }\n for (const key of keys) {\n checkKey(key);\n }\n let archivePath = '';\n try {\n const twirpClient = cacheTwirpClient.internalCacheTwirpClient();\n const compressionMethod = yield utils.getCompressionMethod();\n const request = {\n key: primaryKey,\n restoreKeys,\n version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive)\n };\n const response = yield twirpClient.GetCacheEntryDownloadURL(request);\n if (!response.ok) {\n core.debug(`Cache not found for version ${request.version} of keys: ${keys.join(', ')}`);\n return undefined;\n }\n const isRestoreKeyMatch = request.key !== response.matchedKey;\n if (isRestoreKeyMatch) {\n core.info(`Cache hit for restore-key: ${response.matchedKey}`);\n }\n else {\n core.info(`Cache hit for: ${response.matchedKey}`);\n }\n if (options === null || 
options === void 0 ? void 0 : options.lookupOnly) {\n core.info('Lookup only - skipping download');\n return response.matchedKey;\n }\n archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));\n core.debug(`Archive path: ${archivePath}`);\n core.debug(`Starting download of archive to: ${archivePath}`);\n yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options);\n const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);\n core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);\n if (core.isDebug()) {\n yield (0, tar_1.listTar)(archivePath, compressionMethod);\n }\n yield (0, tar_1.extractTar)(archivePath, compressionMethod);\n core.info('Cache restored successfully');\n return response.matchedKey;\n }\n catch (error) {\n const typedError = error;\n if (typedError.name === ValidationError.name) {\n throw error;\n }\n else {\n // Supress all non-validation cache related errors because caching should be optional\n // Log server errors (5xx) as errors, all other errors as warnings\n if (typedError instanceof http_client_1.HttpClientError &&\n typeof typedError.statusCode === 'number' &&\n typedError.statusCode >= 500) {\n core.error(`Failed to restore: ${error.message}`);\n }\n else {\n core.warning(`Failed to restore: ${error.message}`);\n }\n }\n }\n finally {\n try {\n if (archivePath) {\n yield utils.unlinkFile(archivePath);\n }\n }\n catch (error) {\n core.debug(`Failed to delete archive: ${error}`);\n }\n }\n return undefined;\n });\n}\n/**\n * Saves a list of files with the specified key\n *\n * @param paths a list of file paths to be cached\n * @param key an explicit key for restoring the cache\n * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform\n * @param options cache upload options\n * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails\n */\nfunction saveCache(paths_1, key_1, options_1) {\n return __awaiter(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {\n const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();\n core.debug(`Cache service version: ${cacheServiceVersion}`);\n checkPaths(paths);\n checkKey(key);\n switch (cacheServiceVersion) {\n case 'v2':\n return yield saveCacheV2(paths, key, options, enableCrossOsArchive);\n case 'v1':\n default:\n return yield saveCacheV1(paths, key, options, enableCrossOsArchive);\n }\n });\n}\n/**\n * Save cache using the legacy Cache Service\n *\n * @param paths\n * @param key\n * @param options\n * @param enableCrossOsArchive\n * @returns\n */\nfunction saveCacheV1(paths_1, key_1, options_1) {\n return __awaiter(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {\n var _a, _b, _c, _d, _e;\n const compressionMethod = yield utils.getCompressionMethod();\n let cacheId = -1;\n const cachePaths = yield utils.resolvePaths(paths);\n core.debug('Cache Paths:');\n core.debug(`${JSON.stringify(cachePaths)}`);\n if (cachePaths.length === 0) {\n throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);\n }\n const archiveFolder = yield utils.createTempDirectory();\n const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));\n core.debug(`Archive Path: ${archivePath}`);\n try {\n yield (0, 
tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);\n if (core.isDebug()) {\n yield (0, tar_1.listTar)(archivePath, compressionMethod);\n }\n const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit\n const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);\n core.debug(`File Size: ${archiveFileSize}`);\n // For GHES, this check will take place in ReserveCache API with enterprise file size limit\n if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) {\n throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);\n }\n core.debug('Reserving Cache');\n const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {\n compressionMethod,\n enableCrossOsArchive,\n cacheSize: archiveFileSize\n });\n if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {\n cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;\n }\n else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {\n throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);\n }\n else {\n throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`);\n }\n core.debug(`Saving Cache (ID: ${cacheId})`);\n yield cacheHttpClient.saveCache(cacheId, archivePath, '', options);\n }\n catch (error) {\n const typedError = error;\n if (typedError.name === ValidationError.name) {\n throw error;\n }\n else if (typedError.name === ReserveCacheError.name) {\n core.info(`Failed to save: ${typedError.message}`);\n }\n else {\n // Log server errors (5xx) as errors, all other errors as warnings\n if (typedError instanceof http_client_1.HttpClientError &&\n typeof typedError.statusCode === 'number' &&\n typedError.statusCode >= 500) {\n core.error(`Failed to save: ${typedError.message}`);\n }\n else {\n core.warning(`Failed to save: ${typedError.message}`);\n }\n }\n }\n finally {\n // Try to delete the archive to save space\n try {\n yield utils.unlinkFile(archivePath);\n }\n catch (error) {\n core.debug(`Failed to delete archive: ${error}`);\n }\n }\n return cacheId;\n });\n}\n/**\n * Save cache using Cache Service v2\n *\n * @param paths a list of file paths to restore from the cache\n * @param key an explicit key for restoring the cache\n * @param options cache upload options\n * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform\n * @returns\n */\nfunction saveCacheV2(paths_1, key_1, options_1) {\n return __awaiter(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {\n // Override UploadOptions to force the use of Azure\n // ...options goes first because we want to override the default values\n // set in UploadOptions with these specific figures\n options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true });\n const compressionMethod = yield utils.getCompressionMethod();\n const twirpClient = cacheTwirpClient.internalCacheTwirpClient();\n let cacheId = -1;\n const cachePaths = yield utils.resolvePaths(paths);\n core.debug('Cache Paths:');\n core.debug(`${JSON.stringify(cachePaths)}`);\n if (cachePaths.length === 0) {\n throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);\n }\n const archiveFolder = yield utils.createTempDirectory();\n const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));\n core.debug(`Archive Path: ${archivePath}`);\n try {\n yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);\n if (core.isDebug()) {\n yield (0, tar_1.listTar)(archivePath, compressionMethod);\n }\n const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);\n core.debug(`File Size: ${archiveFileSize}`);\n // Set the archive size in the options, will be used to display the upload progress\n options.archiveSizeBytes = archiveFileSize;\n core.debug('Reserving Cache');\n const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);\n const request = {\n key,\n version\n };\n let signedUploadUrl;\n try {\n const response = yield twirpClient.CreateCacheEntry(request);\n if (!response.ok) {\n if (response.message) {\n core.warning(`Cache reservation failed: ${response.message}`);\n }\n throw new Error(response.message || 'Response was not ok');\n }\n signedUploadUrl = response.signedUploadUrl;\n }\n catch (error) {\n core.debug(`Failed to reserve cache: ${error}`);\n throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);\n }\n core.debug(`Attempting 
to upload cache located at: ${archivePath}`);\n yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options);\n const finalizeRequest = {\n key,\n version,\n sizeBytes: `${archiveFileSize}`\n };\n const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);\n core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);\n if (!finalizeResponse.ok) {\n if (finalizeResponse.message) {\n throw new FinalizeCacheError(finalizeResponse.message);\n }\n throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);\n }\n cacheId = parseInt(finalizeResponse.entryId);\n }\n catch (error) {\n const typedError = error;\n if (typedError.name === ValidationError.name) {\n throw error;\n }\n else if (typedError.name === ReserveCacheError.name) {\n core.info(`Failed to save: ${typedError.message}`);\n }\n else if (typedError.name === FinalizeCacheError.name) {\n core.warning(typedError.message);\n }\n else {\n // Log server errors (5xx) as errors, all other errors as warnings\n if (typedError instanceof http_client_1.HttpClientError &&\n typeof typedError.statusCode === 'number' &&\n typedError.statusCode >= 500) {\n core.error(`Failed to save: ${typedError.message}`);\n }\n else {\n core.warning(`Failed to save: ${typedError.message}`);\n }\n }\n }\n finally {\n // Try to delete the archive to save space\n try {\n yield utils.unlinkFile(archivePath);\n }\n catch (error) {\n core.debug(`Failed to delete archive: ${error}`);\n }\n }\n return cacheId;\n });\n}\n//# sourceMappingURL=cache.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CacheService = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;\n// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies\n// @generated from protobuf file \"results/api/v1/cache.proto\" (package \"github.actions.results.api.v1\", syntax proto3)\n// tslint:disable\nconst runtime_rpc_1 = require(\"@protobuf-ts/runtime-rpc\");\nconst runtime_1 = require(\"@protobuf-ts/runtime\");\nconst runtime_2 = require(\"@protobuf-ts/runtime\");\nconst runtime_3 = require(\"@protobuf-ts/runtime\");\nconst runtime_4 = require(\"@protobuf-ts/runtime\");\nconst runtime_5 = require(\"@protobuf-ts/runtime\");\nconst cachemetadata_1 = require(\"../../entities/v1/cachemetadata\");\n// @generated message type with reflection information, may provide speed optimized methods\nclass CreateCacheEntryRequest$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.api.v1.CreateCacheEntryRequest\", [\n { no: 1, name: \"metadata\", kind: \"message\", T: () => cachemetadata_1.CacheMetadata },\n { no: 2, name: \"key\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ },\n { no: 3, name: \"version\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ }\n ]);\n }\n create(value) {\n const message = { key: \"\", version: \"\" };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:\n message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);\n break;\n case /* string key */ 2:\n message.key = reader.string();\n break;\n case /* string version */ 3:\n message.version = reader.string();\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */\n if (message.metadata)\n cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();\n /* string key = 2; */\n if (message.key !== \"\")\n writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);\n /* string version = 3; */\n if (message.version !== \"\")\n writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version);\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest\n */\nexports.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();\n// @generated message type with reflection information, may provide speed optimized methods\nclass CreateCacheEntryResponse$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.api.v1.CreateCacheEntryResponse\", [\n { no: 1, name: \"ok\", kind: \"scalar\", T: 8 /*ScalarType.BOOL*/ },\n { no: 2, name: \"signed_upload_url\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ },\n { no: 3, name: \"message\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ }\n ]);\n }\n create(value) {\n const message = { ok: false, signedUploadUrl: \"\", message: \"\" };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* bool ok */ 1:\n message.ok = reader.bool();\n break;\n case /* string signed_upload_url */ 2:\n message.signedUploadUrl = reader.string();\n break;\n case /* string message */ 3:\n message.message = reader.string();\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* bool ok = 1; */\n if (message.ok !== false)\n writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);\n /* string signed_upload_url = 2; */\n if (message.signedUploadUrl !== \"\")\n writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);\n /* string message = 3; */\n if (message.message !== \"\")\n writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse\n */\nexports.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();\n// @generated message type with reflection information, may provide speed optimized methods\nclass FinalizeCacheEntryUploadRequest$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.api.v1.FinalizeCacheEntryUploadRequest\", [\n { no: 1, name: \"metadata\", kind: \"message\", T: () => cachemetadata_1.CacheMetadata },\n { no: 2, name: \"key\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ },\n { no: 3, name: \"size_bytes\", kind: \"scalar\", T: 3 /*ScalarType.INT64*/ },\n { no: 4, name: \"version\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ }\n ]);\n }\n create(value) {\n const message = { key: \"\", sizeBytes: \"0\", version: \"\" };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:\n message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);\n break;\n case /* string key */ 2:\n message.key = reader.string();\n break;\n case /* int64 size_bytes */ 3:\n message.sizeBytes = reader.int64().toString();\n break;\n case /* string version */ 4:\n message.version = reader.string();\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */\n if (message.metadata)\n cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();\n /* string key = 2; */\n if (message.key !== \"\")\n writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);\n /* int64 size_bytes = 3; */\n if (message.sizeBytes !== \"0\")\n writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);\n /* string version = 4; */\n if (message.version !== \"\")\n writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest\n */\nexports.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();\n// @generated message type with reflection information, may provide speed optimized methods\nclass FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.api.v1.FinalizeCacheEntryUploadResponse\", [\n { no: 1, name: \"ok\", kind: \"scalar\", T: 8 /*ScalarType.BOOL*/ },\n { no: 2, name: \"entry_id\", kind: \"scalar\", T: 3 /*ScalarType.INT64*/ },\n { no: 3, name: \"message\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ }\n ]);\n }\n create(value) {\n const message = { ok: false, entryId: \"0\", message: \"\" };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* bool ok */ 1:\n message.ok = reader.bool();\n break;\n case /* int64 entry_id */ 2:\n message.entryId = reader.int64().toString();\n break;\n case /* string message */ 3:\n message.message = reader.string();\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* bool ok = 1; */\n if (message.ok !== false)\n writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);\n /* int64 entry_id = 2; */\n if (message.entryId !== \"0\")\n writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);\n /* string message = 3; */\n if (message.message !== \"\")\n writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse\n */\nexports.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();\n// @generated message type with reflection information, may provide speed optimized methods\nclass GetCacheEntryDownloadURLRequest$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.api.v1.GetCacheEntryDownloadURLRequest\", [\n { no: 1, name: \"metadata\", kind: \"message\", T: () => cachemetadata_1.CacheMetadata },\n { no: 2, name: \"key\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ },\n { no: 3, name: \"restore_keys\", kind: \"scalar\", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },\n { no: 4, name: \"version\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ }\n ]);\n }\n create(value) {\n const message = { key: \"\", restoreKeys: [], version: \"\" };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:\n message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);\n break;\n case /* string key */ 2:\n message.key = reader.string();\n break;\n case /* repeated string restore_keys */ 3:\n message.restoreKeys.push(reader.string());\n break;\n case /* string version */ 4:\n message.version = reader.string();\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */\n if (message.metadata)\n cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();\n /* string key = 2; */\n if (message.key !== \"\")\n writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);\n /* repeated string restore_keys = 3; */\n for (let i = 0; i < message.restoreKeys.length; i++)\n writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);\n /* string version = 4; */\n if (message.version !== \"\")\n writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest\n */\nexports.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();\n// @generated message type with reflection information, may provide speed optimized methods\nclass GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.api.v1.GetCacheEntryDownloadURLResponse\", [\n { no: 1, name: \"ok\", kind: \"scalar\", T: 8 /*ScalarType.BOOL*/ },\n { no: 2, name: \"signed_download_url\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ },\n { no: 3, name: \"matched_key\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ }\n ]);\n }\n create(value) {\n const message = { ok: false, signedDownloadUrl: \"\", matchedKey: \"\" };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* bool ok */ 1:\n message.ok = reader.bool();\n break;\n case /* string signed_download_url */ 2:\n message.signedDownloadUrl = reader.string();\n break;\n case /* string matched_key */ 3:\n message.matchedKey = reader.string();\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* bool ok = 1; */\n if (message.ok !== false)\n writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);\n /* string signed_download_url = 2; */\n if (message.signedDownloadUrl !== \"\")\n writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);\n /* string matched_key = 3; */\n if (message.matchedKey !== \"\")\n writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse\n */\nexports.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();\n/**\n * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService\n */\nexports.CacheService = new runtime_rpc_1.ServiceType(\"github.actions.results.api.v1.CacheService\", [\n { name: \"CreateCacheEntry\", options: {}, I: exports.CreateCacheEntryRequest, O: exports.CreateCacheEntryResponse },\n { name: \"FinalizeCacheEntryUpload\", options: {}, I: exports.FinalizeCacheEntryUploadRequest, O: exports.FinalizeCacheEntryUploadResponse },\n { name: \"GetCacheEntryDownloadURL\", options: {}, I: exports.GetCacheEntryDownloadURLRequest, O: exports.GetCacheEntryDownloadURLResponse }\n]);\n//# sourceMappingURL=cache.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CacheServiceClientProtobuf = exports.CacheServiceClientJSON = void 0;\nconst cache_1 = require(\"./cache\");\nclass CacheServiceClientJSON {\n constructor(rpc) {\n this.rpc = rpc;\n this.CreateCacheEntry.bind(this);\n this.FinalizeCacheEntryUpload.bind(this);\n this.GetCacheEntryDownloadURL.bind(this);\n }\n CreateCacheEntry(request) {\n const data = cache_1.CreateCacheEntryRequest.toJson(request, {\n useProtoFieldName: true,\n emitDefaultValues: false,\n });\n const promise = this.rpc.request(\"github.actions.results.api.v1.CacheService\", \"CreateCacheEntry\", \"application/json\", data);\n return promise.then((data) => cache_1.CreateCacheEntryResponse.fromJson(data, {\n ignoreUnknownFields: true,\n }));\n }\n FinalizeCacheEntryUpload(request) {\n const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, {\n useProtoFieldName: true,\n emitDefaultValues: false,\n });\n const promise = this.rpc.request(\"github.actions.results.api.v1.CacheService\", \"FinalizeCacheEntryUpload\", \"application/json\", data);\n return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data, {\n ignoreUnknownFields: true,\n }));\n }\n GetCacheEntryDownloadURL(request) {\n const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, {\n useProtoFieldName: true,\n emitDefaultValues: false,\n });\n const promise = this.rpc.request(\"github.actions.results.api.v1.CacheService\", \"GetCacheEntryDownloadURL\", \"application/json\", data);\n return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data, {\n ignoreUnknownFields: true,\n }));\n }\n}\nexports.CacheServiceClientJSON = CacheServiceClientJSON;\nclass CacheServiceClientProtobuf {\n constructor(rpc) {\n this.rpc = rpc;\n this.CreateCacheEntry.bind(this);\n this.FinalizeCacheEntryUpload.bind(this);\n this.GetCacheEntryDownloadURL.bind(this);\n }\n CreateCacheEntry(request) {\n const data = cache_1.CreateCacheEntryRequest.toBinary(request);\n const promise = this.rpc.request(\"github.actions.results.api.v1.CacheService\", \"CreateCacheEntry\", \"application/protobuf\", data);\n return promise.then((data) => cache_1.CreateCacheEntryResponse.fromBinary(data));\n }\n FinalizeCacheEntryUpload(request) {\n const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request);\n const promise = this.rpc.request(\"github.actions.results.api.v1.CacheService\", \"FinalizeCacheEntryUpload\", \"application/protobuf\", data);\n return promise.then((data) => 
cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data));\n }\n GetCacheEntryDownloadURL(request) {\n const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request);\n const promise = this.rpc.request(\"github.actions.results.api.v1.CacheService\", \"GetCacheEntryDownloadURL\", \"application/protobuf\", data);\n return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data));\n }\n}\nexports.CacheServiceClientProtobuf = CacheServiceClientProtobuf;\n//# sourceMappingURL=cache.twirp-client.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CacheMetadata = void 0;\nconst runtime_1 = require(\"@protobuf-ts/runtime\");\nconst runtime_2 = require(\"@protobuf-ts/runtime\");\nconst runtime_3 = require(\"@protobuf-ts/runtime\");\nconst runtime_4 = require(\"@protobuf-ts/runtime\");\nconst runtime_5 = require(\"@protobuf-ts/runtime\");\nconst cachescope_1 = require(\"./cachescope\");\n// @generated message type with reflection information, may provide speed optimized methods\nclass CacheMetadata$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.entities.v1.CacheMetadata\", [\n { no: 1, name: \"repository_id\", kind: \"scalar\", T: 3 /*ScalarType.INT64*/ },\n { no: 2, name: \"scope\", kind: \"message\", repeat: 1 /*RepeatType.PACKED*/, T: () => cachescope_1.CacheScope }\n ]);\n }\n create(value) {\n const message = { repositoryId: \"0\", scope: [] };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* int64 repository_id */ 1:\n message.repositoryId = reader.int64().toString();\n break;\n case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:\n message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options));\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* int64 repository_id = 1; */\n if (message.repositoryId !== \"0\")\n writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId);\n /* repeated github.actions.results.entities.v1.CacheScope scope = 2; */\n for (let i = 0; i < message.scope.length; i++)\n cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata\n */\nexports.CacheMetadata = new CacheMetadata$Type();\n//# sourceMappingURL=cachemetadata.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CacheScope = void 0;\nconst runtime_1 = require(\"@protobuf-ts/runtime\");\nconst runtime_2 = require(\"@protobuf-ts/runtime\");\nconst runtime_3 = require(\"@protobuf-ts/runtime\");\nconst runtime_4 = require(\"@protobuf-ts/runtime\");\nconst runtime_5 = require(\"@protobuf-ts/runtime\");\n// @generated message type with reflection information, may provide speed optimized methods\nclass CacheScope$Type extends runtime_5.MessageType {\n constructor() {\n super(\"github.actions.results.entities.v1.CacheScope\", [\n { no: 1, name: \"scope\", kind: \"scalar\", T: 9 /*ScalarType.STRING*/ },\n { no: 2, name: \"permission\", kind: \"scalar\", T: 3 /*ScalarType.INT64*/ }\n ]);\n }\n create(value) {\n const message = { scope: \"\", permission: \"0\" };\n globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });\n if (value !== undefined)\n (0, runtime_3.reflectionMergePartial)(this, message, value);\n return message;\n }\n internalBinaryRead(reader, length, options, target) {\n let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;\n while (reader.pos < end) {\n let [fieldNo, wireType] = reader.tag();\n switch (fieldNo) {\n case /* string scope */ 1:\n message.scope = reader.string();\n break;\n case /* int64 permission */ 2:\n message.permission = reader.int64().toString();\n break;\n default:\n let u = options.readUnknownField;\n if (u === \"throw\")\n throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);\n let d = reader.skip(wireType);\n if (u !== false)\n (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);\n }\n }\n return message;\n }\n internalBinaryWrite(message, writer, options) {\n /* string scope = 1; */\n if (message.scope !== \"\")\n writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope);\n /* int64 permission = 2; */\n if (message.permission !== \"0\")\n writer.tag(2, runtime_1.WireType.Varint).int64(message.permission);\n let u = options.writeUnknownFields;\n if (u !== false)\n (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);\n return writer;\n }\n}\n/**\n * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope\n */\nexports.CacheScope = new CacheScope$Type();\n//# sourceMappingURL=cachescope.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getCacheEntry = getCacheEntry;\nexports.downloadCache = downloadCache;\nexports.reserveCache = reserveCache;\nexports.saveCache = saveCache;\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst fs = __importStar(require(\"fs\"));\nconst url_1 = require(\"url\");\nconst utils = __importStar(require(\"./cacheUtils\"));\nconst uploadUtils_1 = require(\"./uploadUtils\");\nconst downloadUtils_1 = require(\"./downloadUtils\");\nconst options_1 = require(\"../options\");\nconst requestUtils_1 = require(\"./requestUtils\");\nconst config_1 = require(\"./config\");\nconst user_agent_1 = require(\"./shared/user-agent\");\nfunction getCacheApiUrl(resource) {\n const baseUrl = (0, config_1.getCacheServiceURL)();\n if (!baseUrl) {\n throw new Error('Cache Service Url not found, unable to restore cache.');\n }\n const url = `${baseUrl}_apis/artifactcache/${resource}`;\n core.debug(`Resource Url: ${url}`);\n return url;\n}\nfunction createAcceptHeader(type, apiVersion) {\n return `${type};api-version=${apiVersion}`;\n}\nfunction getRequestOptions() {\n const requestOptions = {\n headers: {\n Accept: createAcceptHeader('application/json', '6.0-preview.1')\n }\n };\n return requestOptions;\n}\nfunction createHttpClient() {\n const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';\n const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);\n return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions());\n}\nfunction getCacheEntry(keys, paths, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const httpClient = createHttpClient();\n const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive);\n const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;\n const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));\n // Cache not found\n if (response.statusCode === 204) {\n // List cache for primary key only if cache miss occurs\n if (core.isDebug()) {\n yield printCachesListForDiagnostics(keys[0], httpClient, version);\n }\n return null;\n }\n if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) {\n throw new Error(`Cache service responded with ${response.statusCode}`);\n }\n const cacheResult = response.result;\n const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;\n if (!cacheDownloadUrl) {\n // Cache achiveLocation not found. This should never happen, and hence bail out.\n throw new Error('Cache not found.');\n }\n core.setSecret(cacheDownloadUrl);\n core.debug(`Cache Result:`);\n core.debug(JSON.stringify(cacheResult));\n return cacheResult;\n });\n}\nfunction printCachesListForDiagnostics(key, httpClient, version) {\n return __awaiter(this, void 0, void 0, function* () {\n const resource = `caches?key=${encodeURIComponent(key)}`;\n const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));\n if (response.statusCode === 200) {\n const cacheListResult = response.result;\n const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;\n if (totalCount && totalCount > 0) {\n core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \\nOther caches with similar key:`);\n for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {\n core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`);\n }\n }\n }\n });\n}\nfunction downloadCache(archiveLocation, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const archiveUrl = new url_1.URL(archiveLocation);\n const downloadOptions = (0, options_1.getDownloadOptions)(options);\n if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {\n if (downloadOptions.useAzureSdk) {\n // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.\n yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);\n }\n else if (downloadOptions.concurrentBlobDownloads) {\n // Use concurrent implementation with HttpClient to work around blob SDK issue\n yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);\n }\n else {\n // Otherwise, download using the Actions http-client.\n yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);\n }\n }\n else {\n yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);\n }\n });\n}\n// Reserve Cache\nfunction reserveCache(key, paths, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const httpClient = createHttpClient();\n const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);\n const reserveCacheRequest = {\n key,\n version,\n cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize\n };\n const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () {\n return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);\n }));\n return response;\n });\n}\nfunction getContentRange(start, end) {\n // Format: `bytes start-end/filesize\n // start and end are inclusive\n // filesize can be *\n // For a 200 byte chunk starting at byte 0:\n // Content-Range: bytes 0-199/*\n return `bytes ${start}-${end}/*`;\n}\nfunction uploadChunk(httpClient, resourceUrl, openStream, start, end) {\n return __awaiter(this, void 0, void 0, function* () {\n core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);\n const additionalHeaders = {\n 'Content-Type': 'application/octet-stream',\n 'Content-Range': getContentRange(start, end)\n };\n const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {\n return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);\n }));\n if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {\n throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);\n }\n });\n}\nfunction uploadFile(httpClient, cacheId, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n // Upload Chunks\n const fileSize = utils.getArchiveFileSizeInBytes(archivePath);\n const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);\n const fd = fs.openSync(archivePath, 'r');\n const uploadOptions = (0, options_1.getUploadOptions)(options);\n const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);\n const maxChunkSize = utils.assertDefined('uploadChunkSize', 
uploadOptions.uploadChunkSize);\n const parallelUploads = [...new Array(concurrency).keys()];\n core.debug('Awaiting all uploads');\n let offset = 0;\n try {\n yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {\n while (offset < fileSize) {\n const chunkSize = Math.min(fileSize - offset, maxChunkSize);\n const start = offset;\n const end = offset + chunkSize - 1;\n offset += maxChunkSize;\n yield uploadChunk(httpClient, resourceUrl, () => fs\n .createReadStream(archivePath, {\n fd,\n start,\n end,\n autoClose: false\n })\n .on('error', error => {\n throw new Error(`Cache upload failed because file read failed with ${error.message}`);\n }), start, end);\n }\n })));\n }\n finally {\n fs.closeSync(fd);\n }\n return;\n });\n}\nfunction commitCache(httpClient, cacheId, filesize) {\n return __awaiter(this, void 0, void 0, function* () {\n const commitCacheRequest = { size: filesize };\n return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {\n return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);\n }));\n });\n}\nfunction saveCache(cacheId, archivePath, signedUploadURL, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const uploadOptions = (0, options_1.getUploadOptions)(options);\n if (uploadOptions.useAzureSdk) {\n // Use Azure storage SDK to upload caches directly to Azure\n if (!signedUploadURL) {\n throw new Error('Azure Storage SDK can only be used when a signed URL is provided.');\n }\n yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options);\n }\n else {\n const httpClient = createHttpClient();\n core.debug('Upload cache');\n yield uploadFile(httpClient, cacheId, archivePath, options);\n // Commit Cache\n core.debug('Commiting cache');\n const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);\n core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);\n const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);\n if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {\n throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);\n }\n core.info('Cache saved successfully');\n }\n });\n}\n//# sourceMappingURL=cacheHttpClient.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createTempDirectory = createTempDirectory;\nexports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;\nexports.resolvePaths = resolvePaths;\nexports.unlinkFile = unlinkFile;\nexports.getCompressionMethod = getCompressionMethod;\nexports.getCacheFileName = getCacheFileName;\nexports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;\nexports.assertDefined = assertDefined;\nexports.getCacheVersion = getCacheVersion;\nexports.getRuntimeToken = getRuntimeToken;\nconst core = __importStar(require(\"@actions/core\"));\nconst exec = __importStar(require(\"@actions/exec\"));\nconst glob = __importStar(require(\"@actions/glob\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst crypto = __importStar(require(\"crypto\"));\nconst fs = __importStar(require(\"fs\"));\nconst path = __importStar(require(\"path\"));\nconst semver = __importStar(require(\"semver\"));\nconst util = __importStar(require(\"util\"));\nconst constants_1 = require(\"./constants\");\nconst versionSalt = '1.0';\n// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23\nfunction createTempDirectory() {\n return __awaiter(this, void 0, void 0, function* () {\n const IS_WINDOWS = process.platform === 'win32';\n let tempDirectory = process.env['RUNNER_TEMP'] || '';\n if (!tempDirectory) {\n let baseLocation;\n if (IS_WINDOWS) {\n // On Windows use the 
USERPROFILE env variable\n baseLocation = process.env['USERPROFILE'] || 'C:\\\\';\n }\n else {\n if (process.platform === 'darwin') {\n baseLocation = '/Users';\n }\n else {\n baseLocation = '/home';\n }\n }\n tempDirectory = path.join(baseLocation, 'actions', 'temp');\n }\n const dest = path.join(tempDirectory, crypto.randomUUID());\n yield io.mkdirP(dest);\n return dest;\n });\n}\nfunction getArchiveFileSizeInBytes(filePath) {\n return fs.statSync(filePath).size;\n}\nfunction resolvePaths(patterns) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, e_1, _b, _c;\n var _d;\n const paths = [];\n const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();\n const globber = yield glob.create(patterns.join('\\n'), {\n implicitDescendants: false\n });\n try {\n for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {\n _c = _g.value;\n _e = false;\n const file = _c;\n const relativeFile = path\n .relative(workspace, file)\n .replace(new RegExp(`\\\\${path.sep}`, 'g'), '/');\n core.debug(`Matched: ${relativeFile}`);\n // Paths are made relative so the tar entries are all relative to the root of the workspace.\n if (relativeFile === '') {\n // path.relative returns empty string if workspace and file are equal\n paths.push('.');\n }\n else {\n paths.push(`${relativeFile}`);\n }\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);\n }\n finally { if (e_1) throw e_1.error; }\n }\n return paths;\n });\n}\nfunction unlinkFile(filePath) {\n return __awaiter(this, void 0, void 0, function* () {\n return util.promisify(fs.unlink)(filePath);\n });\n}\nfunction getVersion(app_1) {\n return __awaiter(this, arguments, void 0, function* (app, additionalArgs = []) {\n let versionOutput = '';\n additionalArgs.push('--version');\n core.debug(`Checking ${app} ${additionalArgs.join(' ')}`);\n try {\n yield exec.exec(`${app}`, additionalArgs, {\n ignoreReturnCode: true,\n silent: true,\n listeners: {\n stdout: (data) => (versionOutput += data.toString()),\n stderr: (data) => (versionOutput += data.toString())\n }\n });\n }\n catch (err) {\n core.debug(err.message);\n }\n versionOutput = versionOutput.trim();\n core.debug(versionOutput);\n return versionOutput;\n });\n}\n// Use zstandard if possible to maximize cache performance\nfunction getCompressionMethod() {\n return __awaiter(this, void 0, void 0, function* () {\n const versionOutput = yield getVersion('zstd', ['--quiet']);\n const version = semver.clean(versionOutput);\n core.debug(`zstd version: ${version}`);\n if (versionOutput === '') {\n return constants_1.CompressionMethod.Gzip;\n }\n else {\n return constants_1.CompressionMethod.ZstdWithoutLong;\n }\n });\n}\nfunction getCacheFileName(compressionMethod) {\n return compressionMethod === constants_1.CompressionMethod.Gzip\n ? constants_1.CacheFilename.Gzip\n : constants_1.CacheFilename.Zstd;\n}\nfunction getGnuTarPathOnWindows() {\n return __awaiter(this, void 0, void 0, function* () {\n if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {\n return constants_1.GnuTarPathOnWindows;\n }\n const versionOutput = yield getVersion('tar');\n return versionOutput.toLowerCase().includes('gnu tar') ? 
io.which('tar') : '';\n });\n}\nfunction assertDefined(name, value) {\n if (value === undefined) {\n throw Error(`Expected ${name} but value was undefiend`);\n }\n return value;\n}\nfunction getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {\n // don't pass changes upstream\n const components = paths.slice();\n // Add compression method to cache version to restore\n // compressed cache as per compression method\n if (compressionMethod) {\n components.push(compressionMethod);\n }\n // Only check for windows platforms if enableCrossOsArchive is false\n if (process.platform === 'win32' && !enableCrossOsArchive) {\n components.push('windows-only');\n }\n // Add salt to cache version to support breaking changes in cache entry\n components.push(versionSalt);\n return crypto.createHash('sha256').update(components.join('|')).digest('hex');\n}\nfunction getRuntimeToken() {\n const token = process.env['ACTIONS_RUNTIME_TOKEN'];\n if (!token) {\n throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable');\n }\n return token;\n}\n//# sourceMappingURL=cacheUtils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isGhes = isGhes;\nexports.getCacheServiceVersion = getCacheServiceVersion;\nexports.getCacheServiceURL = getCacheServiceURL;\nfunction isGhes() {\n const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');\n const hostname = ghUrl.hostname.trimEnd().toUpperCase();\n const isGitHubHost = hostname === 'GITHUB.COM';\n const isGheHost = hostname.endsWith('.GHE.COM');\n const isLocalHost = hostname.endsWith('.LOCALHOST');\n return !isGitHubHost && !isGheHost && !isLocalHost;\n}\nfunction getCacheServiceVersion() {\n // Cache service v2 is not supported on GHES. We will default to\n // cache service v1 even if the feature flag was enabled by user.\n if (isGhes())\n return 'v1';\n return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 
'v2' : 'v1';\n}\nfunction getCacheServiceURL() {\n const version = getCacheServiceVersion();\n // Based on the version of the cache service, we will determine which\n // URL to use.\n switch (version) {\n case 'v1':\n return (process.env['ACTIONS_CACHE_URL'] ||\n process.env['ACTIONS_RESULTS_URL'] ||\n '');\n case 'v2':\n return process.env['ACTIONS_RESULTS_URL'] || '';\n default:\n throw new Error(`Unsupported cache service version: ${version}`);\n }\n}\n//# sourceMappingURL=config.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CacheFileSizeLimit = exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;\nvar CacheFilename;\n(function (CacheFilename) {\n CacheFilename[\"Gzip\"] = \"cache.tgz\";\n CacheFilename[\"Zstd\"] = \"cache.tzst\";\n})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));\nvar CompressionMethod;\n(function (CompressionMethod) {\n CompressionMethod[\"Gzip\"] = \"gzip\";\n // Long range mode was added to zstd in v1.3.2.\n // This enum is for earlier version of zstd that does not have --long support\n CompressionMethod[\"ZstdWithoutLong\"] = \"zstd-without-long\";\n CompressionMethod[\"Zstd\"] = \"zstd\";\n})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));\nvar ArchiveToolType;\n(function (ArchiveToolType) {\n ArchiveToolType[\"GNU\"] = \"gnu\";\n ArchiveToolType[\"BSD\"] = \"bsd\";\n})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));\n// The default number of retry attempts.\nexports.DefaultRetryAttempts = 2;\n// The default delay in milliseconds between retry attempts.\nexports.DefaultRetryDelay = 5000;\n// Socket timeout in milliseconds during download. If no traffic is received\n// over the socket during this period, the socket is destroyed and the download\n// is aborted.\nexports.SocketTimeout = 5000;\n// The default path of GNUtar on hosted Windows runners\nexports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\\\Git\\\\usr\\\\bin\\\\tar.exe`;\n// The default path of BSDtar on hosted Windows runners\nexports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\\\Windows\\\\System32\\\\tar.exe`;\nexports.TarFilename = 'cache.tar';\nexports.ManifestFilename = 'manifest.txt';\nexports.CacheFileSizeLimit = 10 * Math.pow(1024, 3); // 10GiB per repository\n//# sourceMappingURL=constants.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DownloadProgress = void 0;\nexports.downloadCacheHttpClient = downloadCacheHttpClient;\nexports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;\nexports.downloadCacheStorageSDK = downloadCacheStorageSDK;\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst storage_blob_1 = require(\"@azure/storage-blob\");\nconst buffer = __importStar(require(\"buffer\"));\nconst fs = __importStar(require(\"fs\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst utils = __importStar(require(\"./cacheUtils\"));\nconst constants_1 = require(\"./constants\");\nconst requestUtils_1 = require(\"./requestUtils\");\nconst abort_controller_1 = require(\"@azure/abort-controller\");\n/**\n * Pipes the body of a HTTP response to a stream\n *\n * @param response the HTTP response\n * @param output the writable stream\n */\nfunction pipeResponseToStream(response, output) {\n return __awaiter(this, void 0, void 0, function* () {\n const pipeline = util.promisify(stream.pipeline);\n yield pipeline(response.message, output);\n });\n}\n/**\n * Class for tracking the download state and displaying stats.\n */\nclass DownloadProgress {\n constructor(contentLength) {\n this.contentLength = contentLength;\n this.segmentIndex = 0;\n this.segmentSize = 0;\n this.segmentOffset = 0;\n this.receivedBytes = 0;\n this.displayedComplete = false;\n this.startTime = Date.now();\n }\n /**\n * Progress to the next segment. 
Only call this method when the previous segment\n * is complete.\n *\n * @param segmentSize the length of the next segment\n */\n nextSegment(segmentSize) {\n this.segmentOffset = this.segmentOffset + this.segmentSize;\n this.segmentIndex = this.segmentIndex + 1;\n this.segmentSize = segmentSize;\n this.receivedBytes = 0;\n core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);\n }\n /**\n * Sets the number of bytes received for the current segment.\n *\n * @param receivedBytes the number of bytes received\n */\n setReceivedBytes(receivedBytes) {\n this.receivedBytes = receivedBytes;\n }\n /**\n * Returns the total number of bytes transferred.\n */\n getTransferredBytes() {\n return this.segmentOffset + this.receivedBytes;\n }\n /**\n * Returns true if the download is complete.\n */\n isDone() {\n return this.getTransferredBytes() === this.contentLength;\n }\n /**\n * Prints the current download stats. Once the download completes, this will print one\n * last line and then stop.\n */\n display() {\n if (this.displayedComplete) {\n return;\n }\n const transferredBytes = this.segmentOffset + this.receivedBytes;\n const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);\n const elapsedTime = Date.now() - this.startTime;\n const downloadSpeed = (transferredBytes /\n (1024 * 1024) /\n (elapsedTime / 1000)).toFixed(1);\n core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);\n if (this.isDone()) {\n this.displayedComplete = true;\n }\n }\n /**\n * Returns a function used to handle TransferProgressEvents.\n */\n onProgress() {\n return (progress) => {\n this.setReceivedBytes(progress.loadedBytes);\n };\n }\n /**\n * Starts the timer that displays the stats.\n *\n * @param delayInMs the delay between each write\n */\n startDisplayTimer(delayInMs = 1000) {\n const displayCallback = () => {\n this.display();\n if (!this.isDone()) {\n this.timeoutHandle = setTimeout(displayCallback, delayInMs);\n }\n };\n this.timeoutHandle = setTimeout(displayCallback, delayInMs);\n }\n /**\n * Stops the timer that displays the stats. 
As this typically indicates the download\n * is complete, this will display one last line, unless the last line has already\n * been written.\n */\n stopDisplayTimer() {\n if (this.timeoutHandle) {\n clearTimeout(this.timeoutHandle);\n this.timeoutHandle = undefined;\n }\n this.display();\n }\n}\nexports.DownloadProgress = DownloadProgress;\n/**\n * Download the cache using the Actions toolkit http-client\n *\n * @param archiveLocation the URL for the cache\n * @param archivePath the local path where the cache is saved\n */\nfunction downloadCacheHttpClient(archiveLocation, archivePath) {\n return __awaiter(this, void 0, void 0, function* () {\n const writeStream = fs.createWriteStream(archivePath);\n const httpClient = new http_client_1.HttpClient('actions/cache');\n const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));\n // Abort download if no traffic received over the socket.\n downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {\n downloadResponse.message.destroy();\n core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);\n });\n yield pipeResponseToStream(downloadResponse, writeStream);\n // Validate download size.\n const contentLengthHeader = downloadResponse.message.headers['content-length'];\n if (contentLengthHeader) {\n const expectedLength = parseInt(contentLengthHeader);\n const actualLength = utils.getArchiveFileSizeInBytes(archivePath);\n if (actualLength !== expectedLength) {\n throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);\n }\n }\n else {\n core.debug('Unable to validate download, no Content-Length header');\n }\n });\n}\n/**\n * Download the cache using the Actions toolkit http-client concurrently\n *\n * @param archiveLocation the URL for the cache\n * @param archivePath the local path where the cache is saved\n */\nfunction downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a;\n const archiveDescriptor = yield fs.promises.open(archivePath, 'w');\n const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {\n socketTimeout: options.timeoutInMs,\n keepAlive: true\n });\n try {\n const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));\n const lengthHeader = res.message.headers['content-length'];\n if (lengthHeader === undefined || lengthHeader === null) {\n throw new Error('Content-Length not found on blob response');\n }\n const length = parseInt(lengthHeader);\n if (Number.isNaN(length)) {\n throw new Error(`Could not interpret Content-Length: ${length}`);\n }\n const downloads = [];\n const blockSize = 4 * 1024 * 1024;\n for (let offset = 0; offset < length; offset += blockSize) {\n const count = Math.min(blockSize, length - offset);\n downloads.push({\n offset,\n promiseGetter: () => __awaiter(this, void 0, void 0, function* () {\n return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);\n })\n });\n }\n // reverse to use .pop instead of .shift\n downloads.reverse();\n let actives = 0;\n let bytesDownloaded = 0;\n const progress = new DownloadProgress(length);\n progress.startDisplayTimer();\n const progressFn = 
progress.onProgress();\n const activeDownloads = [];\n let nextDownload;\n const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {\n const segment = yield Promise.race(Object.values(activeDownloads));\n yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);\n actives--;\n delete activeDownloads[segment.offset];\n bytesDownloaded += segment.count;\n progressFn({ loadedBytes: bytesDownloaded });\n });\n while ((nextDownload = downloads.pop())) {\n activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();\n actives++;\n if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {\n yield waitAndWrite();\n }\n }\n while (actives > 0) {\n yield waitAndWrite();\n }\n }\n finally {\n httpClient.dispose();\n yield archiveDescriptor.close();\n }\n });\n}\nfunction downloadSegmentRetry(httpClient, archiveLocation, offset, count) {\n return __awaiter(this, void 0, void 0, function* () {\n const retries = 5;\n let failures = 0;\n while (true) {\n try {\n const timeout = 30000;\n const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));\n if (typeof result === 'string') {\n throw new Error('downloadSegmentRetry failed due to timeout');\n }\n return result;\n }\n catch (err) {\n if (failures >= retries) {\n throw err;\n }\n failures++;\n }\n }\n });\n}\nfunction downloadSegment(httpClient, archiveLocation, offset, count) {\n return __awaiter(this, void 0, void 0, function* () {\n const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {\n return yield httpClient.get(archiveLocation, {\n Range: `bytes=${offset}-${offset + count - 1}`\n });\n }));\n if (!partRes.readBodyBuffer) {\n throw new Error('Expected HttpClientResponse to implement readBodyBuffer');\n }\n return {\n offset,\n count,\n buffer: yield partRes.readBodyBuffer()\n };\n });\n}\n/**\n * Download the cache using the Azure Storage SDK. Only call this method if the\n * URL points to an Azure Storage endpoint.\n *\n * @param archiveLocation the URL for the cache\n * @param archivePath the local path where the cache is saved\n * @param options the download options with the defaults set\n */\nfunction downloadCacheStorageSDK(archiveLocation, archivePath, options) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a;\n const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {\n retryOptions: {\n // Override the timeout used when downloading each 4 MB chunk\n // The default is 2 min / MB, which is way too slow\n tryTimeoutInMs: options.timeoutInMs\n }\n });\n const properties = yield client.getProperties();\n const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;\n if (contentLength < 0) {\n // We should never hit this condition, but just in case fall back to downloading the\n // file as one large stream\n core.debug('Unable to determine content length, downloading file with http-client...');\n yield downloadCacheHttpClient(archiveLocation, archivePath);\n }\n else {\n // Use downloadToBuffer for faster downloads, since internally it splits the\n // file into 4 MB chunks which can then be parallelized and retried independently\n //\n // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB\n // on 64-bit systems), split the download into multiple segments\n // ~2 GB = 2147483647, beyond this, we start getting out of range error. 
So, capping it accordingly.\n // Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast\n const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);\n const downloadProgress = new DownloadProgress(contentLength);\n const fd = fs.openSync(archivePath, 'w');\n try {\n downloadProgress.startDisplayTimer();\n const controller = new abort_controller_1.AbortController();\n const abortSignal = controller.signal;\n while (!downloadProgress.isDone()) {\n const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;\n const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);\n downloadProgress.nextSegment(segmentSize);\n const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {\n abortSignal,\n concurrency: options.downloadConcurrency,\n onProgress: downloadProgress.onProgress()\n }));\n if (result === 'timeout') {\n controller.abort();\n throw new Error('Aborting cache download as the download time exceeded the timeout.');\n }\n else if (Buffer.isBuffer(result)) {\n fs.writeFileSync(fd, result);\n }\n }\n }\n finally {\n downloadProgress.stopDisplayTimer();\n fs.closeSync(fd);\n }\n }\n });\n}\nconst promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {\n let timeoutHandle;\n const timeoutPromise = new Promise(resolve => {\n timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);\n });\n return Promise.race([promise, timeoutPromise]).then(result => {\n clearTimeout(timeoutHandle);\n return result;\n });\n});\n//# sourceMappingURL=downloadUtils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isSuccessStatusCode = isSuccessStatusCode;\nexports.isServerErrorStatusCode = isServerErrorStatusCode;\nexports.isRetryableStatusCode = isRetryableStatusCode;\nexports.retry = retry;\nexports.retryTypedResponse = retryTypedResponse;\nexports.retryHttpClientResponse = retryHttpClientResponse;\nconst core = __importStar(require(\"@actions/core\"));\nconst http_client_1 = require(\"@actions/http-client\");\nconst constants_1 = require(\"./constants\");\nfunction isSuccessStatusCode(statusCode) {\n if (!statusCode) {\n return false;\n }\n return statusCode >= 200 && statusCode < 300;\n}\nfunction isServerErrorStatusCode(statusCode) {\n if (!statusCode) {\n return true;\n }\n return statusCode >= 500;\n}\nfunction isRetryableStatusCode(statusCode) {\n if (!statusCode) {\n return false;\n }\n const retryableStatusCodes = [\n http_client_1.HttpCodes.BadGateway,\n http_client_1.HttpCodes.ServiceUnavailable,\n http_client_1.HttpCodes.GatewayTimeout\n ];\n return retryableStatusCodes.includes(statusCode);\n}\nfunction sleep(milliseconds) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise(resolve => setTimeout(resolve, milliseconds));\n });\n}\nfunction retry(name_1, method_1, getStatusCode_1) {\n return __awaiter(this, arguments, void 0, function* (name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) {\n let errorMessage = '';\n let attempt = 1;\n while (attempt <= maxAttempts) {\n let response = undefined;\n let statusCode = undefined;\n let isRetryable = false;\n try {\n response = yield method();\n }\n catch (error) {\n if (onError) {\n response = onError(error);\n }\n isRetryable = true;\n errorMessage = error.message;\n }\n if (response) {\n statusCode = getStatusCode(response);\n if (!isServerErrorStatusCode(statusCode)) {\n return response;\n }\n }\n if (statusCode) {\n isRetryable = isRetryableStatusCode(statusCode);\n errorMessage = `Cache service responded with ${statusCode}`;\n }\n core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);\n if (!isRetryable) {\n core.debug(`${name} - Error is not retryable`);\n break;\n }\n yield sleep(delay);\n attempt++;\n }\n throw Error(`${name} failed: ${errorMessage}`);\n });\n}\nfunction retryTypedResponse(name_1, method_1) {\n return __awaiter(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {\n return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, \n // If the error object contains the statusCode property, extract it and return\n // an TypedResponse
/**\n * Adds an HTML thematic break (<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandValue = toCommandValue;\nexports.toCommandProperties = toCommandProperties;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\n//# sourceMappingURL=utils.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n///
/**\n * Adds an HTML thematic break (<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getExecOutput = exports.exec = void 0;\nconst string_decoder_1 = require(\"string_decoder\");\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise<number> exit code
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandValue = toCommandValue;\nexports.toCommandProperties = toCommandProperties;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.HttpClientResponse = exports.HttpClientError = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nexports.getProxyUrl = getProxyUrl;\nexports.isHttps = isHttps;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nconst undici_1 = require(\"undici\");\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = 
\"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers || (exports.Headers = Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n readBodyBuffer() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n const chunks = [];\n this.message.on('data', (chunk) => {\n chunks.push(chunk);\n });\n this.message.on('end', () => {\n resolve(Buffer.concat(chunks));\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = this._getUserAgentWithOrchestrationId(userAgent);\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects 
= Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl_1) {\n return __awaiter(this, arguments, void 0, function* (requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl_1, obj_1) {\n return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] =\n this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl_1, obj_1) {\n return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] =\n this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl_1, obj_1) {\n return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] =\n this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n getAgentDispatcher(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (!useProxy) {\n return;\n }\n return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n /**\n * Gets an existing header value or returns a default.\n * Handles converting number header values to strings since HTTP headers must be strings.\n * Note: This returns string | string[] since some headers can have multiple values.\n * For headers that must always be a single string (like Content-Type), use the\n * specialized _getExistingOrDefaultContentTypeHeader method instead.\n */\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n const headerValue = lowercaseKeys(this.requestOptions.headers)[header];\n if (headerValue) {\n clientHeader =\n typeof headerValue === 'number' ? headerValue.toString() : headerValue;\n }\n }\n const additionalValue = additionalHeaders[header];\n if (additionalValue !== undefined) {\n return typeof additionalValue === 'number'\n ? 
additionalValue.toString()\n : additionalValue;\n }\n if (clientHeader !== undefined) {\n return clientHeader;\n }\n return _default;\n }\n /**\n * Specialized version of _getExistingOrDefaultHeader for Content-Type header.\n * Always returns a single string (not an array) since Content-Type should be a single value.\n * Converts arrays to comma-separated strings and numbers to strings to ensure type safety.\n * This was split from _getExistingOrDefaultHeader to provide stricter typing for callers\n * that assign the result to places expecting a string (e.g., additionalHeaders[Headers.ContentType]).\n */\n _getExistingOrDefaultContentTypeHeader(additionalHeaders, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n const headerValue = lowercaseKeys(this.requestOptions.headers)[Headers.ContentType];\n if (headerValue) {\n if (typeof headerValue === 'number') {\n clientHeader = String(headerValue);\n }\n else if (Array.isArray(headerValue)) {\n clientHeader = headerValue.join(', ');\n }\n else {\n clientHeader = headerValue;\n }\n }\n }\n const additionalValue = additionalHeaders[Headers.ContentType];\n // Return the first non-undefined value, converting numbers or arrays to strings if necessary\n if (additionalValue !== undefined) {\n if (typeof additionalValue === 'number') {\n return String(additionalValue);\n }\n else if (Array.isArray(additionalValue)) {\n return additionalValue.join(', ');\n }\n else {\n return additionalValue;\n }\n }\n if (clientHeader !== undefined) {\n return clientHeader;\n }\n return _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (!useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if tunneling agent isn't assigned create a new agent\n if (!agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? 
new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _getProxyAgentDispatcher(parsedUrl, proxyUrl) {\n let proxyAgent;\n if (this._keepAlive) {\n proxyAgent = this._proxyAgentDispatcher;\n }\n // if agent is already assigned use that agent.\n if (proxyAgent) {\n return proxyAgent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {\n token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}`\n })));\n this._proxyAgentDispatcher = proxyAgent;\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {\n rejectUnauthorized: false\n });\n }\n return proxyAgent;\n }\n _getUserAgentWithOrchestrationId(userAgent) {\n const baseUserAgent = userAgent || 'actions/http-client';\n const orchId = process.env['ACTIONS_ORCHESTRATION_ID'];\n if (orchId) {\n // Sanitize the orchestration ID to ensure it contains only valid characters\n // Valid characters: 0-9, a-z, _, -, .\n const sanitizedId = orchId.replace(/[^a-z0-9_.-]/gi, '_');\n return `${baseUserAgent} actions_orchestration_id/${sanitizedId}`;\n }\n return baseUserAgent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && 
contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getProxyUrl = getProxyUrl;\nexports.checkBypass = checkBypass;\nfunction getProxyUrl(reqUrl) {\n const usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n try {\n return new DecodedURL(proxyVar);\n }\n catch (_a) {\n if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))\n return new DecodedURL(`http://${proxyVar}`);\n }\n }\n else {\n return undefined;\n }\n}\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\nclass DecodedURL extends URL {\n constructor(url, base) {\n super(url, base);\n this._decodedUsername = decodeURIComponent(super.username);\n this._decodedPassword = decodeURIComponent(super.password);\n }\n get username() {\n return this._decodedUsername;\n }\n get password() {\n return this._decodedPassword;\n }\n}\n//# sourceMappingURL=proxy.js.map","'use strict'\n\nconst Client = require('./lib/dispatcher/client')\nconst Dispatcher = require('./lib/dispatcher/dispatcher')\nconst Pool = require('./lib/dispatcher/pool')\nconst BalancedPool = require('./lib/dispatcher/balanced-pool')\nconst Agent = require('./lib/dispatcher/agent')\nconst ProxyAgent = require('./lib/dispatcher/proxy-agent')\nconst EnvHttpProxyAgent = require('./lib/dispatcher/env-http-proxy-agent')\nconst RetryAgent = require('./lib/dispatcher/retry-agent')\nconst errors = require('./lib/core/errors')\nconst util = require('./lib/core/util')\nconst { 
InvalidArgumentError } = errors\nconst api = require('./lib/api')\nconst buildConnector = require('./lib/core/connect')\nconst MockClient = require('./lib/mock/mock-client')\nconst MockAgent = require('./lib/mock/mock-agent')\nconst MockPool = require('./lib/mock/mock-pool')\nconst mockErrors = require('./lib/mock/mock-errors')\nconst RetryHandler = require('./lib/handler/retry-handler')\nconst { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')\nconst DecoratorHandler = require('./lib/handler/decorator-handler')\nconst RedirectHandler = require('./lib/handler/redirect-handler')\nconst createRedirectInterceptor = require('./lib/interceptor/redirect-interceptor')\n\nObject.assign(Dispatcher.prototype, api)\n\nmodule.exports.Dispatcher = Dispatcher\nmodule.exports.Client = Client\nmodule.exports.Pool = Pool\nmodule.exports.BalancedPool = BalancedPool\nmodule.exports.Agent = Agent\nmodule.exports.ProxyAgent = ProxyAgent\nmodule.exports.EnvHttpProxyAgent = EnvHttpProxyAgent\nmodule.exports.RetryAgent = RetryAgent\nmodule.exports.RetryHandler = RetryHandler\n\nmodule.exports.DecoratorHandler = DecoratorHandler\nmodule.exports.RedirectHandler = RedirectHandler\nmodule.exports.createRedirectInterceptor = createRedirectInterceptor\nmodule.exports.interceptors = {\n redirect: require('./lib/interceptor/redirect'),\n retry: require('./lib/interceptor/retry'),\n dump: require('./lib/interceptor/dump'),\n dns: require('./lib/interceptor/dns')\n}\n\nmodule.exports.buildConnector = buildConnector\nmodule.exports.errors = errors\nmodule.exports.util = {\n parseHeaders: util.parseHeaders,\n headerNameToString: util.headerNameToString\n}\n\nfunction makeDispatcher (fn) {\n return (url, opts, handler) => {\n if (typeof opts === 'function') {\n handler = opts\n opts = null\n }\n\n if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {\n throw new InvalidArgumentError('invalid url')\n }\n\n if (opts != null && typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (opts && opts.path != null) {\n if (typeof opts.path !== 'string') {\n throw new InvalidArgumentError('invalid opts.path')\n }\n\n let path = opts.path\n if (!opts.path.startsWith('/')) {\n path = `/${path}`\n }\n\n url = new URL(util.parseOrigin(url).origin + path)\n } else {\n if (!opts) {\n opts = typeof url === 'object' ? url : {}\n }\n\n url = util.parseURL(url)\n }\n\n const { agent, dispatcher = getGlobalDispatcher() } = opts\n\n if (agent) {\n throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')\n }\n\n return fn.call(dispatcher, {\n ...opts,\n origin: url.origin,\n path: url.search ? `${url.pathname}${url.search}` : url.pathname,\n method: opts.method || (opts.body ? 'PUT' : 'GET')\n }, handler)\n }\n}\n\nmodule.exports.setGlobalDispatcher = setGlobalDispatcher\nmodule.exports.getGlobalDispatcher = getGlobalDispatcher\n\nconst fetchImpl = require('./lib/web/fetch').fetch\nmodule.exports.fetch = async function fetch (init, options = undefined) {\n try {\n return await fetchImpl(init, options)\n } catch (err) {\n if (err && typeof err === 'object') {\n Error.captureStackTrace(err)\n }\n\n throw err\n }\n}\nmodule.exports.Headers = require('./lib/web/fetch/headers').Headers\nmodule.exports.Response = require('./lib/web/fetch/response').Response\nmodule.exports.Request = require('./lib/web/fetch/request').Request\nmodule.exports.FormData = require('./lib/web/fetch/formdata').FormData\nmodule.exports.File = globalThis.File ?? 
require('node:buffer').File\nmodule.exports.FileReader = require('./lib/web/fileapi/filereader').FileReader\n\nconst { setGlobalOrigin, getGlobalOrigin } = require('./lib/web/fetch/global')\n\nmodule.exports.setGlobalOrigin = setGlobalOrigin\nmodule.exports.getGlobalOrigin = getGlobalOrigin\n\nconst { CacheStorage } = require('./lib/web/cache/cachestorage')\nconst { kConstruct } = require('./lib/web/cache/symbols')\n\n// Cache & CacheStorage are tightly coupled with fetch. Even if it may run\n// in an older version of Node, it doesn't have any use without fetch.\nmodule.exports.caches = new CacheStorage(kConstruct)\n\nconst { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/web/cookies')\n\nmodule.exports.deleteCookie = deleteCookie\nmodule.exports.getCookies = getCookies\nmodule.exports.getSetCookies = getSetCookies\nmodule.exports.setCookie = setCookie\n\nconst { parseMIMEType, serializeAMimeType } = require('./lib/web/fetch/data-url')\n\nmodule.exports.parseMIMEType = parseMIMEType\nmodule.exports.serializeAMimeType = serializeAMimeType\n\nconst { CloseEvent, ErrorEvent, MessageEvent } = require('./lib/web/websocket/events')\nmodule.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket\nmodule.exports.CloseEvent = CloseEvent\nmodule.exports.ErrorEvent = ErrorEvent\nmodule.exports.MessageEvent = MessageEvent\n\nmodule.exports.request = makeDispatcher(api.request)\nmodule.exports.stream = makeDispatcher(api.stream)\nmodule.exports.pipeline = makeDispatcher(api.pipeline)\nmodule.exports.connect = makeDispatcher(api.connect)\nmodule.exports.upgrade = makeDispatcher(api.upgrade)\n\nmodule.exports.MockClient = MockClient\nmodule.exports.MockPool = MockPool\nmodule.exports.MockAgent = MockAgent\nmodule.exports.mockErrors = mockErrors\n\nconst { EventSource } = require('./lib/web/eventsource/eventsource')\n\nmodule.exports.EventSource = EventSource\n","const { addAbortListener } = require('../core/util')\nconst { RequestAbortedError } = require('../core/errors')\n\nconst kListener = Symbol('kListener')\nconst kSignal = Symbol('kSignal')\n\nfunction abort (self) {\n if (self.abort) {\n self.abort(self[kSignal]?.reason)\n } else {\n self.reason = self[kSignal]?.reason ?? 
new RequestAbortedError()\n }\n removeSignal(self)\n}\n\nfunction addSignal (self, signal) {\n self.reason = null\n\n self[kSignal] = null\n self[kListener] = null\n\n if (!signal) {\n return\n }\n\n if (signal.aborted) {\n abort(self)\n return\n }\n\n self[kSignal] = signal\n self[kListener] = () => {\n abort(self)\n }\n\n addAbortListener(self[kSignal], self[kListener])\n}\n\nfunction removeSignal (self) {\n if (!self[kSignal]) {\n return\n }\n\n if ('removeEventListener' in self[kSignal]) {\n self[kSignal].removeEventListener('abort', self[kListener])\n } else {\n self[kSignal].removeListener('abort', self[kListener])\n }\n\n self[kSignal] = null\n self[kListener] = null\n}\n\nmodule.exports = {\n addSignal,\n removeSignal\n}\n","'use strict'\n\nconst assert = require('node:assert')\nconst { AsyncResource } = require('node:async_hooks')\nconst { InvalidArgumentError, SocketError } = require('../core/errors')\nconst util = require('../core/util')\nconst { addSignal, removeSignal } = require('./abort-signal')\n\nclass ConnectHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n const { signal, opaque, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n super('UNDICI_CONNECT')\n\n this.opaque = opaque || null\n this.responseHeaders = responseHeaders || null\n this.callback = callback\n this.abort = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (this.reason) {\n abort(this.reason)\n return\n }\n\n assert(this.callback)\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders () {\n throw new SocketError('bad connect', null)\n }\n\n onUpgrade (statusCode, rawHeaders, socket) {\n const { callback, opaque, context } = this\n\n removeSignal(this)\n\n this.callback = null\n\n let headers = rawHeaders\n // Indicates is an HTTP2Session\n if (headers != null) {\n headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n }\n\n this.runInAsyncScope(callback, null, null, {\n statusCode,\n headers,\n socket,\n opaque,\n context\n })\n }\n\n onError (err) {\n const { callback, opaque } = this\n\n removeSignal(this)\n\n if (callback) {\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n }\n}\n\nfunction connect (opts, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n connect.call(this, opts, (err, data) => {\n return err ? 
reject(err) : resolve(data)\n })\n })\n }\n\n try {\n const connectHandler = new ConnectHandler(opts, callback)\n this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts?.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = connect\n","'use strict'\n\nconst {\n Readable,\n Duplex,\n PassThrough\n} = require('node:stream')\nconst {\n InvalidArgumentError,\n InvalidReturnValueError,\n RequestAbortedError\n} = require('../core/errors')\nconst util = require('../core/util')\nconst { AsyncResource } = require('node:async_hooks')\nconst { addSignal, removeSignal } = require('./abort-signal')\nconst assert = require('node:assert')\n\nconst kResume = Symbol('resume')\n\nclass PipelineRequest extends Readable {\n constructor () {\n super({ autoDestroy: true })\n\n this[kResume] = null\n }\n\n _read () {\n const { [kResume]: resume } = this\n\n if (resume) {\n this[kResume] = null\n resume()\n }\n }\n\n _destroy (err, callback) {\n this._read()\n\n callback(err)\n }\n}\n\nclass PipelineResponse extends Readable {\n constructor (resume) {\n super({ autoDestroy: true })\n this[kResume] = resume\n }\n\n _read () {\n this[kResume]()\n }\n\n _destroy (err, callback) {\n if (!err && !this._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n callback(err)\n }\n}\n\nclass PipelineHandler extends AsyncResource {\n constructor (opts, handler) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof handler !== 'function') {\n throw new InvalidArgumentError('invalid handler')\n }\n\n const { signal, method, opaque, onInfo, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_PIPELINE')\n\n this.opaque = opaque || null\n this.responseHeaders = responseHeaders || null\n this.handler = handler\n this.abort = null\n this.context = null\n this.onInfo = onInfo || null\n\n this.req = new PipelineRequest().on('error', util.nop)\n\n this.ret = new Duplex({\n readableObjectMode: opts.objectMode,\n autoDestroy: true,\n read: () => {\n const { body } = this\n\n if (body?.resume) {\n body.resume()\n }\n },\n write: (chunk, encoding, callback) => {\n const { req } = this\n\n if (req.push(chunk, encoding) || req._readableState.destroyed) {\n callback()\n } else {\n req[kResume] = callback\n }\n },\n destroy: (err, callback) => {\n const { body, req, res, ret, abort } = this\n\n if (!err && !ret._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n if (abort && err) {\n abort()\n }\n\n util.destroy(body, err)\n util.destroy(req, err)\n util.destroy(res, err)\n\n removeSignal(this)\n\n callback(err)\n }\n }).on('prefinish', () => {\n const { req } = this\n\n // Node < 15 does not call _final in same tick.\n req.push(null)\n })\n\n this.res = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n const { ret, res } = this\n\n if (this.reason) {\n abort(this.reason)\n return\n }\n\n assert(!res, 'pipeline cannot be retried')\n assert(!ret.destroyed)\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders 
(statusCode, rawHeaders, resume) {\n const { opaque, handler, context } = this\n\n if (statusCode < 200) {\n if (this.onInfo) {\n const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n this.onInfo({ statusCode, headers })\n }\n return\n }\n\n this.res = new PipelineResponse(resume)\n\n let body\n try {\n this.handler = null\n const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n body = this.runInAsyncScope(handler, null, {\n statusCode,\n headers,\n opaque,\n body: this.res,\n context\n })\n } catch (err) {\n this.res.on('error', util.nop)\n throw err\n }\n\n if (!body || typeof body.on !== 'function') {\n throw new InvalidReturnValueError('expected Readable')\n }\n\n body\n .on('data', (chunk) => {\n const { ret, body } = this\n\n if (!ret.push(chunk) && body.pause) {\n body.pause()\n }\n })\n .on('error', (err) => {\n const { ret } = this\n\n util.destroy(ret, err)\n })\n .on('end', () => {\n const { ret } = this\n\n ret.push(null)\n })\n .on('close', () => {\n const { ret } = this\n\n if (!ret._readableState.ended) {\n util.destroy(ret, new RequestAbortedError())\n }\n })\n\n this.body = body\n }\n\n onData (chunk) {\n const { res } = this\n return res.push(chunk)\n }\n\n onComplete (trailers) {\n const { res } = this\n res.push(null)\n }\n\n onError (err) {\n const { ret } = this\n this.handler = null\n util.destroy(ret, err)\n }\n}\n\nfunction pipeline (opts, handler) {\n try {\n const pipelineHandler = new PipelineHandler(opts, handler)\n this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)\n return pipelineHandler.ret\n } catch (err) {\n return new PassThrough().destroy(err)\n }\n}\n\nmodule.exports = pipeline\n","'use strict'\n\nconst assert = require('node:assert')\nconst { Readable } = require('./readable')\nconst { InvalidArgumentError, RequestAbortedError } = require('../core/errors')\nconst util = require('../core/util')\nconst { getResolveErrorBodyCallback } = require('./util')\nconst { AsyncResource } = require('node:async_hooks')\n\nclass RequestHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts\n\n try {\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {\n throw new InvalidArgumentError('invalid highWaterMark')\n }\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_REQUEST')\n } catch (err) {\n if (util.isStream(body)) {\n util.destroy(body.on('error', util.nop), err)\n }\n throw err\n }\n\n this.method = method\n this.responseHeaders = responseHeaders || null\n this.opaque = opaque || null\n this.callback = callback\n this.res = null\n this.abort = null\n this.body = body\n this.trailers = {}\n this.context = null\n this.onInfo = onInfo || null\n this.throwOnError = throwOnError\n this.highWaterMark = highWaterMark\n this.signal = signal\n 
[generated dist bundle content omitted — source-map `sourcesContent` residue embedding the vendored undici modules `lib/api/api-request.js`, `lib/api/api-stream.js`, `lib/api/api-upgrade.js`, `lib/api/index.js`, `lib/api/readable.js`, `lib/api/util.js`, and `lib/core/connect.js` as escaped strings; the final entry is truncated mid-file]