import * as assert from 'assert'
import * as core from '@actions/core'
import * as fs from 'fs'
import * as github from '@actions/github'
import * as io from '@actions/io'
import * as path from 'path'
import * as retryHelper from './retry-helper'
import * as toolCache from '@actions/tool-cache'
import * as urlHelper from './url-helper'
import { default as uuid } from 'uuid/v4'
import { ReposGetArchiveLinkParams } from '@octokit/rest'

const IS_WINDOWS = process.platform === 'win32'
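
/**
 * Downloads a repository archive via the GitHub API and lays its contents
 * out under `repositoryPath`. Parameter notes below are inferred from how
 * the values are used in this file, not from an external spec.
 *
 * @param authToken       token used to authenticate the API request
 * @param owner           repository owner (user or organization)
 * @param repo            repository name
 * @param ref             ref to download when no commit is specified
 * @param commit          commit SHA; takes precedence over `ref`
 * @param repositoryPath  directory where the repository content is placed
 */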
export async function downloadRepository(
  authToken: string,
  owner: string,
  repo: string,
  ref: string,
  commit: string,
  repositoryPath: string
): Promise<void> {
  // Download the archive
  let archiveData = await retryHelper.execute(async () => {
    core.info('Downloading the archive')
    return await downloadArchive(authToken, owner, repo, ref, commit)
  })

  // Write archive to disk
  core.info('Writing archive to disk')
  const uniqueId = uuid()
  const archivePath = path.join(repositoryPath, `${uniqueId}.tar.gz`)
  await fs.promises.writeFile(archivePath, archiveData)
  archiveData = Buffer.from('') // Free memory

  // Extract archive
  core.info('Extracting the archive')
  const extractPath = path.join(repositoryPath, uniqueId)
  await io.mkdirP(extractPath)
  if (IS_WINDOWS) {
    await toolCache.extractZip(archivePath, extractPath)
  } else {
    await toolCache.extractTar(archivePath, extractPath)
  }
  await io.rmRF(archivePath)

  // Determine the path of the repository content. The archive contains
  // a top-level folder and the repository content is inside.
  const archiveFileNames = await fs.promises.readdir(extractPath)
  assert.ok(
    archiveFileNames.length === 1,
    'Expected exactly one directory inside archive'
  )
  const archiveVersion = archiveFileNames[0] // The top-level folder name includes the short SHA
  core.info(`Resolved version ${archiveVersion}`)
  const tempRepositoryPath = path.join(extractPath, archiveVersion)

  // Move the files
  for (const fileName of await fs.promises.readdir(tempRepositoryPath)) {
    const sourcePath = path.join(tempRepositoryPath, fileName)
    const targetPath = path.join(repositoryPath, fileName)
    if (IS_WINDOWS) {
      await io.cp(sourcePath, targetPath, { recursive: true }) // Copy on Windows (Windows Defender may have a lock)
    } else {
      await io.mv(sourcePath, targetPath)
    }
  }
  await io.rmRF(extractPath)
}
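
/**
 * Requests the repository archive from the GitHub API and returns the raw
 * bytes. As inferred from the IS_WINDOWS branch below, a zipball is requested
 * on Windows and a tarball elsewhere, matching the extraction logic in
 * downloadRepository.
 */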
async function downloadArchive(
  authToken: string,
  owner: string,
  repo: string,
  ref: string,
  commit: string
): Promise<Buffer> {
  const octokit = new github.GitHub(authToken, { baseUrl: urlHelper.getApiUrl() })
  const params: ReposGetArchiveLinkParams = {
    owner: owner,
    repo: repo,
    archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
    ref: commit || ref
  }
  const response = await octokit.repos.getArchiveLink(params)
  if (response.status !== 200) {
    throw new Error(
      `Unexpected response from GitHub API. Status: ${response.status}, Data: ${response.data}`
    )
  }
  return Buffer.from(response.data) // response.data is ArrayBuffer
}
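
// Example call (hypothetical values; in the real action these come from the
// parsed action inputs):
//
//   await downloadRepository(
//     authToken,
//     'actions',
//     'checkout',
//     'refs/heads/main',
//     '',                                       // no pinned commit; use the ref
//     '/home/runner/work/checkout/checkout'
//   )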