test output link

Trym Lund Flogard 2021-06-05 13:13:36 +02:00
parent 8167ea6a3e
commit a48c618622
14 changed files with 15281 additions and 1326 deletions

action.yml

@@ -30,6 +30,10 @@ inputs:
       Duration after which artifact will expire in days. 0 means using default retention.
       Minimum 1 day.
       Maximum 90 days unless changed from the repository settings page.
+  token:
+    description: GitHub Access Token
+    required: false
+    default: ${{ github.token }}
 runs:
   using: 'node12'
   main: 'dist/index.js'

dist/index.js (vendored, 6940 lines): diff suppressed because one or more lines are too long

dist/index.js.map (vendored, new file, 1 line): diff suppressed because one or more lines are too long

dist/licenses.txt (vendored, new file, 2581 lines): diff suppressed because it is too large

dist/sourcemap-register.js (vendored, new file, 1 line): diff suppressed because one or more lines are too long

package-lock.json (generated, 6253 lines): diff suppressed because it is too large

package.json

@@ -4,11 +4,11 @@
   "description": "",
   "main": "lib/nextcloud-artifacts.js",
   "scripts": {
-    "build": "npx tsc",
+    "build": "tsc",
     "format": "prettier --write **/*.ts",
     "format-check": "prettier --check **/*.ts",
     "lint": "eslint src/**/*.ts",
-    "package": "npx ncc build --source-map --license licenses.txt",
+    "package": "ncc build --source-map --license licenses.txt",
     "test": "jest --ci"
   },
   "repository": {
@@ -44,6 +44,7 @@
     "@types/uuid": "^8.3.0",
     "@typescript-eslint/eslint-plugin": "^4.16.1",
     "@typescript-eslint/parser": "^4.16.1",
+    "@vercel/ncc": "^0.28.6",
     "dotenv": "^10.0.0",
     "eslint": "^7.21.0",
     "eslint-plugin-github": "^4.1.3",

src/ActionInputs.ts (new file, 48 lines)

@@ -0,0 +1,48 @@
import * as core from '@actions/core'
import {NoFileOption} from './NoFileOption'
import {Inputs} from './Inputs'

export class ActionInputs implements Inputs {
  get ArtifactName(): string {
    return core.getInput('name')
  }

  get ArtifactPath(): string {
    return core.getInput('path')
  }

  get Retention(): string {
    return core.getInput('retention-days')
  }

  get Endpoint(): string {
    return core.getInput('nextcloud-url')
  }

  get Username(): string {
    return core.getInput('nextcloud-username')
  }

  get Password(): string {
    return core.getInput('nextcloud-password')
  }

  get Token(): string {
    return core.getInput('token', { required: true })
  }

  get NoFileBehvaior(): NoFileOption {
    const notFoundAction = core.getInput('if-no-files-found') || NoFileOption.warn
    const noFileBehavior: NoFileOption = NoFileOption[notFoundAction as keyof typeof NoFileOption]
    if (!noFileBehavior) {
      core.setFailed(
        `Unrecognized ${'ifNoFilesFound'} input. Provided: ${notFoundAction}. Available options: ${Object.keys(
          NoFileOption
        )}`
      )
    }
    return noFileBehavior
  }
}
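For reference, a minimal test sketch (not part of this commit) showing how these getters could be exercised: @actions/core reads each input from an INPUT_<NAME> environment variable, so the values below stand in for what action.yml and the workflow would provide. The file name, import paths, and the jest setup are assumptions based on the repo's existing "test": "jest --ci" script.

// ActionInputs.test.ts -- illustrative sketch only, not part of this commit.
// @actions/core resolves core.getInput('x') from process.env['INPUT_X']
// (upper-cased, spaces replaced by underscores; hyphens are kept).
import { ActionInputs } from '../src/ActionInputs'
import { NoFileOption } from '../src/NoFileOption'

describe('ActionInputs', () => {
  beforeEach(() => {
    process.env['INPUT_NAME'] = 'my-artifact'
    process.env['INPUT_TOKEN'] = 'ghs_example' // normally defaulted to ${{ github.token }}
    process.env['INPUT_IF-NO-FILES-FOUND'] = 'error'
  })

  it('maps action.yml inputs to typed getters', () => {
    const inputs = new ActionInputs()
    expect(inputs.ArtifactName).toBe('my-artifact')
    expect(inputs.Token).toBe('ghs_example')
    expect(inputs.NoFileBehvaior).toBe(NoFileOption.error)
  })
})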

src/FileFinder.ts

@@ -1,146 +1,137 @@
import * as glob from '@actions/glob'
import { stat } from 'fs'
import { debug, info } from '@actions/core'
import * as path from 'path'
import { promisify } from 'util'

const stats = promisify(stat)

export class FileFinder {
  private static DefaultGlobOptions: glob.GlobOptions = {
    followSymbolicLinks: true,
    implicitDescendants: true,
    omitBrokenSymbolicLinks: true
  }

  private globOptions: glob.GlobOptions

  constructor(private searchPath: string, globOptions?: glob.GlobOptions) {
    this.globOptions = globOptions || FileFinder.DefaultGlobOptions
  }

  async findFiles() {
    const searchResults: string[] = []
    const globber = await glob.create(this.searchPath, this.globOptions)
    const rawSearchResults: string[] = await globber.glob()

    /*
      Files are saved with case insensitivity. Uploading both a.txt and A.txt will cause files to be overwritten.
      Detect any files that could be overwritten for user awareness.
    */
    const set = new Set<string>()

    /*
      Directories will be rejected if attempted to be uploaded. This includes empty
      directories, so filter any directories out of the raw search results.
    */
    for (const searchResult of rawSearchResults) {
      const fileStats = await stats(searchResult)
      // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
      if (!fileStats.isDirectory()) {
        debug(`File:${searchResult} was found using the provided searchPath`)
        searchResults.push(searchResult)

        // detect any files that would be overwritten because of case insensitivity
        if (set.has(searchResult.toLowerCase())) {
          info(
            `Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`
          )
        } else {
          set.add(searchResult.toLowerCase())
        }
      } else {
        debug(`Removing ${searchResult} from rawSearchResults because it is a directory`)
      }
    }

    // Calculate the root directory for the artifact using the search paths that were utilized
    const searchPaths: string[] = globber.getSearchPaths()
    if (searchPaths.length > 1) {
      info(`Multiple search paths detected. Calculating the least common ancestor of all paths`)
      const lcaSearchPath = this.getMultiPathLCA(searchPaths)
      info(`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`)

      return {
        filesToUpload: searchResults,
        rootDirectory: lcaSearchPath
      }
    }

    /*
      Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is
      not preserved and the root directory will be the single file's parent directory.
    */
    if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) {
      return {
        filesToUpload: searchResults,
        rootDirectory: path.dirname(searchResults[0])
      }
    }

    return {
      filesToUpload: searchResults,
      rootDirectory: searchPaths[0]
    }
  }

  private getMultiPathLCA(searchPaths: string[]): string {
    if (searchPaths.length < 2) {
      throw new Error('At least two search paths must be provided')
    }

    const commonPaths = new Array<string>()
    const splitPaths = new Array<string[]>()
    let smallestPathLength = Number.MAX_SAFE_INTEGER

    // split each of the search paths using the platform specific separator
    for (const searchPath of searchPaths) {
      debug(`Using search path ${searchPath}`)
      const splitSearchPath = path.normalize(searchPath).split(path.sep)

      // keep track of the smallest path length so that we don't accidentally later go out of bounds
      smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length)
      splitPaths.push(splitSearchPath)
    }

    // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
    if (searchPaths[0].startsWith(path.sep)) {
      commonPaths.push(path.sep)
    }

    let splitIndex = 0
    // function to check if the paths are the same at a specific index
    function isPathTheSame(): boolean {
      const compare = splitPaths[0][splitIndex]
      for (let i = 1; i < splitPaths.length; i++) {
        if (compare !== splitPaths[i][splitIndex]) {
          // a non-common index has been reached
          return false
        }
      }
      return true
    }

    // loop over all the search paths until there is a non-common ancestor or we go out of bounds
    while (splitIndex < smallestPathLength) {
      if (!isPathTheSame()) {
        break
      }
      // if all are the same, add to the end result & increment the index
      commonPaths.push(splitPaths[0][splitIndex])
      splitIndex++
    }

    return path.join(...commonPaths)
  }
}
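For reference, a hedged usage sketch (not part of this commit) of the root-directory calculation above: @actions/glob accepts newline-separated patterns, so passing two patterns rooted in different directories exercises getMultiPathLCA. The example paths and the import location are illustrative assumptions.

// FileFinder-demo.ts -- illustrative sketch only, not part of this commit.
import { FileFinder } from './src/FileFinder'

async function demo(): Promise<void> {
  // Two hypothetical patterns rooted in different directories
  const patterns = ['/home/user/files/plz-upload/**', '/home/user/files/other/*.log'].join('\n')
  const finder = new FileFinder(patterns)
  const { filesToUpload, rootDirectory } = await finder.findFiles()

  // With more than one search path, rootDirectory is their least common
  // ancestor (here /home/user/files), and each uploaded file keeps its
  // path relative to that root.
  console.log(rootDirectory, filesToUpload.length)
}

demo().catch(err => console.error(err))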

src/NoFileOption.ts

@@ -1,16 +1,16 @@
export enum NoFileOption {
  /**
   * Default. Output a warning but do not fail the action
   */
  warn = 'warn',

  /**
   * Fail the action with an error message
   */
  error = 'error',

  /**
   * Do not output any warnings or errors, the action does not fail
   */
  ignore = 'ignore'
}

src/nextcloud-artifacts.ts

@@ -1,12 +1,15 @@
-import { Inputs } from './Inputs';
-import { NextcloudArtifact } from './nextcloud/NextcloudArtifact';
-import * as core from '@actions/core';
+import {NextcloudArtifact} from './nextcloud/NextcloudArtifact'
+import * as core from '@actions/core'
+import {ActionInputs} from './ActionInputs'
 
-try {
-  var artifact = new NextcloudArtifact(Inputs.ArtifactName, Inputs.ArtifactPath, Inputs.NoFileBehvaior);
-  artifact.run()
-    .catch(e => core.setFailed(e));
-  core.info("Finished");
-} catch (error) {
-  core.setFailed(error.message);
-}
+async function run() {
+  try {
+    const artifact = new NextcloudArtifact(new ActionInputs())
+    await artifact.run()
+    core.info('Finished')
+  } catch (error) {
+    core.setFailed(error.message)
+  }
+}
+
+run()

src/nextcloud/NextcloudArtifact.ts

@@ -1,64 +1,139 @@
import * as core from '@actions/core'
import * as github from '@actions/github'
import { GitHub } from '@actions/github/lib/utils'

import { FileFinder } from '../FileFinder'
import { Inputs } from '../Inputs'
import { NextcloudClient } from './NextcloudClient'
import { NoFileOption } from '../NoFileOption'

export class NextcloudArtifact {
  readonly octokit: InstanceType<typeof GitHub>
  readonly context = NextcloudArtifact.getCheckRunContext()
  readonly token: string
  readonly name: string
  readonly path: string
  readonly errorBehavior: NoFileOption

  constructor(private inputs: Inputs) {
    this.token = inputs.Token
    this.name = inputs.ArtifactName
    this.path = inputs.ArtifactPath
    this.errorBehavior = inputs.NoFileBehvaior
    this.octokit = github.getOctokit(this.token)
  }

  async run() {
    const fileFinder = new FileFinder(this.path)
    const files = await fileFinder.findFiles()

    if (files.filesToUpload.length > 0) {
      await this.uploadFiles(files)
    } else {
      this.logNoFilesFound()
    }
  }

  private static getCheckRunContext(): { sha: string; runId: number } {
    if (github.context.eventName === 'workflow_run') {
      core.info('Action was triggered by workflow_run: using SHA and RUN_ID from triggering workflow')
      const event = github.context.payload
      if (!event.workflow_run) {
        throw new Error("Event of type 'workflow_run' is missing 'workflow_run' field")
      }
      return {
        sha: event.workflow_run.head_commit.id,
        runId: event.workflow_run.id
      }
    }

    const runId = github.context.runId
    if (github.context.payload.pull_request) {
      core.info(`Action was triggered by ${github.context.eventName}: using SHA from head of source branch`)
      const pr = github.context.payload.pull_request
      return { sha: pr.head.sha, runId }
    }

    return { sha: github.context.sha, runId }
  }

  private async uploadFiles(files: { filesToUpload: string[]; rootDirectory: string }) {
    this.logUpload(files.filesToUpload.length, files.rootDirectory)
    const createResp = await this.octokit.rest.checks.create({
      head_sha: this.context.sha,
      name: 'Nextcloud Artifacts',
      status: 'in_progress',
      output: {
        title: 'Nextcloud Artifacts',
        summary: ''
      },
      ...github.context.repo
    })

    const client = new NextcloudClient(
      this.inputs.Endpoint,
      this.name,
      files.rootDirectory,
      this.inputs.Username,
      this.inputs.Password
    )

    try {
      const shareableUrl = await client.uploadFiles(files.filesToUpload)
      const resp = await this.octokit.rest.checks.update({
        check_run_id: createResp.data.id,
        conclusion: 'success',
        status: 'completed',
        output: {
          title: 'Nextcloud Artifacts',
          summary: `${this.name}: ${shareableUrl}`
        },
        ...github.context.repo
      })
      core.info(`Check run create response: ${resp.status}`)
      core.info(`Check run URL: ${resp.data.url}`)
      core.info(`Check run HTML: ${resp.data.html_url}`)
    } catch (error) {
      await this.octokit.rest.checks.update({
        check_run_id: createResp.data.id,
        conclusion: 'failure',
        status: 'completed',
        output: {
          title: 'Nextcloud Artifacts'
        },
        ...github.context.repo
      })
    }
  }

  private logUpload(fileCount: number, rootDirectory: string) {
    const s = fileCount === 1 ? '' : 's'
    core.info(`With the provided path, there will be ${fileCount} file${s} uploaded`)
    core.debug(`Root artifact directory is ${rootDirectory}`)
    if (fileCount > 10000) {
      core.warning(
        `There are over 10,000 files in this artifact, consider creating an archive before upload to improve upload performance.`
      )
    }
  }

  private logNoFilesFound() {
    const errorMessage = `No files were found with the provided path: ${this.path}. No artifacts will be uploaded.`
    switch (this.errorBehavior) {
      case NoFileOption.warn: {
        core.warning(errorMessage)
        break
      }
      case NoFileOption.error: {
        core.setFailed(errorMessage)
        break
      }
      case NoFileOption.ignore: {
        core.info(errorMessage)
        break
      }
    }
  }
}
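Since the point of this commit is to surface the Nextcloud share link through a check run, here is a hedged sketch (not part of this commit) of how that link could be read back for a commit with the Octokit checks API. The owner, repo, ref, and token source are placeholders; the summary format assumed is the "<artifact name>: <shareable URL>" string written above.

// check-run-link.ts -- illustrative sketch only, not part of this commit.
import * as github from '@actions/github'

async function printArtifactLink(token: string, owner: string, repo: string, ref: string): Promise<void> {
  const octokit = github.getOctokit(token)
  // List check runs for the commit and pick the one this action creates
  const resp = await octokit.rest.checks.listForRef({ owner, repo, ref })
  const run = resp.data.check_runs.find(c => c.name === 'Nextcloud Artifacts')
  // On success the summary contains the artifact name and the share URL
  console.log(run?.output.summary ?? 'No Nextcloud Artifacts check run found')
}

printArtifactLink(process.env.GITHUB_TOKEN ?? '', 'owner', 'repo', 'main').catch(e => console.error(e))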

src/nextcloud/NextcloudClient.ts

@@ -1,64 +1,65 @@
import * as fsSync from 'fs'
import * as path from 'path'
import * as core from '@actions/core'
import * as os from 'os'
import * as archiver from 'archiver'
import fetch, { HeadersInit } from 'node-fetch'
import btoa from 'btoa'
import { v4 as uuidv4 } from 'uuid'
import * as webdav from 'webdav'

const fs = fsSync.promises

interface FileSpec {
  absolutePath: string
  uploadPath: string
}

export class NextcloudClient {
  private guid: string
  private headers: HeadersInit
  private davClient

  constructor(
    private endpoint: string,
    private artifact: string,
    private rootDirectory: string,
    private username: string,
    private password: string
  ) {
    this.guid = uuidv4()
    this.headers = { Authorization: 'Basic ' + btoa(`${this.username}:${this.password}`) }
    this.davClient = webdav.createClient(`${this.endpoint}/remote.php/dav/files/${this.username}`, {
      username: this.username,
      password: this.password
    })
  }

  async uploadFiles(files: string[]): Promise<string> {
    core.info('Preparing upload...')
    const spec = this.uploadSpec(files)
    core.info('Zipping files...')
    const zip = await this.zipFiles(spec)
    core.info('Uploading to Nextcloud...')
    const filePath = await this.upload(zip)
    core.info(`File path: ${filePath}`)
    core.info('Sharing file...')
    return await this.shareFile(filePath)
  }

  private uploadSpec(files: string[]): FileSpec[] {
    const specifications = []
    if (!fsSync.existsSync(this.rootDirectory)) {
      throw new Error(`this.rootDirectory ${this.rootDirectory} does not exist`)
    }
    if (!fsSync.lstatSync(this.rootDirectory).isDirectory()) {
      throw new Error(`this.rootDirectory ${this.rootDirectory} is not a valid directory`)
    }
    // Normalize and resolve, this allows for either absolute or relative paths to be used
    let root = path.normalize(this.rootDirectory)
    root = path.resolve(root)
    /*
      Example to demonstrate behavior

      Input:
@@ -77,20 +78,20 @@ export class NextcloudClient {
        ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt']
      ]
    */
    for (let file of files) {
      if (!fsSync.existsSync(file)) {
        throw new Error(`File ${file} does not exist`)
      }
      if (!fsSync.lstatSync(file).isDirectory()) {
        // Normalize and resolve, this allows for either absolute or relative paths to be used
        file = path.normalize(file)
        file = path.resolve(file)
        if (!file.startsWith(root)) {
          throw new Error(`The rootDirectory: ${root} is not a parent directory of the file: ${file}`)
        }
        // Check for forbidden characters in file paths that will be rejected during upload
        const uploadPath = file.replace(root, '')
        /*
          uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
          be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts
@@ -100,100 +101,105 @@ export class NextcloudClient {
          join('artifact-name', 'file-to-upload.txt')
          join('artifact-name', '/file-to-upload.txt')
        */
        specifications.push({
          absolutePath: file,
          uploadPath: path.join(this.artifact, uploadPath)
        })
      } else {
        // Directories are rejected by the server during upload
        core.debug(`Removing ${file} from rawSearchResults because it is a directory`)
      }
    }
    return specifications
  }

  private async zipFiles(specs: FileSpec[]): Promise<string> {
    const tempArtifactDir = path.join(os.tmpdir(), this.guid)
    const artifactPath = path.join(tempArtifactDir, `artifact-${this.artifact}`)
    await fs.mkdir(path.join(artifactPath, this.artifact), { recursive: true })

    const copies = []
    for (const spec of specs) {
      const dstpath = path.join(artifactPath, spec.uploadPath)
      const dstDir = path.dirname(dstpath)
      if (!fsSync.existsSync(dstDir)) {
        await fs.mkdir(dstDir, { recursive: true })
      }
      copies.push(fs.copyFile(spec.absolutePath, dstpath))
    }
    await Promise.all(copies)
    core.info(`files: ${await fs.readdir(path.join(artifactPath, this.artifact))}`)

    const archivePath = path.join(artifactPath, `${this.artifact}.zip`)
    await this.zip(path.join(artifactPath, this.artifact), archivePath)
    core.info(`archive stat: ${(await fs.stat(archivePath)).size}`)
    return archivePath
  }

  private async zip(dirpath: string, destpath: string) {
    const archive = archiver.create('zip', { zlib: { level: 9 } })
    const stream = archive.directory(dirpath, false).pipe(fsSync.createWriteStream(destpath))
    await archive.finalize()

    return await new Promise<void>((resolve, reject) => {
      stream.on('error', e => reject(e)).on('close', () => resolve())
    })
  }

  private async upload(file: string): Promise<string> {
    const remoteFileDir = `/artifacts/${this.guid}`
    core.info('Checking directory...')
    if (!(await this.davClient.exists(remoteFileDir))) {
      core.info('Creating directory...')
      await this.davClient.createDirectory(remoteFileDir, { recursive: true })
    }

    const remoteFilePath = `${remoteFileDir}/${this.artifact}.zip`
    core.info(`Transferring file... (${file})`)

    const fileStat = await fs.stat(file)
    const fileStream = fsSync.createReadStream(file)
    const remoteStream = this.davClient.createWriteStream(remoteFilePath, {
      headers: { 'Content-Length': fileStat.size.toString() }
    })

    fileStream.pipe(remoteStream)

    await new Promise<void>((resolve, reject) => {
      fileStream.on('error', e => reject(e)).on('finish', () => resolve())
    })
    return remoteFilePath
  }

  private async shareFile(remoteFilePath: string): Promise<string> {
    const url = this.endpoint + `/ocs/v2.php/apps/files_sharing/api/v1/shares`
    const body = {
      path: remoteFilePath,
      shareType: 3,
      publicUpload: 'false',
      permissions: 1
    }

    const res = await fetch(url, {
      method: 'POST',
      headers: Object.assign(this.headers, {
        'OCS-APIRequest': true
      }),
      body: JSON.stringify(body)
    })

    const result = await res.text()
    const re = /<url>(?<share_url>.*)<\/url>/
    const match = re.exec(result)
    const sharableUrl = (match?.groups || {})['share_url']
    if (!sharableUrl) {
      throw new Error('Failed to parse sharable URL.')
    }
    return sharableUrl
  }
}
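For reference, a standalone sketch (not part of this commit) of the share-link extraction used in shareFile: the same named-group regex applied to a trimmed, approximate OCS XML payload. The sample response below is an assumption for illustration, not a verbatim Nextcloud reply.

// parse-share-url.ts -- standalone sketch only, not part of this commit.
const sample = `<?xml version="1.0"?>
<ocs>
  <data>
    <id>42</id>
    <url>https://cloud.example.com/s/AbCdEfGhIjKlMnO</url>
  </data>
</ocs>`

function parseShareUrl(ocsXml: string): string {
  // Same pattern as NextcloudClient.shareFile: capture the public link element
  const re = /<url>(?<share_url>.*)<\/url>/
  const match = re.exec(ocsXml)
  const sharableUrl = (match?.groups || {})['share_url']
  if (!sharableUrl) {
    throw new Error('Failed to parse sharable URL.')
  }
  return sharableUrl
}

console.log(parseShareUrl(sample)) // -> https://cloud.example.com/s/AbCdEfGhIjKlMnO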

workspace.code-workspace (new file, 9 lines)

@@ -0,0 +1,9 @@
{
  "folders": [
    {
      "path": "."
    }
  ],
  "remoteAuthority": "wsl+Ubuntu-20.04",
  "settings": {}
}