test output link
commit a48c618622 (parent 8167ea6a3e)
@@ -30,6 +30,10 @@ inputs:
      Duration after which artifact will expire in days. 0 means using default retention.
      Minimum 1 day.
      Maximum 90 days unless changed from the repository settings page.
  token:
    description: GitHub Access Token
    required: false
    default: ${{ github.token }}
runs:
  using: 'node12'
  main: 'dist/index.js'
dist/index.js (vendored): 6948 changed lines. File diff suppressed because one or more lines are too long.
dist/index.js.map (vendored, normal file): 1 changed line. File diff suppressed because one or more lines are too long.
dist/licenses.txt (vendored, normal file): 2581 changed lines. File diff suppressed because it is too large.
dist/sourcemap-register.js (vendored, normal file): 1 changed line. File diff suppressed because one or more lines are too long.
package-lock.json (generated): 6253 changed lines. File diff suppressed because it is too large.
@@ -4,11 +4,11 @@
  "description": "",
  "main": "lib/nextcloud-artifacts.js",
  "scripts": {
    "build": "npx tsc",
    "build": "tsc",
    "format": "prettier --write **/*.ts",
    "format-check": "prettier --check **/*.ts",
    "lint": "eslint src/**/*.ts",
    "package": "npx ncc build --source-map --license licenses.txt",
    "package": "ncc build --source-map --license licenses.txt",
    "test": "jest --ci"
  },
  "repository": {
@@ -44,6 +44,7 @@
  "@types/uuid": "^8.3.0",
  "@typescript-eslint/eslint-plugin": "^4.16.1",
  "@typescript-eslint/parser": "^4.16.1",
  "@vercel/ncc": "^0.28.6",
  "dotenv": "^10.0.0",
  "eslint": "^7.21.0",
  "eslint-plugin-github": "^4.1.3",
src/ActionInputs.ts (normal file): 48 changed lines.
@@ -0,0 +1,48 @@
import * as core from '@actions/core'
import {NoFileOption} from './NoFileOption'
import {Inputs} from './Inputs'

export class ActionInputs implements Inputs {
  get ArtifactName(): string {
    return core.getInput('name')
  }

  get ArtifactPath(): string {
    return core.getInput('path')
  }

  get Retention(): string {
    return core.getInput('retention-days')
  }

  get Endpoint(): string {
    return core.getInput('nextcloud-url')
  }

  get Username(): string {
    return core.getInput('nextcloud-username')
  }

  get Password(): string {
    return core.getInput('nextcloud-password')
  }

  get Token(): string {
    return core.getInput('token', { required: true })
  }

  get NoFileBehvaior(): NoFileOption {
    const notFoundAction = core.getInput('if-no-files-found') || NoFileOption.warn
    const noFileBehavior: NoFileOption = NoFileOption[notFoundAction as keyof typeof NoFileOption]

    if (!noFileBehavior) {
      core.setFailed(
        `Unrecognized ${'ifNoFilesFound'} input. Provided: ${notFoundAction}. Available options: ${Object.keys(
          NoFileOption
        )}`
      )
    }

    return noFileBehavior
  }
}
@@ -1,146 +1,137 @@
import * as glob from '@actions/glob'
import {stat} from 'fs'
import {debug, info} from '@actions/core'
import { stat } from 'fs'
import { debug, info } from '@actions/core'
import * as path from 'path'
import {promisify} from 'util'
import { promisify } from 'util'
const stats = promisify(stat)

export class FileFinder {
private static DefaultGlobOptions: glob.GlobOptions = {
followSymbolicLinks: true,
implicitDescendants: true,
omitBrokenSymbolicLinks: true
};
private static DefaultGlobOptions: glob.GlobOptions = {
followSymbolicLinks: true,
implicitDescendants: true,
omitBrokenSymbolicLinks: true
}

private globOptions: glob.GlobOptions
private globOptions: glob.GlobOptions

public constructor(private searchPath: string, globOptions?: glob.GlobOptions) {
this.globOptions = globOptions || FileFinder.DefaultGlobOptions;
}
constructor(private searchPath: string, globOptions?: glob.GlobOptions) {
this.globOptions = globOptions || FileFinder.DefaultGlobOptions
}

public async findFiles() {
const searchResults: string[] = []
const globber = await glob.create(
this.searchPath,
this.globOptions
);
async findFiles() {
const searchResults: string[] = []
const globber = await glob.create(this.searchPath, this.globOptions)

const rawSearchResults: string[] = await globber.glob()
const rawSearchResults: string[] = await globber.glob()

/*
/*
Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten
Detect any files that could be overwritten for user awareness
*/
const set = new Set<string>()
const set = new Set<string>()

/*
/*
Directories will be rejected if attempted to be uploaded. This includes just empty
directories so filter any directories out from the raw search results
*/
for (const searchResult of rawSearchResults) {
const fileStats = await stats(searchResult)
// isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
if (!fileStats.isDirectory()) {
debug(`File:${searchResult} was found using the provided searchPath`)
searchResults.push(searchResult)
for (const searchResult of rawSearchResults) {
const fileStats = await stats(searchResult)
// isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
if (!fileStats.isDirectory()) {
debug(`File:${searchResult} was found using the provided searchPath`)
searchResults.push(searchResult)

// detect any files that would be overwritten because of case insensitivity
if (set.has(searchResult.toLowerCase())) {
info(
`Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`
)
} else {
set.add(searchResult.toLowerCase())
}
} else {
debug(
`Removing ${searchResult} from rawSearchResults because it is a directory`
)
}
// detect any files that would be overwritten because of case insensitivity
if (set.has(searchResult.toLowerCase())) {
info(
`Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`
)
} else {
set.add(searchResult.toLowerCase())
}
} else {
debug(`Removing ${searchResult} from rawSearchResults because it is a directory`)
}
}

// Calculate the root directory for the artifact using the search paths that were utilized
const searchPaths: string[] = globber.getSearchPaths()
// Calculate the root directory for the artifact using the search paths that were utilized
const searchPaths: string[] = globber.getSearchPaths()

if (searchPaths.length > 1) {
info(
`Multiple search paths detected. Calculating the least common ancestor of all paths`
)
const lcaSearchPath = this.getMultiPathLCA(searchPaths)
info(
`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`
)
if (searchPaths.length > 1) {
info(`Multiple search paths detected. Calculating the least common ancestor of all paths`)
const lcaSearchPath = this.getMultiPathLCA(searchPaths)
info(`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`)

return {
filesToUpload: searchResults,
rootDirectory: lcaSearchPath
}
}
return {
filesToUpload: searchResults,
rootDirectory: lcaSearchPath
}
}

/*
/*
Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is
not preserved and the root directory will be the single files parent directory
*/
if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) {
return {
filesToUpload: searchResults,
rootDirectory: path.dirname(searchResults[0])
}
}

return {
filesToUpload: searchResults,
rootDirectory: searchPaths[0]
}
if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) {
return {
filesToUpload: searchResults,
rootDirectory: path.dirname(searchResults[0])
}
}

private getMultiPathLCA(searchPaths: string[]): string {
if (searchPaths.length < 2) {
throw new Error('At least two search paths must be provided')
return {
filesToUpload: searchResults,
rootDirectory: searchPaths[0]
}
}

private getMultiPathLCA(searchPaths: string[]): string {
if (searchPaths.length < 2) {
throw new Error('At least two search paths must be provided')
}

const commonPaths = new Array<string>()
const splitPaths = new Array<string[]>()
let smallestPathLength = Number.MAX_SAFE_INTEGER

// split each of the search paths using the platform specific separator
for (const searchPath of searchPaths) {
debug(`Using search path ${searchPath}`)

const splitSearchPath = path.normalize(searchPath).split(path.sep)

// keep track of the smallest path length so that we don't accidentally later go out of bounds
smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length)
splitPaths.push(splitSearchPath)
}

// on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
if (searchPaths[0].startsWith(path.sep)) {
commonPaths.push(path.sep)
}

let splitIndex = 0
// function to check if the paths are the same at a specific index
function isPathTheSame(): boolean {
const compare = splitPaths[0][splitIndex]
for (let i = 1; i < splitPaths.length; i++) {
if (compare !== splitPaths[i][splitIndex]) {
// a non-common index has been reached
return false
}

const commonPaths = new Array<string>()
const splitPaths = new Array<string[]>()
let smallestPathLength = Number.MAX_SAFE_INTEGER

// split each of the search paths using the platform specific separator
for (const searchPath of searchPaths) {
debug(`Using search path ${searchPath}`)

const splitSearchPath = path.normalize(searchPath).split(path.sep)

// keep track of the smallest path length so that we don't accidentally later go out of bounds
smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length)
splitPaths.push(splitSearchPath)
}

// on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
if (searchPaths[0].startsWith(path.sep)) {
commonPaths.push(path.sep)
}

let splitIndex = 0
// function to check if the paths are the same at a specific index
function isPathTheSame(): boolean {
const compare = splitPaths[0][splitIndex]
for (let i = 1; i < splitPaths.length; i++) {
if (compare !== splitPaths[i][splitIndex]) {
// a non-common index has been reached
return false
}
}
return true
}

// loop over all the search paths until there is a non-common ancestor or we go out of bounds
while (splitIndex < smallestPathLength) {
if (!isPathTheSame()) {
break
}
// if all are the same, add to the end result & increment the index
commonPaths.push(splitPaths[0][splitIndex])
splitIndex++
}
return path.join(...commonPaths)
}
}
return true
}

// loop over all the search paths until there is a non-common ancestor or we go out of bounds
while (splitIndex < smallestPathLength) {
if (!isPathTheSame()) {
break
}
// if all are the same, add to the end result & increment the index
commonPaths.push(splitPaths[0][splitIndex])
splitIndex++
}
return path.join(...commonPaths)
}
}
@@ -1,16 +1,16 @@
export enum NoFileOption {
/**
* Default. Output a warning but do not fail the action
*/
warn = 'warn',

/**
* Fail the action with an error message
*/
error = 'error',

/**
* Do not output any warnings or errors, the action does not fail
*/
ignore = 'ignore',
}
/**
* Default. Output a warning but do not fail the action
*/
warn = 'warn',

/**
* Fail the action with an error message
*/
error = 'error',

/**
* Do not output any warnings or errors, the action does not fail
*/
ignore = 'ignore'
}
@@ -1,12 +1,15 @@
import { Inputs } from './Inputs';
import { NextcloudArtifact } from './nextcloud/NextcloudArtifact';
import * as core from '@actions/core';
import {NextcloudArtifact} from './nextcloud/NextcloudArtifact'
import * as core from '@actions/core'
import {ActionInputs} from './ActionInputs'

try {
var artifact = new NextcloudArtifact(Inputs.ArtifactName, Inputs.ArtifactPath, Inputs.NoFileBehvaior);
artifact.run()
.catch(e => core.setFailed(e));
core.info("Finished");
} catch (error) {
core.setFailed(error.message);
async function run() {
try {
const artifact = new NextcloudArtifact(new ActionInputs())
await artifact.run()
core.info('Finished')
} catch (error) {
core.setFailed(error.message)
}
}

run()
@@ -1,64 +1,139 @@
import * as core from '@actions/core';
import { FileFinder } from '../FileFinder';
import { Inputs } from '../Inputs';
import { NextcloudClient } from './NextcloudClient';
import { NoFileOption } from '../NoFileOption';
import * as core from '@actions/core'
import * as github from '@actions/github'
import { GitHub } from '@actions/github/lib/utils'

import { FileFinder } from '../FileFinder'
import { Inputs } from '../Inputs'
import { NextcloudClient } from './NextcloudClient'
import { NoFileOption } from '../NoFileOption'

export class NextcloudArtifact {
public constructor(
private name: string,
private path: string,
private errorBehavior: NoFileOption) { }
readonly octokit: InstanceType<typeof GitHub>
readonly context = NextcloudArtifact.getCheckRunContext()
readonly token: string
readonly name: string
readonly path: string
readonly errorBehavior: NoFileOption

public async run() {
const fileFinder = new FileFinder(this.path);
const files = await fileFinder.findFiles();
constructor(private inputs: Inputs) {
this.token = inputs.Token;
this.name = inputs.ArtifactName
this.path = inputs.ArtifactPath
this.errorBehavior = inputs.NoFileBehvaior
this.name = inputs.ArtifactName
this.octokit = github.getOctokit(this.token)
}

if (files.filesToUpload.length > 0) {
await this.uploadFiles(files);
}
else {
this.logNoFilesFound();
}
async run() {
const fileFinder = new FileFinder(this.path)
const files = await fileFinder.findFiles()

if (files.filesToUpload.length > 0) {
await this.uploadFiles(files)
} else {
this.logNoFilesFound()
}
}

private static getCheckRunContext(): { sha: string; runId: number } {
if (github.context.eventName === 'workflow_run') {
core.info('Action was triggered by workflow_run: using SHA and RUN_ID from triggering workflow')
const event = github.context.payload
if (!event.workflow_run) {
throw new Error("Event of type 'workflow_run' is missing 'workflow_run' field")
}
return {
sha: event.workflow_run.head_commit.id,
runId: event.workflow_run.id
}
}

private async uploadFiles(files: { filesToUpload: string[]; rootDirectory: string; }) {
this.logUpload(files.filesToUpload.length, files.rootDirectory);

const client = new NextcloudClient(Inputs.Endpoint, this.name, files.rootDirectory, Inputs.Username, Inputs.Password);

await client.uploadFiles(files.filesToUpload);
const runId = github.context.runId
if (github.context.payload.pull_request) {
core.info(`Action was triggered by ${github.context.eventName}: using SHA from head of source branch`)
const pr = github.context.payload.pull_request
return { sha: pr.head.sha, runId }
}

private logUpload(fileCount: number, rootDirectory: string) {
const s = fileCount === 1 ? '' : 's';
core.info(
`With the provided path, there will be ${fileCount} file${s} uploaded`
);
core.debug(`Root artifact directory is ${rootDirectory}`);
return { sha: github.context.sha, runId }
}

if (fileCount > 10000) {
core.warning(
`There are over 10,000 files in this artifact, consider create an archive before upload to improve the upload performance.`
);
}
}
private async uploadFiles(files: { filesToUpload: string[]; rootDirectory: string }) {
this.logUpload(files.filesToUpload.length, files.rootDirectory)
const createResp = await this.octokit.rest.checks.create({
head_sha: this.context.sha,
name: 'Nextcloud Artifacts',
status: 'in_progress',
output: {
title: 'Nextcloud Artifacts',
summary: ''
},
...github.context.repo
})

private logNoFilesFound() {
const errorMessage = `No files were found with the provided path: ${this.path}. No artifacts will be uploaded.`;
switch (this.errorBehavior) {
case NoFileOption.warn: {
core.warning(errorMessage);
break;
}
case NoFileOption.error: {
core.setFailed(errorMessage);
break;
}
case NoFileOption.ignore: {
core.info(errorMessage);
break;
}
}
const client = new NextcloudClient(
this.inputs.Endpoint,
this.name,
files.rootDirectory,
this.inputs.Username,
this.inputs.Password
)

try {
const shareableUrl = await client.uploadFiles(files.filesToUpload)
const resp = await this.octokit.rest.checks.update({
check_run_id: createResp.data.id,
conclusion: 'success',
status: 'completed',
output: {
title: 'Nextcloud Artifacts',
summary: `${this.name}: ${shareableUrl}`
},
...github.context.repo
})
core.info(`Check run create response: ${resp.status}`)
core.info(`Check run URL: ${resp.data.url}`)
core.info(`Check run HTML: ${resp.data.html_url}`)
} catch (error) {
await this.octokit.rest.checks.update({
check_run_id: createResp.data.id,
conclusion: 'failure',
status: 'completed',
output: {
title: 'Nextcloud Artifacts'
},
...github.context.repo
})
}
}

private logUpload(fileCount: number, rootDirectory: string) {
const s = fileCount === 1 ? '' : 's'
core.info(`With the provided path, there will be ${fileCount} file${s} uploaded`)
core.debug(`Root artifact directory is ${rootDirectory}`)

if (fileCount > 10000) {
core.warning(
`There are over 10,000 files in this artifact, consider create an archive before upload to improve the upload performance.`
)
}
}

private logNoFilesFound() {
const errorMessage = `No files were found with the provided path: ${this.path}. No artifacts will be uploaded.`
switch (this.errorBehavior) {
case NoFileOption.warn: {
core.warning(errorMessage)
break
}
case NoFileOption.error: {
core.setFailed(errorMessage)
break
}
case NoFileOption.ignore: {
core.info(errorMessage)
break
}
}
}
}
@@ -1,64 +1,65 @@
import * as fsSync from 'fs'
import * as path from 'path'
import * as core from '@actions/core';
import * as os from 'os';
import * as archiver from 'archiver';
import fetch, { HeadersInit } from 'node-fetch';
import btoa from 'btoa';
import { v4 as uuidv4 } from 'uuid';
import * as core from '@actions/core'
import * as os from 'os'
import * as archiver from 'archiver'
import fetch, { HeadersInit } from 'node-fetch'
import btoa from 'btoa'
import { v4 as uuidv4 } from 'uuid'
import * as webdav from 'webdav'

const fs = fsSync.promises;
const fs = fsSync.promises

interface FileSpec {
absolutePath: string,
uploadPath: string
absolutePath: string
uploadPath: string
}

export class NextcloudClient {
private guid: string;
private headers: HeadersInit;
private davClient;
private guid: string
private headers: HeadersInit
private davClient

public constructor(
private endpoint: string,
private artifact: string,
private rootDirectory: string,
private username: string,
private password: string) {
this.guid = uuidv4();
this.headers = { 'Authorization': 'Basic ' + btoa(`${this.username}:${this.password}`) };
this.davClient = webdav.createClient(`${this.endpoint}/remote.php/dav/files/${this.username}`, {
username: this.username,
password: this.password,
});
constructor(
private endpoint: string,
private artifact: string,
private rootDirectory: string,
private username: string,
private password: string
) {
this.guid = uuidv4()
this.headers = { Authorization: 'Basic ' + btoa(`${this.username}:${this.password}`) }
this.davClient = webdav.createClient(`${this.endpoint}/remote.php/dav/files/${this.username}`, {
username: this.username,
password: this.password
})
}

async uploadFiles(files: string[]): Promise<string> {
core.info('Preparing upload...')
const spec = this.uploadSpec(files)
core.info('Zipping files...')
const zip = await this.zipFiles(spec)

core.info('Uploading to Nextcloud...')
const filePath = await this.upload(zip)
core.info(`File path: ${filePath}`)
core.info('Sharing file...')
return await this.shareFile(filePath)
}

private uploadSpec(files: string[]): FileSpec[] {
const specifications = []
if (!fsSync.existsSync(this.rootDirectory)) {
throw new Error(`this.rootDirectory ${this.rootDirectory} does not exist`)
}

public async uploadFiles(files: string[]) {
core.info("Preparing upload...");
const spec = this.uploadSpec(files);
core.info("Zipping files...");
var zip = await this.zipFiles(spec);

core.info("Uploading to Nextcloud...");
const path = await this.upload(zip);
core.info(`File path: ${path}`);
core.info("Sharing file...");
await this.shareFile(path);
if (!fsSync.lstatSync(this.rootDirectory).isDirectory()) {
throw new Error(`this.rootDirectory ${this.rootDirectory} is not a valid directory`)
}

private uploadSpec(files: string[]): FileSpec[] {
const specifications = [];
if (!fsSync.existsSync(this.rootDirectory)) {
throw new Error(`this.rootDirectory ${this.rootDirectory} does not exist`);
}
if (!fsSync.lstatSync(this.rootDirectory).isDirectory()) {
throw new Error(`this.rootDirectory ${this.rootDirectory} is not a valid directory`);
}
// Normalize and resolve, this allows for either absolute or relative paths to be used
let root = path.normalize(this.rootDirectory);
root = path.resolve(root);
/*
// Normalize and resolve, this allows for either absolute or relative paths to be used
let root = path.normalize(this.rootDirectory)
root = path.resolve(root)
/*
Example to demonstrate behavior

Input:
@@ -77,20 +78,20 @@ export class NextcloudClient {
['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt']
]
*/
for (let file of files) {
if (!fsSync.existsSync(file)) {
throw new Error(`File ${file} does not exist`);
}
if (!fsSync.lstatSync(file).isDirectory()) {
// Normalize and resolve, this allows for either absolute or relative paths to be used
file = path.normalize(file);
file = path.resolve(file);
if (!file.startsWith(root)) {
throw new Error(`The rootDirectory: ${root} is not a parent directory of the file: ${file}`);
}
// Check for forbidden characters in file paths that will be rejected during upload
const uploadPath = file.replace(root, '');
/*
for (let file of files) {
if (!fsSync.existsSync(file)) {
throw new Error(`File ${file} does not exist`)
}
if (!fsSync.lstatSync(file).isDirectory()) {
// Normalize and resolve, this allows for either absolute or relative paths to be used
file = path.normalize(file)
file = path.resolve(file)
if (!file.startsWith(root)) {
throw new Error(`The rootDirectory: ${root} is not a parent directory of the file: ${file}`)
}
// Check for forbidden characters in file paths that will be rejected during upload
const uploadPath = file.replace(root, '')
/*
uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts

@@ -100,100 +101,105 @@ export class NextcloudClient {
join('artifact-name', 'file-to-upload.txt')
join('artifact-name', '/file-to-upload.txt')
*/
specifications.push({
absolutePath: file,
uploadPath: path.join(this.artifact, uploadPath)
});
}
else {
// Directories are rejected by the server during upload
core.debug(`Removing ${file} from rawSearchResults because it is a directory`);
}
}
return specifications;
}


private async zipFiles(specs: FileSpec[]): Promise<string> {
const tempArtifactDir = path.join(os.tmpdir(), this.guid);
const artifactPath = path.join(tempArtifactDir, `artifact-${this.artifact}`);
await fs.mkdir(path.join(artifactPath, this.artifact), { recursive: true });
const copies = [];
for (let spec of specs) {
const dstpath = path.join(artifactPath, spec.uploadPath);
const dstDir = path.dirname(dstpath);
if (!fsSync.existsSync(dstDir)) {
await fs.mkdir(dstDir, { recursive: true });
}

copies.push(fs.copyFile(spec.absolutePath, dstpath));
}

await Promise.all(copies);
core.info(`files: ${await fs.readdir(path.join(artifactPath, this.artifact))}`);

const archivePath = path.join(artifactPath, `${this.artifact}.zip`);
await this.zip(path.join(artifactPath, this.artifact), archivePath);
core.info(`archive stat: ${(await fs.stat(archivePath)).size}`);

return archivePath;
}

private async zip(dirpath: string, destpath: string) {
const archive = archiver.create('zip', { zlib: { level: 9 } });
const stream = archive.directory(dirpath, false)
.pipe(fsSync.createWriteStream(destpath));

await archive.finalize();

return await new Promise<void>((resolve, reject) => {
stream.on('error', e => reject(e))
.on('close', () => resolve());
specifications.push({
absolutePath: file,
uploadPath: path.join(this.artifact, uploadPath)
})
} else {
// Directories are rejected by the server during upload
core.debug(`Removing ${file} from rawSearchResults because it is a directory`)
}
}
return specifications
}

private async zipFiles(specs: FileSpec[]): Promise<string> {
const tempArtifactDir = path.join(os.tmpdir(), this.guid)
const artifactPath = path.join(tempArtifactDir, `artifact-${this.artifact}`)
await fs.mkdir(path.join(artifactPath, this.artifact), { recursive: true })
const copies = []
for (const spec of specs) {
const dstpath = path.join(artifactPath, spec.uploadPath)
const dstDir = path.dirname(dstpath)
if (!fsSync.existsSync(dstDir)) {
await fs.mkdir(dstDir, { recursive: true })
}

copies.push(fs.copyFile(spec.absolutePath, dstpath))
}

private async upload(file: string): Promise<string> {
const remoteFileDir = `/artifacts/${this.guid}`;
core.info("Checking directory...");
if (!(await this.davClient.exists(remoteFileDir))) {
core.info("Creating directory...");
await this.davClient.createDirectory(remoteFileDir, { recursive: true });
}
await Promise.all(copies)
core.info(`files: ${await fs.readdir(path.join(artifactPath, this.artifact))}`)

const remoteFilePath = `${remoteFileDir}/${this.artifact}.zip`;
core.info(`Transferring file... (${file})`);
const archivePath = path.join(artifactPath, `${this.artifact}.zip`)
await this.zip(path.join(artifactPath, this.artifact), archivePath)
core.info(`archive stat: ${(await fs.stat(archivePath)).size}`)

const fileStat = await fs.stat(file);
const fileStream = fsSync.createReadStream(file);
const remoteStream = this.davClient.createWriteStream(remoteFilePath, {
headers: { "Content-Length": fileStat.size.toString() },
});
return archivePath
}

fileStream.pipe(remoteStream);
private async zip(dirpath: string, destpath: string) {
const archive = archiver.create('zip', { zlib: { level: 9 } })
const stream = archive.directory(dirpath, false).pipe(fsSync.createWriteStream(destpath))

await new Promise<void>((resolve, reject) => {
fileStream.on('error', e => reject(e))
.on('finish', () => resolve());
});
await archive.finalize()

return remoteFilePath;
return await new Promise<void>((resolve, reject) => {
stream.on('error', e => reject(e)).on('close', () => resolve())
})
}

private async upload(file: string): Promise<string> {
const remoteFileDir = `/artifacts/${this.guid}`
core.info('Checking directory...')
if (!(await this.davClient.exists(remoteFileDir))) {
core.info('Creating directory...')
await this.davClient.createDirectory(remoteFileDir, { recursive: true })
}

private async shareFile(remoteFilePath: string) {
const url = this.endpoint + `/ocs/v2.php/apps/files_sharing/api/v1/shares`;
const body = {
path: remoteFilePath,
shareType: 3,
publicUpload: "false",
permissions: 1,
};
const remoteFilePath = `${remoteFileDir}/${this.artifact}.zip`
core.info(`Transferring file... (${file})`)

const res = await fetch(url, {
method: 'PUT',
headers: this.headers,
body: JSON.stringify(body),
});
res.status
core.info(await res.text())
const fileStat = await fs.stat(file)
const fileStream = fsSync.createReadStream(file)
const remoteStream = this.davClient.createWriteStream(remoteFilePath, {
headers: { 'Content-Length': fileStat.size.toString() }
})

fileStream.pipe(remoteStream)

await new Promise<void>((resolve, reject) => {
fileStream.on('error', e => reject(e)).on('finish', () => resolve())
})

return remoteFilePath
}

private async shareFile(remoteFilePath: string): Promise<string> {
const url = this.endpoint + `/ocs/v2.php/apps/files_sharing/api/v1/shares`
const body = {
path: remoteFilePath,
shareType: 3,
publicUpload: 'false',
permissions: 1
}
}

const res = await fetch(url, {
method: 'POST',
headers: Object.assign(this.headers, {
'OCS-APIRequest': true
}),
body: JSON.stringify(body)
})

const result = await res.text()
const re = /<url>(?<share_url>.*)<\/url>/
const match = re.exec(result)
const sharableUrl = (match?.groups || {})['share_url']
if (!sharableUrl) {
throw new Error('Failed to parse sharable URL.')
}

return sharableUrl
}
}
workspace.code-workspace (normal file): 9 changed lines.
@@ -0,0 +1,9 @@
{
  "folders": [
    {
      "path": "."
    }
  ],
  "remoteAuthority": "wsl+Ubuntu-20.04",
  "settings": {}
}