 import * as yaml from "js-yaml";
 import chalk from "chalk";
-import path from "path";
 import fs from "fs-extra";
 import yargs from "yargs";
 import {Commander} from "./commander.js";
@@ -13,35 +12,24 @@ import {Utils} from "./utils.js";
 import {Argv} from "./argv.js";
 import assert from "assert";
 
-const generateGitIgnore = (cwd: string, stateDir: string) => {
-    const gitIgnoreFilePath = `${cwd}/${stateDir}/.gitignore`;
-    const gitIgnoreContent = "*\n!.gitignore\n";
-    if (!fs.existsSync(gitIgnoreFilePath)) {
-        fs.outputFileSync(gitIgnoreFilePath, gitIgnoreContent);
-    }
-};
-
 export async function handler (args: any, writeStreams: WriteStreams, jobs: Job[] = []) {
     const argv = await Argv.build(args, writeStreams);
     const cwd = argv.cwd;
     const stateDir = argv.stateDir;
-    const file = argv.file;
     let parser: Parser | null = null;
 
     if (argv.completion) {
         yargs(process.argv.slice(2)).showCompletionScript();
         return [];
     }
 
-    assert(fs.existsSync(`${cwd}/${file}`), `${path.resolve(cwd)}/${file} could not be found`);
-
     if (argv.fetchIncludes) {
         await Parser.create(argv, writeStreams, 0, jobs);
         return [];
     }
 
     if (argv.preview) {
-        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
+        const pipelineIid = await state.getPipelineIid(stateDir);
         parser = await Parser.create(argv, writeStreams, pipelineIid, jobs, false);
         const gitlabData = parser.gitlabData;
         for (const jobName of Object.keys(gitlabData)) {
@@ -55,46 +43,43 @@ export async function handler (args: any, writeStreams: WriteStreams, jobs: Job[
         }
         writeStreams.stdout(`---\n${yaml.dump(gitlabData, {lineWidth: 160})}`);
     } else if (argv.list || argv.listAll) {
-        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
+        const pipelineIid = await state.getPipelineIid(stateDir);
         parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
         Commander.runList(parser, writeStreams, argv.listAll);
     } else if (argv.listJson) {
-        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
+        const pipelineIid = await state.getPipelineIid(stateDir);
         parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
         Commander.runJson(parser, writeStreams);
     } else if (argv.listCsv || argv.listCsvAll) {
-        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
+        const pipelineIid = await state.getPipelineIid(stateDir);
         parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
         Commander.runCsv(parser, writeStreams, argv.listCsvAll);
     } else if (argv.job.length > 0) {
         assert(argv.stage === null, "You cannot use --stage when starting individual jobs");
-        generateGitIgnore(cwd, stateDir);
         const time = process.hrtime();
         if (argv.needs || argv.onlyNeeds) {
-            await fs.remove(`${cwd}/${stateDir}/artifacts`);
-            await state.incrementPipelineIid(cwd, stateDir);
+            await fs.remove(`${stateDir}/artifacts`);
+            await state.incrementPipelineIid(stateDir);
         }
-        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
+        const pipelineIid = await state.getPipelineIid(stateDir);
         parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
         await Utils.rsyncTrackedFiles(cwd, stateDir, ".docker");
         await Commander.runJobs(argv, parser, writeStreams);
         if (argv.needs || argv.onlyNeeds) {
             writeStreams.stderr(chalk`{grey pipeline finished} in {grey ${prettyHrtime(process.hrtime(time))}}\n`);
         }
     } else if (argv.stage) {
-        generateGitIgnore(cwd, stateDir);
         const time = process.hrtime();
-        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
+        const pipelineIid = await state.getPipelineIid(stateDir);
         parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
         await Utils.rsyncTrackedFiles(cwd, stateDir, ".docker");
         await Commander.runJobsInStage(argv, parser, writeStreams);
         writeStreams.stderr(chalk`{grey pipeline finished} in {grey ${prettyHrtime(process.hrtime(time))}}\n`);
     } else {
-        generateGitIgnore(cwd, stateDir);
         const time = process.hrtime();
-        await fs.remove(`${cwd}/${stateDir}/artifacts`);
-        await state.incrementPipelineIid(cwd, stateDir);
-        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
+        await fs.remove(`${stateDir}/artifacts`);
+        await state.incrementPipelineIid(stateDir);
+        const pipelineIid = await state.getPipelineIid(stateDir);
         parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
         await Utils.rsyncTrackedFiles(cwd, stateDir, ".docker");
         await Commander.runPipeline(argv, parser, writeStreams);
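Throughout this diff the state helpers lose their cwd argument and the artifacts path is built from stateDir alone, which suggests stateDir now carries the full path to the state directory. For illustration only, here is a minimal sketch of what single-argument state helpers could look like; the pipeline-iid file name and all implementation details are assumptions, not taken from this commit:

import fs from "fs-extra";

// Hypothetical helpers keyed on the state directory alone (file name assumed).
const iidFile = (stateDir: string) => `${stateDir}/pipeline-iid`;

export async function getPipelineIid (stateDir: string): Promise<number> {
    // No state file yet means no pipeline has run; treat that as iid 0.
    if (!await fs.pathExists(iidFile(stateDir))) return 0;
    return parseInt(await fs.readFile(iidFile(stateDir), "utf8"), 10) || 0;
}

export async function incrementPipelineIid (stateDir: string): Promise<void> {
    const iid = await getPipelineIid(stateDir);
    // outputFile creates the state directory when it does not exist yet.
    await fs.outputFile(iidFile(stateDir), `${iid + 1}`);
}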