//go:build integration
// +build integration

package commands
import (
	"io/ioutil"
	"os"
	"runtime"
	"testing"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildtest"
)
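
// TestBuildsHelperCollect runs a long-running build while repeatedly
// collecting Prometheus metrics from the builds helper, verifying that
// collection is safe while the build progresses and that aborting the
// build surfaces a job-canceled error.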
func TestBuildsHelperCollect(t *testing.T) {
	dir, err := ioutil.TempDir("", "gitlab-runner-helper-collector")
	require.NoError(t, err)
	defer os.RemoveAll(dir)
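
	// Buffered channel that receives the metrics emitted by Collect.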
	ch := make(chan prometheus.Metric, 50)
	b := newBuildsHelper()

	longRunningBuild, err := common.GetLongRunningBuild()
	require.NoError(t, err)
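
	// Pick a shell that is available on the host OS.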
	shell := "bash"
	if runtime.GOOS == "windows" {
		shell = "powershell"
	}

	build := &common.Build{
		JobResponse: longRunningBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				BuildsDir: dir,
				Executor:  "shell",
				Shell:     shell,
			},
		},
	}
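
	// Discard the job log; the test only inspects metrics and the final error.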
	trace := &common.Trace{Writer: ioutil.Discard}
	done := make(chan error)
	go func() {
		done <- buildtest.RunBuildWithTrace(t, build, trace)
	}()
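
	// Register the build with the helper so that Collect reports its metrics.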
	b.builds = append(b.builds, build)

	// Collect metrics many times while the build is executing, to trigger any
	// potential race conditions between the build progressing and metrics
	// being collected.
	for i := 0; i < 200; i++ {
		if i == 100 {
			// The build might not have started yet; retry until the abort
			// succeeds.
			require.Eventually(
				t,
				func() bool {
					return trace.Abort()
				},
				time.Minute,
				10*time.Millisecond,
			)
		}

		b.Collect(ch)
		<-ch
	}
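
	// The aborted build should finish with a job-canceled build error.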
	err = <-done
	expected := &common.BuildError{FailureReason: common.JobCanceled}
	assert.ErrorIs(t, err, expected)
}