Commit

refactor: migrate scenario tests to models package (#4139)
closes #4140
part of #3832

---------

Co-authored-by: frrist <[email protected]>
frrist committed Jun 27, 2024
1 parent 6786a6d commit a6a3103
Showing 32 changed files with 950 additions and 578 deletions.
6 changes: 3 additions & 3 deletions pkg/docker/test.go
@@ -9,7 +9,7 @@ import (
"github.com/docker/docker/client"
"github.com/stretchr/testify/require"

"github.com/bacalhau-project/bacalhau/pkg/model"
"github.com/bacalhau-project/bacalhau/pkg/models"
)

// MustHaveDocker will skip the test if the test is running in an environment that cannot support cross-platform
@@ -20,8 +20,8 @@ func MustHaveDocker(t testing.TB) {

// EngineSpecRequiresDocker will skip the test if the test is running in an environment that cannot support cross-platform
// Docker images, and the passed model.EngineSpec type is model.EngineDocker
func EngineSpecRequiresDocker(t testing.TB, engineSpec model.EngineSpec) {
MaybeNeedDocker(t, engineSpec.Engine() == model.EngineDocker)
func EngineSpecRequiresDocker(t testing.TB, engineSpec *models.SpecConfig) {
MaybeNeedDocker(t, engineSpec.Type == models.EngineDocker)
}

// MaybeNeedDocker will skip the test if the test is running in an environment that cannot support cross-platform
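For reviewers, a minimal sketch of how a test might call the updated helper with a models-based engine spec; the test name, image, and params map are illustrative assumptions, not part of this change:

package docker_test

import (
	"testing"

	"github.com/bacalhau-project/bacalhau/pkg/docker"
	"github.com/bacalhau-project/bacalhau/pkg/models"
)

func TestNeedsDockerExample(t *testing.T) {
	// Hypothetical engine spec; the helper skips the test when the spec
	// targets Docker but the environment cannot run Docker images.
	engine := &models.SpecConfig{
		Type:   models.EngineDocker,
		Params: map[string]interface{}{"Image": "ubuntu:latest"},
	}
	docker.EngineSpecRequiresDocker(t, engine)
	// ... the rest of the test only runs when Docker is available ...
}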
8 changes: 8 additions & 0 deletions pkg/executor/docker/models/types.go
@@ -128,3 +128,11 @@ func (b *DockerEngineBuilder) Build() (*models.SpecConfig, error) {
Params: b.spec.ToMap(),
}, nil
}

func (b *DockerEngineBuilder) MustBuild() *models.SpecConfig {
spec, err := b.Build()
if err != nil {
panic(err)
}
return spec
}
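As a usage note, MustBuild lets test fixtures construct engine specs inline without error-handling boilerplate, at the cost of a panic on a misconfigured builder. A hedged sketch, assuming the NewDockerEngineBuilder constructor and WithEntrypoint option already exposed by this package; the image, entrypoint, and task literal are illustrative:

import (
	dockermodels "github.com/bacalhau-project/bacalhau/pkg/executor/docker/models"
	"github.com/bacalhau-project/bacalhau/pkg/models"
)

func exampleDockerTask() *models.Task {
	// MustBuild panics if the builder is misconfigured, so fixtures stay
	// short and failures surface immediately in tests.
	return &models.Task{
		Name: "example",
		Engine: dockermodels.NewDockerEngineBuilder("ubuntu:latest").
			WithEntrypoint("echo", "hello").
			MustBuild(),
	}
}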
8 changes: 8 additions & 0 deletions pkg/executor/wasm/models/types.go
@@ -229,3 +229,11 @@ func (b *WasmEngineBuilder) Build() (*models.SpecConfig, error) {
Params: b.spec.ToMap(),
}, nil
}

func (b *WasmEngineBuilder) MustBuild() *models.SpecConfig {
spec, err := b.Build()
if err != nil {
panic(err)
}
return spec
}
16 changes: 10 additions & 6 deletions pkg/job/parser.go
@@ -7,11 +7,13 @@ import (
"strings"

"github.com/bacalhau-project/bacalhau/pkg/models"
publisher_local "github.com/bacalhau-project/bacalhau/pkg/publisher/local"
)

const (
s3Prefix = "s3"
ipfsPrefix = "ipfs"
s3Prefix = "s3"
ipfsPrefix = "ipfs"
localPrefix = "local"
)

// ParsePublisherString parses a publisher string into a SpecConfig without having to
@@ -64,10 +66,10 @@ func ParsePublisherString(publisher string) (*models.SpecConfig, error) {
parsedURI.Scheme = parsedURI.Path
}

var res models.SpecConfig
var res *models.SpecConfig
switch parsedURI.Scheme {
case ipfsPrefix:
res = models.SpecConfig{
res = &models.SpecConfig{
Type: models.PublisherIPFS,
}
case s3Prefix:
@@ -77,13 +79,15 @@ func ParsePublisherString(publisher string) (*models.SpecConfig, error) {
if _, ok := options["key"]; !ok {
options["key"] = strings.TrimLeft(parsedURI.Path, "/")
}
res = models.SpecConfig{
res = &models.SpecConfig{
Type: models.PublisherS3,
Params: options,
}
case localPrefix:
res = publisher_local.NewSpecConfig()
default:
return nil, fmt.Errorf("unknown publisher type: %s", parsedURI.Scheme)
}

return &res, nil
return res, nil
}
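A brief sketch of the new local prefix in use alongside the existing ones; the loop, error handling, and s3 URI are illustrative:

import (
	"fmt"

	"github.com/bacalhau-project/bacalhau/pkg/job"
)

func examplePublisherParsing() error {
	// "local" now maps to the local publisher's SpecConfig instead of
	// falling through to the unknown-publisher error.
	for _, s := range []string{"local", "ipfs", "s3://my-bucket/results"} {
		spec, err := job.ParsePublisherString(s)
		if err != nil {
			return err
		}
		fmt.Println(s, "->", spec.Type)
	}
	return nil
}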
2 changes: 2 additions & 0 deletions pkg/node/node.go
@@ -185,6 +185,8 @@ func NewNode(
return nil, err
}

// TODO calling `Keys` on the publishers takes ~10 seconds per call
// https://github.com/bacalhau-project/bacalhau/issues/4153
metrics.NodeInfo.Add(ctx, 1,
attribute.StringSlice("node_publishers", publishers.Keys(ctx)),
attribute.StringSlice("node_storages", storages.Keys(ctx)),
5 changes: 3 additions & 2 deletions pkg/node/requester.go
@@ -233,9 +233,10 @@ func NewRequesterNode(
// parse the publisher to generate a models.SpecConfig and add it to each job
// which is without a publisher
config, err := job.ParsePublisherString(requesterConfig.DefaultPublisher)
if err == nil {
jobTransformers = append(jobTransformers, transformer.DefaultPublisher(config))
if err != nil {
return nil, fmt.Errorf("parsing default publisher spec (%s): %w", requesterConfig.DefaultPublisher, err)
}
jobTransformers = append(jobTransformers, transformer.DefaultPublisher(config))
}

endpointV2 := orchestrator.NewBaseEndpoint(&orchestrator.BaseEndpointParams{
14 changes: 12 additions & 2 deletions pkg/storage/inline/types.go
@@ -4,10 +4,11 @@ import (
"errors"
"fmt"

"github.com/bacalhau-project/bacalhau/pkg/lib/validate"
"github.com/bacalhau-project/bacalhau/pkg/models"
"github.com/fatih/structs"
"github.com/mitchellh/mapstructure"

"github.com/bacalhau-project/bacalhau/pkg/lib/validate"
"github.com/bacalhau-project/bacalhau/pkg/models"
)

type Source struct {
@@ -42,3 +43,12 @@ func DecodeSpec(spec *models.SpecConfig) (Source, error) {

return c, c.Validate()
}

func NewSpecConfig(url string) *models.SpecConfig {
s := Source{URL: url}

return &models.SpecConfig{
Type: models.StorageSourceInline,
Params: s.ToMap(),
}
}
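A minimal sketch of the new helper wiring an inline payload into a task input; the data URL, target path, and InputSource fields are assumptions based on the existing models types rather than part of this diff:

import (
	"github.com/bacalhau-project/bacalhau/pkg/models"
	"github.com/bacalhau-project/bacalhau/pkg/storage/inline"
)

func exampleInlineInput() *models.InputSource {
	// Inline sources carry their content in a data: URL, so small test
	// fixtures need no external storage.
	return &models.InputSource{
		Source: inline.NewSpecConfig("data:,hello%20world"),
		Target: "/inputs/hello.txt",
	}
}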
33 changes: 23 additions & 10 deletions pkg/test/compute/resourcelimits_test.go
@@ -16,6 +16,8 @@ import (
"github.com/stretchr/testify/suite"

"github.com/bacalhau-project/bacalhau/pkg/config/types"
"github.com/bacalhau-project/bacalhau/pkg/publicapi/apimodels"
clientv2 "github.com/bacalhau-project/bacalhau/pkg/publicapi/client/v2"

legacy_job "github.com/bacalhau-project/bacalhau/pkg/legacyjob"
"github.com/bacalhau-project/bacalhau/pkg/model"
@@ -60,14 +62,13 @@ type TotalResourceTestCaseCheck struct {

type TotalResourceTestCase struct {
// the total list of jobs to throw at the cluster all at the same time
jobs []model.ResourceUsageConfig
totalLimits model.ResourceUsageConfig
jobs []*models.ResourcesConfig
totalLimits *models.ResourcesConfig
wait TotalResourceTestCaseCheck
checkers []TotalResourceTestCaseCheck
}

func (suite *ComputeNodeResourceLimitsSuite) TestTotalResourceLimits() {

// for this test we use the transport so the compute_node is calling
// the executor in a go-routine and we can test what jobs
// look like over time - this test leave each job running for X seconds
@@ -126,7 +127,7 @@ func (suite *ComputeNodeResourceLimitsSuite) TestTotalResourceLimits() {
return size.Bytes(), err
}

resourcesConfig := legacy.FromLegacyResourceUsageConfig(testCase.totalLimits)
resourcesConfig := testCase.totalLimits
parsedResources, err := resourcesConfig.ToResources()
require.NoError(suite.T(), err)

@@ -148,12 +149,24 @@ func (suite *ComputeNodeResourceLimitsSuite) TestTotalResourceLimits() {

for _, jobResources := range testCase.jobs {
// what the job is doesn't matter - it will only end up
j := testutils.MakeNoopJob(suite.T())
j.Spec.Resources = jobResources
_, err := stack.Nodes[0].RequesterNode.Endpoint.SubmitJob(ctx, model.JobCreatePayload{
ClientID: "123",
APIVersion: j.APIVersion,
Spec: &j.Spec,
j := &models.Job{
Name: suite.T().Name(),
Type: models.JobTypeBatch,
Count: 1,
Tasks: []*models.Task{
{
Name: suite.T().Name(),
Engine: &models.SpecConfig{
Type: models.EngineNoop,
},
ResourcesConfig: jobResources,
},
},
}
j.Normalize()
client := clientv2.New(fmt.Sprintf("http://%s:%d", stack.Nodes[0].APIServer.Address, stack.Nodes[0].APIServer.Port))
_, err := client.Jobs().Put(ctx, &apimodels.PutJobRequest{
Job: j,
})
require.NoError(suite.T(), err)

9 changes: 4 additions & 5 deletions pkg/test/compute/utils_test.go
@@ -6,7 +6,6 @@ import (
"fmt"

_ "github.com/bacalhau-project/bacalhau/pkg/logger"
"github.com/bacalhau-project/bacalhau/pkg/model"
"github.com/bacalhau-project/bacalhau/pkg/models"
localdirectory "github.com/bacalhau-project/bacalhau/pkg/storage/local_directory"
)
@@ -34,16 +33,16 @@ func addResourceUsage(execution *models.Execution, usage models.Resources) *mode
return execution
}

func getResources(c, m, d string) model.ResourceUsageConfig {
return model.ResourceUsageConfig{
func getResources(c, m, d string) *models.ResourcesConfig {
return &models.ResourcesConfig{
CPU: c,
Memory: m,
Disk: d,
}
}

func getResourcesArray(data [][]string) []model.ResourceUsageConfig {
var res []model.ResourceUsageConfig
func getResourcesArray(data [][]string) []*models.ResourcesConfig {
var res []*models.ResourcesConfig
for _, d := range data {
res = append(res, getResources(d[0], d[1], d[2]))
}
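For context, a small illustrative use of these helpers when declaring a resource-limit test case; the values and field choices are arbitrary:

func exampleTotalResourceTestCase() TotalResourceTestCase {
	// Two jobs that each request 1 CPU and 500Mb of RAM, run against a
	// total limit of 2 CPUs and 1Gb of RAM and disk.
	return TotalResourceTestCase{
		jobs:        getResourcesArray([][]string{{"1", "500Mb", ""}, {"1", "500Mb", ""}}),
		totalLimits: getResources("2", "1Gb", "1Gb"),
	}
}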
15 changes: 6 additions & 9 deletions pkg/test/devstack/concurrency_test.go
@@ -7,12 +7,10 @@ import (

"github.com/stretchr/testify/suite"

"github.com/bacalhau-project/bacalhau/pkg/downloader"
legacy_job "github.com/bacalhau-project/bacalhau/pkg/legacyjob"

"github.com/bacalhau-project/bacalhau/pkg/devstack"
"github.com/bacalhau-project/bacalhau/pkg/downloader"
_ "github.com/bacalhau-project/bacalhau/pkg/logger"
"github.com/bacalhau-project/bacalhau/pkg/model"
"github.com/bacalhau-project/bacalhau/pkg/models"
"github.com/bacalhau-project/bacalhau/pkg/test/scenario"
)

@@ -27,22 +25,21 @@ func TestDevstackConcurrencySuite(t *testing.T) {
}

func (suite *DevstackConcurrencySuite) TestConcurrencyLimit() {

testCase := scenario.WasmHelloWorld(suite.T())
testCase.Stack = &scenario.StackConfig{
DevStackOptions: &devstack.DevStackOptions{
NumberOfHybridNodes: 1,
NumberOfComputeOnlyNodes: 2,
},
}
testCase.Deal = model.Deal{Concurrency: 2}
testCase.Job.Count = 2
testCase.ResultsChecker = scenario.FileEquals(
downloader.DownloadFilenameStdout,
"Hello, world!\nHello, world!\n",
)
testCase.JobCheckers = []legacy_job.CheckStatesFunction{
legacy_job.WaitForExecutionStates(map[model.ExecutionStateType]int{
model.ExecutionStateCompleted: testCase.Deal.Concurrency,
testCase.JobCheckers = []scenario.StateChecks{
scenario.WaitForExecutionStates(map[models.ExecutionStateType]int{
models.ExecutionStateCompleted: testCase.Job.Count,
}),
}

70 changes: 41 additions & 29 deletions pkg/test/devstack/default_publisher_test.go
@@ -7,11 +7,10 @@ import (
"os"
"testing"

legacy_job "github.com/bacalhau-project/bacalhau/pkg/legacyjob"
wasmmodels "github.com/bacalhau-project/bacalhau/pkg/executor/wasm/models"
_ "github.com/bacalhau-project/bacalhau/pkg/logger"
"github.com/bacalhau-project/bacalhau/pkg/model"
"github.com/bacalhau-project/bacalhau/pkg/models"
"github.com/bacalhau-project/bacalhau/pkg/test/scenario"
testutils "github.com/bacalhau-project/bacalhau/pkg/test/utils"
"github.com/bacalhau-project/bacalhau/testdata/wasm/cat"

"github.com/stretchr/testify/suite"
@@ -27,46 +26,59 @@ func TestDefaultPublisherSuite(t *testing.T) {

func (s *DefaultPublisherSuite) TestNoDefaultPublisher() {
testcase := scenario.Scenario{
Spec: testutils.MakeSpecWithOpts(s.T(),
legacy_job.WithEngineSpec(
model.NewWasmEngineBuilder(scenario.InlineData(cat.Program())).
WithEntrypoint("_start").
WithParameters(
"data/hello.txt",
"does/not/exist.txt",
).
Build(),
),
),
Job: &models.Job{
Name: s.T().Name(),
Type: models.JobTypeBatch,
Count: 1,
Tasks: []*models.Task{
{
Name: s.T().Name(),
Engine: wasmmodels.NewWasmEngineBuilder(scenario.InlineData(cat.Program())).
WithEntrypoint("_start").
WithParameters(
"data/hello.txt",
"does/not/exist.txt",
).
MustBuild(),
},
},
},
ResultsChecker: expectResultsNone,
JobCheckers: []legacy_job.CheckStatesFunction{
legacy_job.WaitForSuccessfulCompletion(),
JobCheckers: []scenario.StateChecks{
scenario.WaitForSuccessfulCompletion(),
},
}

s.RunScenario(testcase)
}

func (s *DefaultPublisherSuite) TestDefaultPublisher() {
// we are skipping this test because the orchestrator endpoint doesn't require a publisher
stack := scenario.StackConfig{}
stack.DefaultPublisher = "local"

testcase := scenario.Scenario{
Spec: testutils.MakeSpecWithOpts(s.T(),
legacy_job.WithEngineSpec(
model.NewWasmEngineBuilder(scenario.InlineData(cat.Program())).
WithEntrypoint("_start").
WithParameters(
"data/hello.txt",
"does/not/exist.txt",
).
Build(),
),
),
Job: &models.Job{
Name: s.T().Name(),
Type: models.JobTypeBatch,
Count: 1,
Tasks: []*models.Task{
{
Name: s.T().Name(),
Engine: wasmmodels.NewWasmEngineBuilder(scenario.InlineData(cat.Program())).
WithEntrypoint("_start").
WithParameters(
"data/hello.txt",
"does/not/exist.txt",
).
MustBuild(),
},
},
},
Stack: &stack,
ResultsChecker: expectResultsSome,
JobCheckers: []legacy_job.CheckStatesFunction{
legacy_job.WaitForSuccessfulCompletion(),
JobCheckers: []scenario.StateChecks{
scenario.WaitForSuccessfulCompletion(),
},
}

(Diffs for the remaining changed files are not shown.)
