From 4def2144b089e9204c373fb057251da10a8fac91 Mon Sep 17 00:00:00 2001 From: Ed Schouten Date: Fri, 7 Nov 2025 05:57:05 +0100 Subject: [PATCH 1/3] Remove support for REv2.0 output files/directories On 2019-11-19, REv2 changed the way expected output paths are declared in the Command message. It used to be the case that files and directories were declared separately, but this prevents you from writing an action that captures either of those, depending on what the action generated. Given that this feature was added 6.5 years ago, let's go ahead and remove support for the old scheme. Clients have had more than enough time to implement the new scheme. --- cmd/bb_worker/main.go | 1 - pkg/builder/command.go | 2 +- pkg/builder/local_build_executor.go | 42 +- pkg/builder/local_build_executor_test.go | 12 - pkg/builder/output_hierarchy.go | 126 +--- pkg/builder/output_hierarchy_test.go | 670 +++++------------- .../configuration/bb_worker/bb_worker.pb.go | 50 +- .../configuration/bb_worker/bb_worker.proto | 13 +- 8 files changed, 228 insertions(+), 688 deletions(-) diff --git a/cmd/bb_worker/main.go b/cmd/bb_worker/main.go index 0dd047e9..f471e7b3 100644 --- a/cmd/bb_worker/main.go +++ b/cmd/bb_worker/main.go @@ -442,7 +442,6 @@ func main() { int(configuration.MaximumMessageSizeBytes), runnerConfiguration.EnvironmentVariables, configuration.ForceUploadTreesAndDirectories, - configuration.SupportLegacyOutputFilesAndDirectories, ) if prefetchingConfiguration != nil { diff --git a/pkg/builder/command.go b/pkg/builder/command.go index 630b7800..3af30a37 100644 --- a/pkg/builder/command.go +++ b/pkg/builder/command.go @@ -86,7 +86,7 @@ func ConvertCommandToShellScript(command *remoteexecution.Command, w io.StringWr } // Create parent directories of outputs. - outputHierarchy, err := NewOutputHierarchy(command, false) + outputHierarchy, err := NewOutputHierarchy(command) if err != nil { return err } diff --git a/pkg/builder/local_build_executor.go b/pkg/builder/local_build_executor.go index 7a428b28..a468dc61 100644 --- a/pkg/builder/local_build_executor.go +++ b/pkg/builder/local_build_executor.go @@ -65,32 +65,30 @@ func (el *capturingErrorLogger) GetError() error { } type localBuildExecutor struct { - contentAddressableStorage blobstore.BlobAccess - buildDirectoryCreator BuildDirectoryCreator - runner runner_pb.RunnerClient - clock clock.Clock - maximumWritableFileUploadDelay time.Duration - inputRootCharacterDevices map[path.Component]filesystem.DeviceNumber - maximumMessageSizeBytes int - environmentVariables map[string]string - forceUploadTreesAndDirectories bool - supportLegacyOutputFilesAndDirectories bool + contentAddressableStorage blobstore.BlobAccess + buildDirectoryCreator BuildDirectoryCreator + runner runner_pb.RunnerClient + clock clock.Clock + maximumWritableFileUploadDelay time.Duration + inputRootCharacterDevices map[path.Component]filesystem.DeviceNumber + maximumMessageSizeBytes int + environmentVariables map[string]string + forceUploadTreesAndDirectories bool } // NewLocalBuildExecutor returns a BuildExecutor that executes build // steps on the local system. 
-func NewLocalBuildExecutor(contentAddressableStorage blobstore.BlobAccess, buildDirectoryCreator BuildDirectoryCreator, runner runner_pb.RunnerClient, clock clock.Clock, maximumWritableFileUploadDelay time.Duration, inputRootCharacterDevices map[path.Component]filesystem.DeviceNumber, maximumMessageSizeBytes int, environmentVariables map[string]string, forceUploadTreesAndDirectories, supportLegacyOutputFilesAndDirectories bool) BuildExecutor { +func NewLocalBuildExecutor(contentAddressableStorage blobstore.BlobAccess, buildDirectoryCreator BuildDirectoryCreator, runner runner_pb.RunnerClient, clock clock.Clock, maximumWritableFileUploadDelay time.Duration, inputRootCharacterDevices map[path.Component]filesystem.DeviceNumber, maximumMessageSizeBytes int, environmentVariables map[string]string, forceUploadTreesAndDirectories bool) BuildExecutor { return &localBuildExecutor{ - contentAddressableStorage: contentAddressableStorage, - buildDirectoryCreator: buildDirectoryCreator, - runner: runner, - clock: clock, - maximumWritableFileUploadDelay: maximumWritableFileUploadDelay, - inputRootCharacterDevices: inputRootCharacterDevices, - maximumMessageSizeBytes: maximumMessageSizeBytes, - environmentVariables: environmentVariables, - forceUploadTreesAndDirectories: forceUploadTreesAndDirectories, - supportLegacyOutputFilesAndDirectories: supportLegacyOutputFilesAndDirectories, + contentAddressableStorage: contentAddressableStorage, + buildDirectoryCreator: buildDirectoryCreator, + runner: runner, + clock: clock, + maximumWritableFileUploadDelay: maximumWritableFileUploadDelay, + inputRootCharacterDevices: inputRootCharacterDevices, + maximumMessageSizeBytes: maximumMessageSizeBytes, + environmentVariables: environmentVariables, + forceUploadTreesAndDirectories: forceUploadTreesAndDirectories, } } @@ -233,7 +231,7 @@ func (be *localBuildExecutor) Execute(ctx context.Context, filePool pool.FilePoo return response } command := commandMessage.(*remoteexecution.Command) - outputHierarchy, err := NewOutputHierarchy(command, be.supportLegacyOutputFilesAndDirectories) + outputHierarchy, err := NewOutputHierarchy(command) if err != nil { attachErrorToExecuteResponse(response, err) return response diff --git a/pkg/builder/local_build_executor_test.go b/pkg/builder/local_build_executor_test.go index 412002e6..f8e2d1d0 100644 --- a/pkg/builder/local_build_executor_test.go +++ b/pkg/builder/local_build_executor_test.go @@ -48,7 +48,6 @@ func TestLocalBuildExecutorInvalidActionDigest(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) filePool := mock.NewMockFilePool(ctrl) @@ -98,7 +97,6 @@ func TestLocalBuildExecutorMissingAction(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) filePool := mock.NewMockFilePool(ctrl) @@ -144,7 +142,6 @@ func TestLocalBuildExecutorBuildDirectoryCreatorFailedFailed(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) filePool := mock.NewMockFilePool(ctrl) @@ -212,7 +209,6 @@ func TestLocalBuildExecutorInputRootPopulationFailed(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ 
map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) metadata := make(chan *remoteworker.CurrentState_Executing, 10) @@ -289,7 +285,6 @@ func TestLocalBuildExecutorOutputDirectoryCreationFailure(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) metadata := make(chan *remoteworker.CurrentState_Executing, 10) @@ -359,7 +354,6 @@ func TestLocalBuildExecutorMissingCommand(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) metadata := make(chan *remoteworker.CurrentState_Executing, 10) @@ -486,7 +480,6 @@ func TestLocalBuildExecutorOutputSymlinkReadingFailure(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) metadata := make(chan *remoteworker.CurrentState_Executing, 10) @@ -719,7 +712,6 @@ func TestLocalBuildExecutorSuccess(t *testing.T) { "PWD": "dont-overwrite", }, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) requestMetadata, err := anypb.New(&remoteexecution.RequestMetadata{ @@ -803,7 +795,6 @@ func TestLocalBuildExecutorCachingInvalidTimeout(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) // Execution should fail, as the number of nanoseconds in the @@ -923,7 +914,6 @@ func TestLocalBuildExecutorInputRootIOFailureDuringExecution(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) metadata := make(chan *remoteworker.CurrentState_Executing, 10) @@ -1056,7 +1046,6 @@ func TestLocalBuildExecutorTimeoutDuringExecution(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) metadata := make(chan *remoteworker.CurrentState_Executing, 10) @@ -1158,7 +1147,6 @@ func TestLocalBuildExecutorCharacterDeviceNodeCreationFailed(t *testing.T) { /* maximumMessageSizeBytes = */ 10000, /* environmentVariables = */ map[string]string{}, /* forceUploadTreesAndDirectories = */ false, - /* supportLegacyOutputFilesAndDirectories = */ false, ) metadata := make(chan *remoteworker.CurrentState_Executing, 10) diff --git a/pkg/builder/output_hierarchy.go b/pkg/builder/output_hierarchy.go index 67a73279..2e65d8e5 100644 --- a/pkg/builder/output_hierarchy.go +++ b/pkg/builder/output_hierarchy.go @@ -22,10 +22,8 @@ import ( // OutputNode is a node in a directory hierarchy that contains one or // more locations where output directories and files are expected. 
type outputNode struct { - directoriesToUpload map[path.Component][]string - filesToUpload map[path.Component][]string - pathsToUpload map[path.Component][]string - subdirectories map[path.Component]*outputNode + pathsToUpload map[path.Component][]string + subdirectories map[path.Component]*outputNode } func (on *outputNode) getSubdirectoryNames() []path.Component { @@ -51,10 +49,8 @@ func sortToUpload(m map[path.Component][]string) []path.Component { // expected. func newOutputDirectory() *outputNode { return &outputNode{ - directoriesToUpload: map[path.Component][]string{}, - filesToUpload: map[path.Component][]string{}, - pathsToUpload: map[path.Component][]string{}, - subdirectories: map[path.Component]*outputNode{}, + pathsToUpload: map[path.Component][]string{}, + subdirectories: map[path.Component]*outputNode{}, } } @@ -70,7 +66,7 @@ func (on *outputNode) createParentDirectories(d ParentPopulatableDirectory, dPat } // Recurse if we need to create one or more directories within. - if child := on.subdirectories[name]; len(child.subdirectories) > 0 || len(child.directoriesToUpload) > 0 { + if child := on.subdirectories[name]; len(child.subdirectories) > 0 { childDirectory, err := d.EnterParentPopulatableDirectory(name) if err != nil { return util.StatusWrapf(err, "Failed to enter output parent directory %#v", childPath.GetUNIXString()) @@ -82,28 +78,6 @@ func (on *outputNode) createParentDirectories(d ParentPopulatableDirectory, dPat } } } - - // Although REv2 explicitly documents that only parents of - // output directories are created (i.e., not the output - // directory itself), Bazel changed its behaviour and now - // creates output directories when using local execution. See - // these issues for details: - // - // https://github.com/bazelbuild/bazel/issues/6262 - // https://github.com/bazelbuild/bazel/issues/6393 - // - // Considering that the 'output_directories' field is deprecated - // in REv2.1 anyway, be consistent with Bazel's local execution. - // Once Bazel switches to REv2.1, it will be forced to solve - // this matter in a protocol conforming way. - for _, name := range sortToUpload(on.directoriesToUpload) { - if _, ok := on.subdirectories[name]; !ok { - childPath := dPath.Append(name) - if err := d.Mkdir(name, 0o777); err != nil && !os.IsExist(err) { - return util.StatusWrapf(err, "Failed to create output directory %#v", childPath.GetUNIXString()) - } - } - } return nil } @@ -111,46 +85,8 @@ func (on *outputNode) createParentDirectories(d ParentPopulatableDirectory, dPat // OutputHierarchy.UploadOutputs() to upload output directories and // files from the locations where they are expected. func (on *outputNode) uploadOutputs(s *uploadOutputsState, d UploadableDirectory, dPath *path.Trace) { - // Upload REv2.0 output directories that are expected to be - // present in this directory. 
- for _, component := range sortToUpload(on.directoriesToUpload) { - childPath := dPath.Append(component) - paths := on.directoriesToUpload[component] - if fileInfo, err := d.Lstat(component); err == nil { - switch fileType := fileInfo.Type(); fileType { - case filesystem.FileTypeDirectory: - s.uploadOutputDirectory(d, component, childPath, paths) - case filesystem.FileTypeSymlink: - s.uploadOutputSymlink(d, component, childPath, &s.actionResult.OutputDirectorySymlinks, paths) - default: - s.saveError(status.Errorf(codes.InvalidArgument, "Output directory %#v is not a directory or symlink", childPath.GetUNIXString())) - } - } else if !os.IsNotExist(err) { - s.saveError(util.StatusWrapf(err, "Failed to read attributes of output directory %#v", childPath.GetUNIXString())) - } - } - - // Upload REv2.0 output files that are expected to be present in - // this directory. - for _, component := range sortToUpload(on.filesToUpload) { - childPath := dPath.Append(component) - paths := on.filesToUpload[component] - if fileInfo, err := d.Lstat(component); err == nil { - switch fileType := fileInfo.Type(); fileType { - case filesystem.FileTypeRegularFile: - s.uploadOutputFile(d, component, childPath, fileInfo.IsExecutable(), paths) - case filesystem.FileTypeSymlink: - s.uploadOutputSymlink(d, component, childPath, &s.actionResult.OutputFileSymlinks, paths) - default: - s.saveError(status.Errorf(codes.InvalidArgument, "Output file %#v is not a regular file or symlink", childPath.GetUNIXString())) - } - } else if !os.IsNotExist(err) { - s.saveError(util.StatusWrapf(err, "Failed to read attributes of output file %#v", childPath.GetUNIXString())) - } - } - - // Upload REv2.1 output paths that are expected to be present in - // this directory. + // Upload output paths that are expected to be present in this + // directory. for _, component := range sortToUpload(on.pathsToUpload) { childPath := dPath.Append(component) paths := on.pathsToUpload[component] @@ -468,7 +404,7 @@ type OutputHierarchy struct { // NewOutputHierarchy creates a new OutputHierarchy that uses the // working directory and the output paths specified in an REv2 Command // message. -func NewOutputHierarchy(command *remoteexecution.Command, supportLegacyOutputFilesAndDirectories bool) (*OutputHierarchy, error) { +func NewOutputHierarchy(command *remoteexecution.Command) (*OutputHierarchy, error) { var workingDirectory outputNodePath if err := path.Resolve(path.UNIXFormat.NewParser(command.WorkingDirectory), path.NewRelativeScopeWalker(&workingDirectory)); err != nil { return nil, util.StatusWrap(err, "Invalid working directory") @@ -480,44 +416,14 @@ func NewOutputHierarchy(command *remoteexecution.Command, supportLegacyOutputFil command.OutputDirectoryFormat == remoteexecution.Command_TREE_AND_DIRECTORY, } - if len(command.OutputPaths) == 0 { - // Register REv2.0 output directories. 
- for _, outputDirectory := range command.OutputDirectories { - if !supportLegacyOutputFilesAndDirectories { - return nil, status.Error(codes.Unimplemented, "Command specifies output paths using REv2.0's output_files field, which is deprecated and not enabled on this worker") - } - if on, name, err := oh.lookup(workingDirectory, outputDirectory); err != nil { - return nil, util.StatusWrapf(err, "Invalid output directory %#v", outputDirectory) - } else if on == nil { - oh.rootsToUpload = append(oh.rootsToUpload, outputDirectory) - } else { - on.directoriesToUpload[*name] = append(on.directoriesToUpload[*name], outputDirectory) - } - } - - // Register REv2.0 output files. - for _, outputFile := range command.OutputFiles { - if !supportLegacyOutputFilesAndDirectories { - return nil, status.Error(codes.Unimplemented, "Command specifies output paths using REv2.0's output_files field, which is deprecated and not enabled on this worker") - } - if on, name, err := oh.lookup(workingDirectory, outputFile); err != nil { - return nil, util.StatusWrapf(err, "Invalid output file %#v", outputFile) - } else if on == nil { - return nil, status.Errorf(codes.InvalidArgument, "Output file %#v resolves to the input root directory", outputFile) - } else { - on.filesToUpload[*name] = append(on.filesToUpload[*name], outputFile) - } - } - } else { - // Register REv2.1 output paths. - for _, outputPath := range command.OutputPaths { - if on, name, err := oh.lookup(workingDirectory, outputPath); err != nil { - return nil, util.StatusWrapf(err, "Invalid output path %#v", outputPath) - } else if on == nil { - oh.rootsToUpload = append(oh.rootsToUpload, outputPath) - } else { - on.pathsToUpload[*name] = append(on.pathsToUpload[*name], outputPath) - } + // Register output paths. 
+ for _, outputPath := range command.OutputPaths { + if on, name, err := oh.lookup(workingDirectory, outputPath); err != nil { + return nil, util.StatusWrapf(err, "Invalid output path %#v", outputPath) + } else if on == nil { + oh.rootsToUpload = append(oh.rootsToUpload, outputPath) + } else { + on.pathsToUpload[*name] = append(on.pathsToUpload[*name], outputPath) } } return oh, nil diff --git a/pkg/builder/output_hierarchy_test.go b/pkg/builder/output_hierarchy_test.go index a60d595e..2889b019 100644 --- a/pkg/builder/output_hierarchy_test.go +++ b/pkg/builder/output_hierarchy_test.go @@ -26,39 +26,31 @@ func TestOutputHierarchyCreation(t *testing.T) { t.Run("AbsoluteWorkingDirectory", func(t *testing.T) { _, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "/tmp/hello/../..", - }, false) + }) testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Invalid working directory: Path is absolute, while a relative path was expected"), err) }) t.Run("InvalidWorkingDirectory", func(t *testing.T) { _, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "hello/../..", - }, false) + }) testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Invalid working directory: Path resolves to a location outside the input root directory"), err) }) - t.Run("AbsoluteOutputDirectory", func(t *testing.T) { - _, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: ".", - OutputDirectories: []string{"/etc/passwd"}, - }, true) - testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Invalid output directory \"/etc/passwd\": Path is absolute, while a relative path was expected"), err) - }) - - t.Run("InvalidOutputDirectory", func(t *testing.T) { + t.Run("AbsoluteOutputPath", func(t *testing.T) { _, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "hello", - OutputDirectories: []string{"../.."}, - }, true) - testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Invalid output directory \"../..\": Path resolves to a location outside the input root directory"), err) + WorkingDirectory: ".", + OutputPaths: []string{"/etc/passwd"}, + }) + testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Invalid output path \"/etc/passwd\": Path is absolute, while a relative path was expected"), err) }) - t.Run("InvalidOutputFile", func(t *testing.T) { + t.Run("InvalidOutputPath", func(t *testing.T) { _, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "hello", - OutputFiles: []string{".."}, - }, true) - testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Output file \"..\" resolves to the input root directory"), err) + OutputPaths: []string{"../.."}, + }) + testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Invalid output path \"../..\": Path resolves to a location outside the input root directory"), err) }) } @@ -71,7 +63,7 @@ func TestOutputHierarchyCreateParentDirectories(t *testing.T) { // No parent directories should be created. oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: ".", - }, false) + }) require.NoError(t, err) require.NoError(t, oh.CreateParentDirectories(root)) }) @@ -83,7 +75,7 @@ func TestOutputHierarchyCreateParentDirectories(t *testing.T) { // not cause any Mkdir() calls. 
oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "foo/bar", - }, true) + }) require.NoError(t, err) require.NoError(t, oh.CreateParentDirectories(root)) }) @@ -93,49 +85,22 @@ func TestOutputHierarchyCreateParentDirectories(t *testing.T) { // the root directory. There is thus no need to create // any output directories. oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{".."}, - OutputFiles: []string{"../file"}, - OutputPaths: []string{"../path"}, - }, false) - require.NoError(t, err) - require.NoError(t, oh.CreateParentDirectories(root)) - }) - - t.Run("Success", func(t *testing.T) { - // Create /foo/bar/baz. - root.EXPECT().Mkdir(path.MustNewComponent("foo"), os.FileMode(0o777)) - foo := mock.NewMockParentPopulatableDirectory(ctrl) - root.EXPECT().EnterParentPopulatableDirectory(path.MustNewComponent("foo")).Return(foo, nil) - foo.EXPECT().Mkdir(path.MustNewComponent("bar"), os.FileMode(0o777)) - bar := mock.NewMockParentPopulatableDirectory(ctrl) - foo.EXPECT().EnterParentPopulatableDirectory(path.MustNewComponent("bar")).Return(bar, nil) - bar.EXPECT().Mkdir(path.MustNewComponent("baz"), os.FileMode(0o777)) - bar.EXPECT().Close() - // Create /foo/qux. - foo.EXPECT().Mkdir(path.MustNewComponent("qux"), os.FileMode(0o777)) - foo.EXPECT().Close() - - oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{"bar/baz"}, - OutputFiles: []string{"../foo/qux/xyzzy"}, - }, true) + WorkingDirectory: "foo", + OutputPaths: []string{"../path"}, + }) require.NoError(t, err) require.NoError(t, oh.CreateParentDirectories(root)) }) - t.Run("SuccessPaths", func(t *testing.T) { - // No /foo/bar/baz since OutputPaths is set. - // Create /alice. + t.Run("DotDot", func(t *testing.T) { + // The leading ".." should cause the "alice" directory + // to be created within the root directory. root.EXPECT().Mkdir(path.MustNewComponent("alice"), os.FileMode(0o777)) oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{"bar/baz"}, - OutputFiles: []string{"../foo/qux/xyzzy"}, - OutputPaths: []string{"../alice/bob"}, - }, false) + WorkingDirectory: "foo", + OutputPaths: []string{"../alice/bob"}, + }) require.NoError(t, err) require.NoError(t, oh.CreateParentDirectories(root)) }) @@ -151,8 +116,8 @@ func TestOutputHierarchyCreateParentDirectories(t *testing.T) { oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "foo", - OutputFiles: []string{"bar/baz"}, - }, true) + OutputPaths: []string{"bar/baz"}, + }) require.NoError(t, err) testutil.RequireEqualStatus( t, @@ -172,46 +137,8 @@ func TestOutputHierarchyCreateParentDirectories(t *testing.T) { oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "foo", - OutputFiles: []string{"bar/baz"}, - }, true) - require.NoError(t, err) - require.NoError(t, oh.CreateParentDirectories(root)) - }) - - t.Run("MkdirFailureOutput", func(t *testing.T) { - // Failure to create a location where an output - // directory is expected. 
- root.EXPECT().Mkdir(path.MustNewComponent("foo"), os.FileMode(0o777)) - foo := mock.NewMockParentPopulatableDirectory(ctrl) - root.EXPECT().EnterParentPopulatableDirectory(path.MustNewComponent("foo")).Return(foo, nil) - foo.EXPECT().Mkdir(path.MustNewComponent("bar"), os.FileMode(0o777)).Return(status.Error(codes.Internal, "I/O error")) - foo.EXPECT().Close() - - oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{"bar"}, - }, true) - require.NoError(t, err) - testutil.RequireEqualStatus( - t, - status.Error(codes.Internal, "Failed to create output directory \"foo/bar\": I/O error"), - oh.CreateParentDirectories(root)) - }) - - t.Run("MkdirFailureOutputExists", func(t *testing.T) { - // This test is identical to the previous, except that - // the error is EEXIST. This should not cause a hard - // failure. - root.EXPECT().Mkdir(path.MustNewComponent("foo"), os.FileMode(0o777)) - foo := mock.NewMockParentPopulatableDirectory(ctrl) - root.EXPECT().EnterParentPopulatableDirectory(path.MustNewComponent("foo")).Return(foo, nil) - foo.EXPECT().Mkdir(path.MustNewComponent("bar"), os.FileMode(0o777)).Return(os.ErrExist) - foo.EXPECT().Close() - - oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{"bar"}, - }, true) + OutputPaths: []string{"bar/baz"}, + }) require.NoError(t, err) require.NoError(t, oh.CreateParentDirectories(root)) }) @@ -225,9 +152,9 @@ func TestOutputHierarchyCreateParentDirectories(t *testing.T) { foo.EXPECT().Close() oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{"bar/baz"}, - }, true) + WorkingDirectory: "foo", + OutputPaths: []string{"bar/baz/qux"}, + }) require.NoError(t, err) testutil.RequireEqualStatus( t, @@ -249,7 +176,7 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { // should not trigger any I/O. oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: ".", - }, false) + }) require.NoError(t, err) var actionResult remoteexecution.ActionResult require.NoError( @@ -265,7 +192,7 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { require.Equal(t, remoteexecution.ActionResult{}, actionResult) }) - testSuccess := func(t *testing.T, command *remoteexecution.Command, expectedResult remoteexecution.ActionResult) { + t.Run("Success", func(t *testing.T) { // Declare output directories, files and paths. For each // of these output types, let them match one of the // valid file types. 
@@ -391,7 +318,35 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { foo.EXPECT().Close() - oh, err := builder.NewOutputHierarchy(command, true) + oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ + WorkingDirectory: "foo", + OutputPaths: []string{ + "file-regular", + "../foo/file-regular", + "file-executable", + "../foo/file-executable", + "file-symlink", + "../foo/file-symlink", + "file-enoent", + "../foo/file-enoent", + "directory-directory", + "../foo/directory-directory", + "directory-symlink", + "../foo/directory-symlink", + "directory-enoent", + "../foo/directory-enoent", + "path-directory", + "../foo/path-directory", + "path-regular", + "../foo/path-regular", + "path-executable", + "../foo/path-executable", + "path-symlink", + "../foo/path-symlink", + "path-enoent", + "../foo/path-enoent", + }, + }) require.NoError(t, err) var actionResult remoteexecution.ActionResult require.NoError( @@ -404,378 +359,135 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { writableFileUploadDelay, &actionResult, /* forceUploadTreesAndDirectories = */ false)) - require.Equal(t, expectedResult, actionResult) - } - - t.Run("Success", func(t *testing.T) { - t.Run("FilesAndDirectories", func(t *testing.T) { - testSuccess(t, &remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{ - "directory-directory", - "../foo/directory-directory", - "directory-symlink", - "../foo/directory-symlink", - "directory-enoent", - "../foo/directory-enoent", - "path-directory", - "../foo/path-directory", - }, - OutputFiles: []string{ - "file-regular", - "../foo/file-regular", - "file-executable", - "../foo/file-executable", - "file-symlink", - "../foo/file-symlink", - "file-enoent", - "../foo/file-enoent", - "path-regular", - "../foo/path-regular", - "path-executable", - "../foo/path-executable", - "path-symlink", - "../foo/path-symlink", - "path-enoent", - "../foo/path-enoent", - }, - }, remoteexecution.ActionResult{ - OutputDirectories: []*remoteexecution.OutputDirectory{ - { - Path: "directory-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "55aed4acf40a28132fb2d2de2b5962f0", - SizeBytes: 184, - }, - IsTopologicallySorted: true, - }, - { - Path: "../foo/directory-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "55aed4acf40a28132fb2d2de2b5962f0", - SizeBytes: 184, - }, - IsTopologicallySorted: true, - }, - { - Path: "path-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "9dd94c5a4b02914af42e8e6372e0b709", - SizeBytes: 2, - }, - IsTopologicallySorted: true, - }, - { - Path: "../foo/path-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "9dd94c5a4b02914af42e8e6372e0b709", - SizeBytes: 2, - }, - IsTopologicallySorted: true, + require.Equal(t, remoteexecution.ActionResult{ + OutputDirectories: []*remoteexecution.OutputDirectory{ + { + Path: "directory-directory", + TreeDigest: &remoteexecution.Digest{ + Hash: "55aed4acf40a28132fb2d2de2b5962f0", + SizeBytes: 184, }, + IsTopologicallySorted: true, }, - OutputDirectorySymlinks: []*remoteexecution.OutputSymlink{ - { - Path: "directory-symlink", - Target: "directory-symlink-target", - }, - { - Path: "../foo/directory-symlink", - Target: "directory-symlink-target", + { + Path: "../foo/directory-directory", + TreeDigest: &remoteexecution.Digest{ + Hash: "55aed4acf40a28132fb2d2de2b5962f0", + SizeBytes: 184, }, + IsTopologicallySorted: true, }, - OutputFiles: []*remoteexecution.OutputFile{ - { - Path: "file-executable", - Digest: &remoteexecution.Digest{ - Hash: 
"7590e1b46240ecb5ea65a80db7ee6fae", - SizeBytes: 15, - }, - IsExecutable: true, - }, - { - Path: "../foo/file-executable", - Digest: &remoteexecution.Digest{ - Hash: "7590e1b46240ecb5ea65a80db7ee6fae", - SizeBytes: 15, - }, - IsExecutable: true, - }, - { - Path: "file-regular", - Digest: &remoteexecution.Digest{ - Hash: "a58c2f2281011ca2e631b39baa1ab657", - SizeBytes: 12, - }, + { + Path: "path-directory", + TreeDigest: &remoteexecution.Digest{ + Hash: "9dd94c5a4b02914af42e8e6372e0b709", + SizeBytes: 2, }, - { - Path: "../foo/file-regular", - Digest: &remoteexecution.Digest{ - Hash: "a58c2f2281011ca2e631b39baa1ab657", - SizeBytes: 12, - }, + IsTopologicallySorted: true, + }, + { + Path: "../foo/path-directory", + TreeDigest: &remoteexecution.Digest{ + Hash: "9dd94c5a4b02914af42e8e6372e0b709", + SizeBytes: 2, }, - { - Path: "path-executable", - Digest: &remoteexecution.Digest{ - Hash: "87729325cd08d300fb0e238a3a8da443", - SizeBytes: 15, - }, - IsExecutable: true, + IsTopologicallySorted: true, + }, + }, + OutputFiles: []*remoteexecution.OutputFile{ + { + Path: "file-executable", + Digest: &remoteexecution.Digest{ + Hash: "7590e1b46240ecb5ea65a80db7ee6fae", + SizeBytes: 15, }, - { - Path: "../foo/path-executable", - Digest: &remoteexecution.Digest{ - Hash: "87729325cd08d300fb0e238a3a8da443", - SizeBytes: 15, - }, - IsExecutable: true, + IsExecutable: true, + }, + { + Path: "../foo/file-executable", + Digest: &remoteexecution.Digest{ + Hash: "7590e1b46240ecb5ea65a80db7ee6fae", + SizeBytes: 15, }, - { - Path: "path-regular", - Digest: &remoteexecution.Digest{ - Hash: "44206648b7bb2f3b0d2ed0c52ad2e269", - SizeBytes: 12, - }, + IsExecutable: true, + }, + { + Path: "file-regular", + Digest: &remoteexecution.Digest{ + Hash: "a58c2f2281011ca2e631b39baa1ab657", + SizeBytes: 12, }, - { - Path: "../foo/path-regular", - Digest: &remoteexecution.Digest{ - Hash: "44206648b7bb2f3b0d2ed0c52ad2e269", - SizeBytes: 12, - }, + }, + { + Path: "../foo/file-regular", + Digest: &remoteexecution.Digest{ + Hash: "a58c2f2281011ca2e631b39baa1ab657", + SizeBytes: 12, }, }, - OutputFileSymlinks: []*remoteexecution.OutputSymlink{ - { - Path: "file-symlink", - Target: "file-symlink-target", + { + Path: "path-executable", + Digest: &remoteexecution.Digest{ + Hash: "87729325cd08d300fb0e238a3a8da443", + SizeBytes: 15, }, - { - Path: "../foo/file-symlink", - Target: "file-symlink-target", + IsExecutable: true, + }, + { + Path: "../foo/path-executable", + Digest: &remoteexecution.Digest{ + Hash: "87729325cd08d300fb0e238a3a8da443", + SizeBytes: 15, }, - { - Path: "path-symlink", - Target: "path-symlink-target", + IsExecutable: true, + }, + { + Path: "path-regular", + Digest: &remoteexecution.Digest{ + Hash: "44206648b7bb2f3b0d2ed0c52ad2e269", + SizeBytes: 12, }, - { - Path: "../foo/path-symlink", - Target: "path-symlink-target", + }, + { + Path: "../foo/path-regular", + Digest: &remoteexecution.Digest{ + Hash: "44206648b7bb2f3b0d2ed0c52ad2e269", + SizeBytes: 12, }, }, - }) - }) - t.Run("Paths", func(t *testing.T) { - testSuccess(t, &remoteexecution.Command{ - WorkingDirectory: "foo", - OutputPaths: []string{ - "file-regular", - "../foo/file-regular", - "file-executable", - "../foo/file-executable", - "file-symlink", - "../foo/file-symlink", - "file-enoent", - "../foo/file-enoent", - "directory-directory", - "../foo/directory-directory", - "directory-symlink", - "../foo/directory-symlink", - "directory-enoent", - "../foo/directory-enoent", - "path-directory", - "../foo/path-directory", - "path-regular", - "../foo/path-regular", 
- "path-executable", - "../foo/path-executable", - "path-symlink", - "../foo/path-symlink", - "path-enoent", - "../foo/path-enoent", + }, + OutputSymlinks: []*remoteexecution.OutputSymlink{ + { + Path: "directory-symlink", + Target: "directory-symlink-target", }, - }, remoteexecution.ActionResult{ - OutputDirectories: []*remoteexecution.OutputDirectory{ - { - Path: "directory-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "55aed4acf40a28132fb2d2de2b5962f0", - SizeBytes: 184, - }, - IsTopologicallySorted: true, - }, - { - Path: "../foo/directory-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "55aed4acf40a28132fb2d2de2b5962f0", - SizeBytes: 184, - }, - IsTopologicallySorted: true, - }, - { - Path: "path-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "9dd94c5a4b02914af42e8e6372e0b709", - SizeBytes: 2, - }, - IsTopologicallySorted: true, - }, - { - Path: "../foo/path-directory", - TreeDigest: &remoteexecution.Digest{ - Hash: "9dd94c5a4b02914af42e8e6372e0b709", - SizeBytes: 2, - }, - IsTopologicallySorted: true, - }, + { + Path: "../foo/directory-symlink", + Target: "directory-symlink-target", }, - OutputFiles: []*remoteexecution.OutputFile{ - { - Path: "file-executable", - Digest: &remoteexecution.Digest{ - Hash: "7590e1b46240ecb5ea65a80db7ee6fae", - SizeBytes: 15, - }, - IsExecutable: true, - }, - { - Path: "../foo/file-executable", - Digest: &remoteexecution.Digest{ - Hash: "7590e1b46240ecb5ea65a80db7ee6fae", - SizeBytes: 15, - }, - IsExecutable: true, - }, - { - Path: "file-regular", - Digest: &remoteexecution.Digest{ - Hash: "a58c2f2281011ca2e631b39baa1ab657", - SizeBytes: 12, - }, - }, - { - Path: "../foo/file-regular", - Digest: &remoteexecution.Digest{ - Hash: "a58c2f2281011ca2e631b39baa1ab657", - SizeBytes: 12, - }, - }, - { - Path: "path-executable", - Digest: &remoteexecution.Digest{ - Hash: "87729325cd08d300fb0e238a3a8da443", - SizeBytes: 15, - }, - IsExecutable: true, - }, - { - Path: "../foo/path-executable", - Digest: &remoteexecution.Digest{ - Hash: "87729325cd08d300fb0e238a3a8da443", - SizeBytes: 15, - }, - IsExecutable: true, - }, - { - Path: "path-regular", - Digest: &remoteexecution.Digest{ - Hash: "44206648b7bb2f3b0d2ed0c52ad2e269", - SizeBytes: 12, - }, - }, - { - Path: "../foo/path-regular", - Digest: &remoteexecution.Digest{ - Hash: "44206648b7bb2f3b0d2ed0c52ad2e269", - SizeBytes: 12, - }, - }, + { + Path: "file-symlink", + Target: "file-symlink-target", }, - OutputSymlinks: []*remoteexecution.OutputSymlink{ - { - Path: "directory-symlink", - Target: "directory-symlink-target", - }, - { - Path: "../foo/directory-symlink", - Target: "directory-symlink-target", - }, - { - Path: "file-symlink", - Target: "file-symlink-target", - }, - { - Path: "../foo/file-symlink", - Target: "file-symlink-target", - }, - { - Path: "path-symlink", - Target: "path-symlink-target", - }, - { - Path: "../foo/path-symlink", - Target: "path-symlink-target", - }, + { + Path: "../foo/file-symlink", + Target: "file-symlink-target", }, - }) - }) - }) - - t.Run("RootDirectory", func(t *testing.T) { - // Special case: it is also permitted to add the root - // directory as an REv2.0 output directory. This - // shouldn't cause any Lstat() calls, as the root - // directory always exists. It is also impossible to - // call Lstat() on it, as that would require us to - // traverse upwards. 
- root.EXPECT().ReadDir().Return(nil, nil) - contentAddressableStorage.EXPECT().Put( - ctx, - digest.MustNewDigest("example", remoteexecution.DigestFunction_MD5, "9dd94c5a4b02914af42e8e6372e0b709", 2), - gomock.Any()). - DoAndReturn(func(ctx context.Context, digest digest.Digest, b buffer.Buffer) error { - m, err := b.ToProto(&remoteexecution.Tree{}, 10000) - require.NoError(t, err) - testutil.RequireEqualProto(t, &remoteexecution.Tree{ - Root: &remoteexecution.Directory{}, - }, m) - return nil - }) - - oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "foo", - OutputDirectories: []string{".."}, - }, true) - require.NoError(t, err) - var actionResult remoteexecution.ActionResult - require.NoError( - t, - oh.UploadOutputs( - ctx, - root, - contentAddressableStorage, - digestFunction, - writableFileUploadDelay, - &actionResult, - /* forceUploadTreesAndDirectories = */ false)) - require.Equal(t, remoteexecution.ActionResult{ - OutputDirectories: []*remoteexecution.OutputDirectory{ { - Path: "..", - TreeDigest: &remoteexecution.Digest{ - Hash: "9dd94c5a4b02914af42e8e6372e0b709", - SizeBytes: 2, - }, - IsTopologicallySorted: true, + Path: "path-symlink", + Target: "path-symlink-target", + }, + { + Path: "../foo/path-symlink", + Target: "path-symlink-target", }, }, }, actionResult) }) t.Run("RootPath", func(t *testing.T) { - // Similar to the previous test, it is also permitted to - // add the root directory as an REv2.1 output path. + // It is permitted to add the root directory as an + // output path. root.EXPECT().ReadDir().Return(nil, nil) contentAddressableStorage.EXPECT().Put( ctx, @@ -793,7 +505,7 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "foo", OutputPaths: []string{".."}, - }, false) + }) require.NoError(t, err) var actionResult remoteexecution.ActionResult require.NoError( @@ -820,57 +532,7 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { }, actionResult) }) - t.Run("LstatFailureDirectory", func(t *testing.T) { - // Failure to Lstat() an output directory should cause - // it to be skipped. - root.EXPECT().Lstat(path.MustNewComponent("foo")).Return(filesystem.FileInfo{}, status.Error(codes.Internal, "I/O error")) - - oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "", - OutputDirectories: []string{"foo"}, - }, true) - require.NoError(t, err) - var actionResult remoteexecution.ActionResult - testutil.RequireEqualStatus( - t, - status.Error(codes.Internal, "Failed to read attributes of output directory \"foo\": I/O error"), - oh.UploadOutputs( - ctx, - root, - contentAddressableStorage, - digestFunction, - writableFileUploadDelay, - &actionResult, - /* forceUploadTreesAndDirectories = */ false)) - require.Equal(t, remoteexecution.ActionResult{}, actionResult) - }) - - t.Run("LstatFailureFile", func(t *testing.T) { - // Failure to Lstat() an output file should cause it to - // be skipped. 
- root.EXPECT().Lstat(path.MustNewComponent("foo")).Return(filesystem.FileInfo{}, status.Error(codes.Internal, "I/O error")) - - oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ - WorkingDirectory: "", - OutputFiles: []string{"foo"}, - }, true) - require.NoError(t, err) - var actionResult remoteexecution.ActionResult - testutil.RequireEqualStatus( - t, - status.Error(codes.Internal, "Failed to read attributes of output file \"foo\": I/O error"), - oh.UploadOutputs( - ctx, - root, - contentAddressableStorage, - digestFunction, - writableFileUploadDelay, - &actionResult, - /* forceUploadTreesAndDirectories = */ false)) - require.Equal(t, remoteexecution.ActionResult{}, actionResult) - }) - - t.Run("LstatFailurePath", func(t *testing.T) { + t.Run("LstatFailure", func(t *testing.T) { // Failure to Lstat() an output path should cause it to // be skipped. root.EXPECT().Lstat(path.MustNewComponent("foo")).Return(filesystem.FileInfo{}, status.Error(codes.Internal, "I/O error")) @@ -878,7 +540,7 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ WorkingDirectory: "", OutputPaths: []string{"foo"}, - }, false) + }) require.NoError(t, err) var actionResult remoteexecution.ActionResult testutil.RequireEqualStatus( @@ -992,7 +654,7 @@ func TestOutputHierarchyUploadOutputs(t *testing.T) { oh, err := builder.NewOutputHierarchy(&remoteexecution.Command{ OutputPaths: []string{"."}, OutputDirectoryFormat: remoteexecution.Command_TREE_AND_DIRECTORY, - }, false) + }) require.NoError(t, err) var actionResult remoteexecution.ActionResult require.NoError( diff --git a/pkg/proto/configuration/bb_worker/bb_worker.pb.go b/pkg/proto/configuration/bb_worker/bb_worker.pb.go index 05cad385..af8712e9 100644 --- a/pkg/proto/configuration/bb_worker/bb_worker.pb.go +++ b/pkg/proto/configuration/bb_worker/bb_worker.pb.go @@ -33,24 +33,23 @@ const ( ) type ApplicationConfiguration struct { - state protoimpl.MessageState `protogen:"open.v1"` - Blobstore *blobstore.BlobstoreConfiguration `protobuf:"bytes,1,opt,name=blobstore,proto3" json:"blobstore,omitempty"` - BrowserUrl string `protobuf:"bytes,2,opt,name=browser_url,json=browserUrl,proto3" json:"browser_url,omitempty"` - MaximumMessageSizeBytes int64 `protobuf:"varint,6,opt,name=maximum_message_size_bytes,json=maximumMessageSizeBytes,proto3" json:"maximum_message_size_bytes,omitempty"` - Scheduler *grpc.ClientConfiguration `protobuf:"bytes,8,opt,name=scheduler,proto3" json:"scheduler,omitempty"` - Global *global.Configuration `protobuf:"bytes,19,opt,name=global,proto3" json:"global,omitempty"` - BuildDirectories []*BuildDirectoryConfiguration `protobuf:"bytes,20,rep,name=build_directories,json=buildDirectories,proto3" json:"build_directories,omitempty"` - FilePool *filesystem.FilePoolConfiguration `protobuf:"bytes,22,opt,name=file_pool,json=filePool,proto3" json:"file_pool,omitempty"` - CompletedActionLoggers []*CompletedActionLoggingConfiguration `protobuf:"bytes,23,rep,name=completed_action_loggers,json=completedActionLoggers,proto3" json:"completed_action_loggers,omitempty"` - OutputUploadConcurrency int64 `protobuf:"varint,24,opt,name=output_upload_concurrency,json=outputUploadConcurrency,proto3" json:"output_upload_concurrency,omitempty"` - DirectoryCache *cas.CachingDirectoryFetcherConfiguration `protobuf:"bytes,25,opt,name=directory_cache,json=directoryCache,proto3" json:"directory_cache,omitempty"` - Prefetching *PrefetchingConfiguration 
`protobuf:"bytes,26,opt,name=prefetching,proto3" json:"prefetching,omitempty"` - ForceUploadTreesAndDirectories bool `protobuf:"varint,27,opt,name=force_upload_trees_and_directories,json=forceUploadTreesAndDirectories,proto3" json:"force_upload_trees_and_directories,omitempty"` - InputDownloadConcurrency int64 `protobuf:"varint,28,opt,name=input_download_concurrency,json=inputDownloadConcurrency,proto3" json:"input_download_concurrency,omitempty"` - SupportLegacyOutputFilesAndDirectories bool `protobuf:"varint,29,opt,name=support_legacy_output_files_and_directories,json=supportLegacyOutputFilesAndDirectories,proto3" json:"support_legacy_output_files_and_directories,omitempty"` - HttpExecutionTimeoutCompensators []*HttpExecutionTimeoutCompensator `protobuf:"bytes,30,rep,name=http_execution_timeout_compensators,json=httpExecutionTimeoutCompensators,proto3" json:"http_execution_timeout_compensators,omitempty"` - unknownFields protoimpl.UnknownFields - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Blobstore *blobstore.BlobstoreConfiguration `protobuf:"bytes,1,opt,name=blobstore,proto3" json:"blobstore,omitempty"` + BrowserUrl string `protobuf:"bytes,2,opt,name=browser_url,json=browserUrl,proto3" json:"browser_url,omitempty"` + MaximumMessageSizeBytes int64 `protobuf:"varint,6,opt,name=maximum_message_size_bytes,json=maximumMessageSizeBytes,proto3" json:"maximum_message_size_bytes,omitempty"` + Scheduler *grpc.ClientConfiguration `protobuf:"bytes,8,opt,name=scheduler,proto3" json:"scheduler,omitempty"` + Global *global.Configuration `protobuf:"bytes,19,opt,name=global,proto3" json:"global,omitempty"` + BuildDirectories []*BuildDirectoryConfiguration `protobuf:"bytes,20,rep,name=build_directories,json=buildDirectories,proto3" json:"build_directories,omitempty"` + FilePool *filesystem.FilePoolConfiguration `protobuf:"bytes,22,opt,name=file_pool,json=filePool,proto3" json:"file_pool,omitempty"` + CompletedActionLoggers []*CompletedActionLoggingConfiguration `protobuf:"bytes,23,rep,name=completed_action_loggers,json=completedActionLoggers,proto3" json:"completed_action_loggers,omitempty"` + OutputUploadConcurrency int64 `protobuf:"varint,24,opt,name=output_upload_concurrency,json=outputUploadConcurrency,proto3" json:"output_upload_concurrency,omitempty"` + DirectoryCache *cas.CachingDirectoryFetcherConfiguration `protobuf:"bytes,25,opt,name=directory_cache,json=directoryCache,proto3" json:"directory_cache,omitempty"` + Prefetching *PrefetchingConfiguration `protobuf:"bytes,26,opt,name=prefetching,proto3" json:"prefetching,omitempty"` + ForceUploadTreesAndDirectories bool `protobuf:"varint,27,opt,name=force_upload_trees_and_directories,json=forceUploadTreesAndDirectories,proto3" json:"force_upload_trees_and_directories,omitempty"` + InputDownloadConcurrency int64 `protobuf:"varint,28,opt,name=input_download_concurrency,json=inputDownloadConcurrency,proto3" json:"input_download_concurrency,omitempty"` + HttpExecutionTimeoutCompensators []*HttpExecutionTimeoutCompensator `protobuf:"bytes,30,rep,name=http_execution_timeout_compensators,json=httpExecutionTimeoutCompensators,proto3" json:"http_execution_timeout_compensators,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ApplicationConfiguration) Reset() { @@ -174,13 +173,6 @@ func (x *ApplicationConfiguration) GetInputDownloadConcurrency() int64 { return 0 } -func (x *ApplicationConfiguration) GetSupportLegacyOutputFilesAndDirectories() bool { - if x != nil { - return 
x.SupportLegacyOutputFilesAndDirectories - } - return false -} - func (x *ApplicationConfiguration) GetHttpExecutionTimeoutCompensators() []*HttpExecutionTimeoutCompensator { if x != nil { return x.HttpExecutionTimeoutCompensators @@ -770,8 +762,7 @@ var File_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_bb_wor const file_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_bb_worker_bb_worker_proto_rawDesc = "" + "\n" + - "Zgithub.com/buildbarn/bb-remote-execution/pkg/proto/configuration/bb_worker/bb_worker.proto\x12!buildbarn.configuration.bb_worker\x1a6build/bazel/remote/execution/v2/remote_execution.proto\x1aNgithub.com/buildbarn/bb-remote-execution/pkg/proto/configuration/cas/cas.proto\x1a\\github.com/buildbarn/bb-remote-execution/pkg/proto/configuration/filesystem/filesystem.proto\x1aagithub.com/buildbarn/bb-remote-execution/pkg/proto/configuration/filesystem/virtual/virtual.proto\x1aTgithub.com/buildbarn/bb-remote-execution/pkg/proto/resourceusage/resourceusage.proto\x1aQgithub.com/buildbarn/bb-storage/pkg/proto/configuration/blobstore/blobstore.proto\x1aOgithub.com/buildbarn/bb-storage/pkg/proto/configuration/eviction/eviction.proto\x1aKgithub.com/buildbarn/bb-storage/pkg/proto/configuration/global/global.proto\x1aGgithub.com/buildbarn/bb-storage/pkg/proto/configuration/grpc/grpc.proto\x1aPgithub.com/buildbarn/bb-storage/pkg/proto/configuration/http/client/client.proto\x1a\x1egoogle/protobuf/duration.proto\"\xd1\n" + - "\n" + + "Zgithub.com/buildbarn/bb-remote-execution/pkg/proto/configuration/bb_worker/bb_worker.proto\x12!buildbarn.configuration.bb_worker\x1a6build/bazel/remote/execution/v2/remote_execution.proto\x1aNgithub.com/buildbarn/bb-remote-execution/pkg/proto/configuration/cas/cas.proto\x1a\\github.com/buildbarn/bb-remote-execution/pkg/proto/configuration/filesystem/filesystem.proto\x1aagithub.com/buildbarn/bb-remote-execution/pkg/proto/configuration/filesystem/virtual/virtual.proto\x1aTgithub.com/buildbarn/bb-remote-execution/pkg/proto/resourceusage/resourceusage.proto\x1aQgithub.com/buildbarn/bb-storage/pkg/proto/configuration/blobstore/blobstore.proto\x1aOgithub.com/buildbarn/bb-storage/pkg/proto/configuration/eviction/eviction.proto\x1aKgithub.com/buildbarn/bb-storage/pkg/proto/configuration/global/global.proto\x1aGgithub.com/buildbarn/bb-storage/pkg/proto/configuration/grpc/grpc.proto\x1aPgithub.com/buildbarn/bb-storage/pkg/proto/configuration/http/client/client.proto\x1a\x1egoogle/protobuf/duration.proto\"\xfa\t\n" + "\x18ApplicationConfiguration\x12W\n" + "\tblobstore\x18\x01 \x01(\v29.buildbarn.configuration.blobstore.BlobstoreConfigurationR\tblobstore\x12\x1f\n" + "\vbrowser_url\x18\x02 \x01(\tR\n" + @@ -786,10 +777,9 @@ const file_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_bb_w "\x0fdirectory_cache\x18\x19 \x01(\v2A.buildbarn.configuration.cas.CachingDirectoryFetcherConfigurationR\x0edirectoryCache\x12]\n" + "\vprefetching\x18\x1a \x01(\v2;.buildbarn.configuration.bb_worker.PrefetchingConfigurationR\vprefetching\x12J\n" + "\"force_upload_trees_and_directories\x18\x1b \x01(\bR\x1eforceUploadTreesAndDirectories\x12<\n" + - "\x1ainput_download_concurrency\x18\x1c \x01(\x03R\x18inputDownloadConcurrency\x12[\n" + - "+support_legacy_output_files_and_directories\x18\x1d \x01(\bR&supportLegacyOutputFilesAndDirectories\x12\x91\x01\n" + + "\x1ainput_download_concurrency\x18\x1c \x01(\x03R\x18inputDownloadConcurrency\x12\x91\x01\n" + "#http_execution_timeout_compensators\x18\x1e 
\x03(\v2B.buildbarn.configuration.bb_worker.HttpExecutionTimeoutCompensatorR httpExecutionTimeoutCompensatorsJ\x04\b\t\x10\n" + "J\x04\b\f\x10\rJ\x04\b\x10\x10\x11J\x04\b\x12\x10\x13J\x04\b\x15\x10\x16J\x04\b\x1d\x10\x1e\"\xbd\x02\n" + "\x1bBuildDirectoryConfiguration\x12^\n" + "\x06native\x18\x01 \x01(\v2D.buildbarn.configuration.bb_worker.NativeBuildDirectoryConfigurationH\x00R\x06native\x12a\n" + "\avirtual\x18\x02 \x01(\v2E.buildbarn.configuration.bb_worker.VirtualBuildDirectoryConfigurationH\x00R\avirtual\x12P\n" + diff --git a/pkg/proto/configuration/bb_worker/bb_worker.proto b/pkg/proto/configuration/bb_worker/bb_worker.proto index dde3637c..8e867811 100644 --- a/pkg/proto/configuration/bb_worker/bb_worker.proto +++ b/pkg/proto/configuration/bb_worker/bb_worker.proto @@ -157,14 +157,11 @@ message ApplicationConfiguration { // worker threads. int64 input_download_concurrency = 28; - // If set, provide support for specifying action outputs using - // REv2.0's Command.output_files and Command.output_directories - // fields. When not set, only REv2.1's Command.output_paths option is - // supported. - // - // NOTE: This feature is deprecated, and will be removed after - // 2026-04-01. - bool support_legacy_output_files_and_directories = 29; + // Was 'support_legacy_output_files_and_directories', which could be + // set to enable support for specifying action outputs using REv2.0's + // Command.output_files and Command.output_directories fields. This + // implementation only supports REv2.1's Command.output_paths. + reserved 29; // Additional helper processes that the worker needs to call into to // suspend and resume the clocks that are used to enforce the From fd937c7783e331ddc4af39b9a225f91d2ecd3c4c Mon Sep 17 00:00:00 2001 From: Ed Schouten Date: Fri, 7 Nov 2025 06:01:35 +0100 Subject: [PATCH 2/3] Remove support for platform properties in Command messages REv2.2 promoted the platform properties from the Command message to the Action message. This has the advantage that the scheduler no longer needs to process any Command messages. This saves one round trip to the CAS. Command messages can in some cases also become very large, meaning they add unnecessary memory pressure on the scheduler. Given that this feature was added on 2020-10-17 and clients were able to adopt it quickly, it's time to remove support for the old scheme.
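As an illustrative sketch only (not part of this change, and assuming the standard remote-apis Go bindings), a client that conforms to REv2.2 attaches its platform properties directly to the Action message; commandDigest, inputRootDigest, and the "OSFamily" property below are hypothetical placeholders:

    import (
        remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2"
    )

    // newActionWithPlatform builds an Action that carries its platform
    // properties itself, so that the scheduler never has to fetch the
    // Command message from the CAS to extract them.
    func newActionWithPlatform(commandDigest, inputRootDigest *remoteexecution.Digest) *remoteexecution.Action {
        return &remoteexecution.Action{
            CommandDigest:   commandDigest,
            InputRootDigest: inputRootDigest,
            // Platform properties now need to live on the Action
            // message; properties that are only present on the Command
            // message are no longer considered by this scheduler.
            Platform: &remoteexecution.Platform{
                Properties: []*remoteexecution.Platform_Property{
                    {Name: "OSFamily", Value: "linux"},
                },
            },
        }
    }

Schedulers that were configured with the removed 'action_and_command' platform key extractor need to switch to the 'action' variant (or 'static'), as only those options remain.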
--- .../configuration/scheduler/scheduler.pb.go | 60 +++---- .../configuration/scheduler/scheduler.proto | 21 +-- pkg/scheduler/platform/BUILD.bazel | 6 - .../action_and_command_key_extractor.go | 79 --------- .../action_and_command_key_extractor_test.go | 153 ------------------ pkg/scheduler/platform/configuration.go | 2 - 6 files changed, 26 insertions(+), 295 deletions(-) delete mode 100644 pkg/scheduler/platform/action_and_command_key_extractor.go delete mode 100644 pkg/scheduler/platform/action_and_command_key_extractor_test.go diff --git a/pkg/proto/configuration/scheduler/scheduler.pb.go b/pkg/proto/configuration/scheduler/scheduler.pb.go index cd7a8bf9..c965fe74 100644 --- a/pkg/proto/configuration/scheduler/scheduler.pb.go +++ b/pkg/proto/configuration/scheduler/scheduler.pb.go @@ -231,7 +231,6 @@ type PlatformKeyExtractorConfiguration struct { // Types that are valid to be assigned to Kind: // // *PlatformKeyExtractorConfiguration_Action - // *PlatformKeyExtractorConfiguration_ActionAndCommand // *PlatformKeyExtractorConfiguration_Static Kind isPlatformKeyExtractorConfiguration_Kind `protobuf_oneof:"kind"` unknownFields protoimpl.UnknownFields @@ -284,15 +283,6 @@ func (x *PlatformKeyExtractorConfiguration) GetAction() *emptypb.Empty { return nil } -func (x *PlatformKeyExtractorConfiguration) GetActionAndCommand() *emptypb.Empty { - if x != nil { - if x, ok := x.Kind.(*PlatformKeyExtractorConfiguration_ActionAndCommand); ok { - return x.ActionAndCommand - } - } - return nil -} - func (x *PlatformKeyExtractorConfiguration) GetStatic() *v2.Platform { if x != nil { if x, ok := x.Kind.(*PlatformKeyExtractorConfiguration_Static); ok { @@ -310,19 +300,12 @@ type PlatformKeyExtractorConfiguration_Action struct { Action *emptypb.Empty `protobuf:"bytes,1,opt,name=action,proto3,oneof"` } -type PlatformKeyExtractorConfiguration_ActionAndCommand struct { - ActionAndCommand *emptypb.Empty `protobuf:"bytes,2,opt,name=action_and_command,json=actionAndCommand,proto3,oneof"` -} - type PlatformKeyExtractorConfiguration_Static struct { Static *v2.Platform `protobuf:"bytes,3,opt,name=static,proto3,oneof"` } func (*PlatformKeyExtractorConfiguration_Action) isPlatformKeyExtractorConfiguration_Kind() {} -func (*PlatformKeyExtractorConfiguration_ActionAndCommand) isPlatformKeyExtractorConfiguration_Kind() { -} - func (*PlatformKeyExtractorConfiguration_Static) isPlatformKeyExtractorConfiguration_Kind() {} type InvocationKeyExtractorConfiguration struct { @@ -694,12 +677,11 @@ const file_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_sche "\aBackend\x120\n" + "\x14instance_name_prefix\x18\x01 \x01(\tR\x12instanceNamePrefix\x12E\n" + "\bplatform\x18\x02 \x01(\v2).build.bazel.remote.execution.v2.PlatformR\bplatform\x12a\n" + - "\raction_router\x18\x03 \x01(\v2<.buildbarn.configuration.scheduler.ActionRouterConfigurationR\factionRouter\"\xea\x01\n" + + "\raction_router\x18\x03 \x01(\v2<.buildbarn.configuration.scheduler.ActionRouterConfigurationR\factionRouter\"\xa8\x01\n" + "!PlatformKeyExtractorConfiguration\x120\n" + - "\x06action\x18\x01 \x01(\v2\x16.google.protobuf.EmptyH\x00R\x06action\x12F\n" + - "\x12action_and_command\x18\x02 \x01(\v2\x16.google.protobuf.EmptyH\x00R\x10actionAndCommand\x12C\n" + + "\x06action\x18\x01 \x01(\v2\x16.google.protobuf.EmptyH\x00R\x06action\x12C\n" + "\x06static\x18\x03 \x01(\v2).build.bazel.remote.execution.v2.PlatformH\x00R\x06staticB\x06\n" + - "\x04kind\"\xa4\x02\n" + + "\x04kindJ\x04\b\x02\x10\x03\"\xa4\x02\n" + 
"#InvocationKeyExtractorConfiguration\x12F\n" + "\x12tool_invocation_id\x18\x02 \x01(\v2\x16.google.protobuf.EmptyH\x00R\x10toolInvocationId\x12T\n" + "\x19correlated_invocations_id\x18\x03 \x01(\v2\x16.google.protobuf.EmptyH\x00R\x17correlatedInvocationsId\x12Q\n" + @@ -756,24 +738,23 @@ var file_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_schedu 8, // 6: buildbarn.configuration.scheduler.DemultiplexingActionRouterConfiguration.backends:type_name -> buildbarn.configuration.scheduler.DemultiplexingActionRouterConfiguration.Backend 0, // 7: buildbarn.configuration.scheduler.DemultiplexingActionRouterConfiguration.default_action_router:type_name -> buildbarn.configuration.scheduler.ActionRouterConfiguration 9, // 8: buildbarn.configuration.scheduler.PlatformKeyExtractorConfiguration.action:type_name -> google.protobuf.Empty - 9, // 9: buildbarn.configuration.scheduler.PlatformKeyExtractorConfiguration.action_and_command:type_name -> google.protobuf.Empty - 10, // 10: buildbarn.configuration.scheduler.PlatformKeyExtractorConfiguration.static:type_name -> build.bazel.remote.execution.v2.Platform - 9, // 11: buildbarn.configuration.scheduler.InvocationKeyExtractorConfiguration.tool_invocation_id:type_name -> google.protobuf.Empty - 9, // 12: buildbarn.configuration.scheduler.InvocationKeyExtractorConfiguration.correlated_invocations_id:type_name -> google.protobuf.Empty - 9, // 13: buildbarn.configuration.scheduler.InvocationKeyExtractorConfiguration.authentication_metadata:type_name -> google.protobuf.Empty - 11, // 14: buildbarn.configuration.scheduler.InitialSizeClassAnalyzerConfiguration.default_execution_timeout:type_name -> google.protobuf.Duration - 11, // 15: buildbarn.configuration.scheduler.InitialSizeClassAnalyzerConfiguration.maximum_execution_timeout:type_name -> google.protobuf.Duration - 6, // 16: buildbarn.configuration.scheduler.InitialSizeClassAnalyzerConfiguration.feedback_driven:type_name -> buildbarn.configuration.scheduler.InitialSizeClassFeedbackDrivenAnalyzerConfiguration - 11, // 17: buildbarn.configuration.scheduler.InitialSizeClassFeedbackDrivenAnalyzerConfiguration.failure_cache_duration:type_name -> google.protobuf.Duration - 7, // 18: buildbarn.configuration.scheduler.InitialSizeClassFeedbackDrivenAnalyzerConfiguration.page_rank:type_name -> buildbarn.configuration.scheduler.InitialSizeClassPageRankStrategyCalculatorConfiguration - 11, // 19: buildbarn.configuration.scheduler.InitialSizeClassPageRankStrategyCalculatorConfiguration.minimum_execution_timeout:type_name -> google.protobuf.Duration - 10, // 20: buildbarn.configuration.scheduler.DemultiplexingActionRouterConfiguration.Backend.platform:type_name -> build.bazel.remote.execution.v2.Platform - 0, // 21: buildbarn.configuration.scheduler.DemultiplexingActionRouterConfiguration.Backend.action_router:type_name -> buildbarn.configuration.scheduler.ActionRouterConfiguration - 22, // [22:22] is the sub-list for method output_type - 22, // [22:22] is the sub-list for method input_type - 22, // [22:22] is the sub-list for extension type_name - 22, // [22:22] is the sub-list for extension extendee - 0, // [0:22] is the sub-list for field type_name + 10, // 9: buildbarn.configuration.scheduler.PlatformKeyExtractorConfiguration.static:type_name -> build.bazel.remote.execution.v2.Platform + 9, // 10: buildbarn.configuration.scheduler.InvocationKeyExtractorConfiguration.tool_invocation_id:type_name -> google.protobuf.Empty + 9, // 11: 
buildbarn.configuration.scheduler.InvocationKeyExtractorConfiguration.correlated_invocations_id:type_name -> google.protobuf.Empty + 9, // 12: buildbarn.configuration.scheduler.InvocationKeyExtractorConfiguration.authentication_metadata:type_name -> google.protobuf.Empty + 11, // 13: buildbarn.configuration.scheduler.InitialSizeClassAnalyzerConfiguration.default_execution_timeout:type_name -> google.protobuf.Duration + 11, // 14: buildbarn.configuration.scheduler.InitialSizeClassAnalyzerConfiguration.maximum_execution_timeout:type_name -> google.protobuf.Duration + 6, // 15: buildbarn.configuration.scheduler.InitialSizeClassAnalyzerConfiguration.feedback_driven:type_name -> buildbarn.configuration.scheduler.InitialSizeClassFeedbackDrivenAnalyzerConfiguration + 11, // 16: buildbarn.configuration.scheduler.InitialSizeClassFeedbackDrivenAnalyzerConfiguration.failure_cache_duration:type_name -> google.protobuf.Duration + 7, // 17: buildbarn.configuration.scheduler.InitialSizeClassFeedbackDrivenAnalyzerConfiguration.page_rank:type_name -> buildbarn.configuration.scheduler.InitialSizeClassPageRankStrategyCalculatorConfiguration + 11, // 18: buildbarn.configuration.scheduler.InitialSizeClassPageRankStrategyCalculatorConfiguration.minimum_execution_timeout:type_name -> google.protobuf.Duration + 10, // 19: buildbarn.configuration.scheduler.DemultiplexingActionRouterConfiguration.Backend.platform:type_name -> build.bazel.remote.execution.v2.Platform + 0, // 20: buildbarn.configuration.scheduler.DemultiplexingActionRouterConfiguration.Backend.action_router:type_name -> buildbarn.configuration.scheduler.ActionRouterConfiguration + 21, // [21:21] is the sub-list for method output_type + 21, // [21:21] is the sub-list for method input_type + 21, // [21:21] is the sub-list for extension type_name + 21, // [21:21] is the sub-list for extension extendee + 0, // [0:21] is the sub-list for field type_name } func init() { @@ -789,7 +770,6 @@ func file_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_sched } file_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_scheduler_scheduler_proto_msgTypes[3].OneofWrappers = []any{ (*PlatformKeyExtractorConfiguration_Action)(nil), - (*PlatformKeyExtractorConfiguration_ActionAndCommand)(nil), (*PlatformKeyExtractorConfiguration_Static)(nil), } file_github_com_buildbarn_bb_remote_execution_pkg_proto_configuration_scheduler_scheduler_proto_msgTypes[4].OneofWrappers = []any{ diff --git a/pkg/proto/configuration/scheduler/scheduler.proto b/pkg/proto/configuration/scheduler/scheduler.proto index 8917ca33..ba95c610 100644 --- a/pkg/proto/configuration/scheduler/scheduler.proto +++ b/pkg/proto/configuration/scheduler/scheduler.proto @@ -92,23 +92,8 @@ message PlatformKeyExtractorConfiguration { oneof kind { // Attempt to extract platform properties from the REv2 Action // message. - // - // This is sufficient when exclusively dealing with clients that - // implement REv2.2 or later, such as Bazel 4.1.0 and later. google.protobuf.Empty action = 1; - // Attempt to extract platform properties from the REv2 Action - // message, and fall back to the Command message in case the Action - // message contains no platform properties. - // - // This is less efficient than only considering the Action message, - // but allows requests from clients that implement REv2.1 or earlier - // to be respected as well. - // - // NOTE: This feature is deprecated, and will be removed after - // 2026-04-01. 
- google.protobuf.Empty action_and_command = 2; - // Do not respect platform properties from the client's request, but // use a static value provided in configuration. // @@ -119,6 +104,12 @@ message PlatformKeyExtractorConfiguration { // workers of a newer similar platform. build.bazel.remote.execution.v2.Platform static = 3; } + + // Was 'action_and_command', which caused it to extract platform + // properties from the REv2 Action message, and fall back to the + // Command message in case the Action message contains no platform + // properties. This was provided to support REv2.1 and earlier. + reserved 2; } message InvocationKeyExtractorConfiguration { diff --git a/pkg/scheduler/platform/BUILD.bazel b/pkg/scheduler/platform/BUILD.bazel index 04ba2dbb..f08447c6 100644 --- a/pkg/scheduler/platform/BUILD.bazel +++ b/pkg/scheduler/platform/BUILD.bazel @@ -3,7 +3,6 @@ load("@rules_go//go:def.bzl", "go_library", "go_test") go_library( name = "platform", srcs = [ - "action_and_command_key_extractor.go", "action_key_extractor.go", "configuration.go", "key.go", @@ -21,7 +20,6 @@ go_library( "@com_github_buildbarn_bb_storage//pkg/digest", "@com_github_buildbarn_bb_storage//pkg/util", "@com_github_golang_protobuf//jsonpb", - "@com_github_prometheus_client_golang//prometheus", "@org_golang_google_grpc//codes", "@org_golang_google_grpc//status", "@org_golang_google_protobuf//encoding/protojson", @@ -31,23 +29,19 @@ go_library( go_test( name = "platform_test", srcs = [ - "action_and_command_key_extractor_test.go", "action_key_extractor_test.go", "key_test.go", "static_key_extractor_test.go", ], deps = [ ":platform", - "//internal/mock", "//pkg/proto/buildqueuestate", "@bazel_remote_apis//build/bazel/remote/execution/v2:remote_execution_go_proto", - "@com_github_buildbarn_bb_storage//pkg/blobstore/buffer", "@com_github_buildbarn_bb_storage//pkg/digest", "@com_github_buildbarn_bb_storage//pkg/testutil", "@com_github_buildbarn_bb_storage//pkg/util", "@com_github_stretchr_testify//require", "@org_golang_google_grpc//codes", "@org_golang_google_grpc//status", - "@org_uber_go_mock//gomock", ], ) diff --git a/pkg/scheduler/platform/action_and_command_key_extractor.go b/pkg/scheduler/platform/action_and_command_key_extractor.go deleted file mode 100644 index 0fcd6593..00000000 --- a/pkg/scheduler/platform/action_and_command_key_extractor.go +++ /dev/null @@ -1,79 +0,0 @@ -package platform - -import ( - "context" - "sync" - - remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2" - "github.com/buildbarn/bb-storage/pkg/blobstore" - "github.com/buildbarn/bb-storage/pkg/digest" - "github.com/buildbarn/bb-storage/pkg/util" - "github.com/prometheus/client_golang/prometheus" -) - -var ( - actionAndCommandKeyExtractorPrometheusMetrics sync.Once - - actionAndCommandKeyExtractorCommandMessagesReadTotal = prometheus.NewCounter( - prometheus.CounterOpts{ - Namespace: "buildbarn", - Subsystem: "builder", - Name: "action_and_command_key_extractor_command_messages_read_total", - Help: "Number of times REv2 Action messages did not contain platform properties, meaning Command messages had to be loaded instead.", - }) -) - -type actionAndCommandKeyExtractor struct { - contentAddressableStorage blobstore.BlobAccess - maximumMessageSizeBytes int -} - -// NewActionAndCommandKeyExtractor creates a new KeyExtractor is capable -// of extracting a platform key from an REv2 Action message. 
If no -// platform properties are specified in the Action, it falls back to -// reading them from the Command message. -// -// This platform key extractor needs to be used if requests from clients -// that implement REv2.1 or older need to be processed, as platform -// properties were only added to the Action message in REv2.2. -func NewActionAndCommandKeyExtractor(contentAddressableStorage blobstore.BlobAccess, maximumMessageSizeBytes int) KeyExtractor { - actionAndCommandKeyExtractorPrometheusMetrics.Do(func() { - prometheus.MustRegister(actionAndCommandKeyExtractorCommandMessagesReadTotal) - }) - - return &actionAndCommandKeyExtractor{ - contentAddressableStorage: contentAddressableStorage, - maximumMessageSizeBytes: maximumMessageSizeBytes, - } -} - -func (ke *actionAndCommandKeyExtractor) ExtractKey(ctx context.Context, digestFunction digest.Function, action *remoteexecution.Action) (Key, error) { - instanceName := digestFunction.GetInstanceName() - if action.Platform != nil { - // REv2.2 or newer: platform properties are stored in - // the Action message. - key, err := NewKey(instanceName, action.Platform) - if err != nil { - return Key{}, util.StatusWrap(err, "Failed to extract platform key from action") - } - return key, nil - } - - // REv2.1 or older: platform properties are stored in the - // Command message. - commandDigest, err := digestFunction.NewDigestFromProto(action.CommandDigest) - if err != nil { - return Key{}, util.StatusWrap(err, "Failed to extract digest for command") - } - commandMessage, err := ke.contentAddressableStorage.Get(ctx, commandDigest).ToProto(&remoteexecution.Command{}, ke.maximumMessageSizeBytes) - if err != nil { - return Key{}, util.StatusWrap(err, "Failed to obtain command") - } - command := commandMessage.(*remoteexecution.Command) - key, err := NewKey(instanceName, command.Platform) - if err != nil { - return Key{}, util.StatusWrap(err, "Failed to extract platform key from command") - } - actionAndCommandKeyExtractorCommandMessagesReadTotal.Inc() - return key, nil -} diff --git a/pkg/scheduler/platform/action_and_command_key_extractor_test.go b/pkg/scheduler/platform/action_and_command_key_extractor_test.go deleted file mode 100644 index da30e2fd..00000000 --- a/pkg/scheduler/platform/action_and_command_key_extractor_test.go +++ /dev/null @@ -1,153 +0,0 @@ -package platform_test - -import ( - "context" - "testing" - - remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2" - "github.com/buildbarn/bb-remote-execution/internal/mock" - "github.com/buildbarn/bb-remote-execution/pkg/proto/buildqueuestate" - "github.com/buildbarn/bb-remote-execution/pkg/scheduler/platform" - "github.com/buildbarn/bb-storage/pkg/blobstore/buffer" - "github.com/buildbarn/bb-storage/pkg/digest" - "github.com/buildbarn/bb-storage/pkg/testutil" - "github.com/stretchr/testify/require" - - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" - - "go.uber.org/mock/gomock" -) - -func TestActionAndCommandKeyExtractor(t *testing.T) { - ctrl, ctx := gomock.WithContext(context.Background(), t) - - contentAddressableStorage := mock.NewMockBlobAccess(ctrl) - keyExtractor := platform.NewActionAndCommandKeyExtractor(contentAddressableStorage, 1024*1024) - digestFunction := digest.MustNewFunction("hello", remoteexecution.DigestFunction_MD5) - - t.Run("ActionInvalidProperties", func(t *testing.T) { - _, err := keyExtractor.ExtractKey(ctx, digestFunction, &remoteexecution.Action{ - Platform: &remoteexecution.Platform{ - Properties: 
[]*remoteexecution.Platform_Property{ - {Name: "os", Value: "linux"}, - {Name: "arch", Value: "aarch64"}, - }, - }, - }) - testutil.RequirePrefixedStatus(t, status.Error(codes.InvalidArgument, "Failed to extract platform key from action: Platform properties are not lexicographically sorted, as property "), err) - }) - - t.Run("ActionSuccess", func(t *testing.T) { - key, err := keyExtractor.ExtractKey(ctx, digestFunction, &remoteexecution.Action{ - Platform: &remoteexecution.Platform{ - Properties: []*remoteexecution.Platform_Property{ - {Name: "arch", Value: "aarch64"}, - {Name: "os", Value: "linux"}, - }, - }, - }) - require.NoError(t, err) - testutil.RequireEqualProto(t, &buildqueuestate.PlatformQueueName{ - InstanceNamePrefix: "hello", - Platform: &remoteexecution.Platform{ - Properties: []*remoteexecution.Platform_Property{ - {Name: "arch", Value: "aarch64"}, - {Name: "os", Value: "linux"}, - }, - }, - }, key.GetPlatformQueueName()) - }) - - t.Run("CommandInvalidDigest", func(t *testing.T) { - _, err := keyExtractor.ExtractKey(ctx, digestFunction, &remoteexecution.Action{ - CommandDigest: &remoteexecution.Digest{ - Hash: "4216455ceebbc3038bd0550c85b6a3bf", - SizeBytes: -1, - }, - }) - testutil.RequireEqualStatus(t, status.Error(codes.InvalidArgument, "Failed to extract digest for command: Invalid digest size: -1 bytes"), err) - }) - - t.Run("CommandStorageFailure", func(t *testing.T) { - contentAddressableStorage.EXPECT().Get(ctx, digest.MustNewDigest("hello", remoteexecution.DigestFunction_MD5, "4216455ceebbc3038bd0550c85b6a3bf", 123)). - Return(buffer.NewBufferFromError(status.Error(codes.Internal, "Cannot establish network connection"))) - - _, err := keyExtractor.ExtractKey(ctx, digestFunction, &remoteexecution.Action{ - CommandDigest: &remoteexecution.Digest{ - Hash: "4216455ceebbc3038bd0550c85b6a3bf", - SizeBytes: 123, - }, - }) - testutil.RequireEqualStatus(t, status.Error(codes.Internal, "Failed to obtain command: Cannot establish network connection"), err) - }) - - t.Run("CommandInvalidProperties", func(t *testing.T) { - contentAddressableStorage.EXPECT().Get(ctx, digest.MustNewDigest("hello", remoteexecution.DigestFunction_MD5, "4216455ceebbc3038bd0550c85b6a3bf", 123)). - Return(buffer.NewProtoBufferFromProto(&remoteexecution.Command{ - Platform: &remoteexecution.Platform{ - Properties: []*remoteexecution.Platform_Property{ - {Name: "os", Value: "linux"}, - {Name: "arch", Value: "aarch64"}, - }, - }, - }, buffer.UserProvided)) - - _, err := keyExtractor.ExtractKey(ctx, digestFunction, &remoteexecution.Action{ - CommandDigest: &remoteexecution.Digest{ - Hash: "4216455ceebbc3038bd0550c85b6a3bf", - SizeBytes: 123, - }, - }) - testutil.RequirePrefixedStatus(t, status.Error(codes.InvalidArgument, "Failed to extract platform key from command: Platform properties are not lexicographically sorted, as property "), err) - }) - - t.Run("CommandSuccess", func(t *testing.T) { - contentAddressableStorage.EXPECT().Get(ctx, digest.MustNewDigest("hello", remoteexecution.DigestFunction_MD5, "4216455ceebbc3038bd0550c85b6a3bf", 123)). 
- Return(buffer.NewProtoBufferFromProto(&remoteexecution.Command{ - Platform: &remoteexecution.Platform{ - Properties: []*remoteexecution.Platform_Property{ - {Name: "arch", Value: "aarch64"}, - {Name: "os", Value: "linux"}, - }, - }, - }, buffer.UserProvided)) - - key, err := keyExtractor.ExtractKey(ctx, digestFunction, &remoteexecution.Action{ - CommandDigest: &remoteexecution.Digest{ - Hash: "4216455ceebbc3038bd0550c85b6a3bf", - SizeBytes: 123, - }, - }) - require.NoError(t, err) - testutil.RequireEqualProto(t, &buildqueuestate.PlatformQueueName{ - InstanceNamePrefix: "hello", - Platform: &remoteexecution.Platform{ - Properties: []*remoteexecution.Platform_Property{ - {Name: "arch", Value: "aarch64"}, - {Name: "os", Value: "linux"}, - }, - }, - }, key.GetPlatformQueueName()) - }) - - t.Run("NoPlatformPresent", func(t *testing.T) { - // If no platform object is, assume the empty set of - // platform properties. Clients such as BuildStream are - // known for not providing them. - contentAddressableStorage.EXPECT().Get(ctx, digest.MustNewDigest("hello", remoteexecution.DigestFunction_MD5, "4216455ceebbc3038bd0550c85b6a3bf", 123)). - Return(buffer.NewProtoBufferFromProto(&remoteexecution.Command{}, buffer.UserProvided)) - - key, err := keyExtractor.ExtractKey(ctx, digestFunction, &remoteexecution.Action{ - CommandDigest: &remoteexecution.Digest{ - Hash: "4216455ceebbc3038bd0550c85b6a3bf", - SizeBytes: 123, - }, - }) - require.NoError(t, err) - testutil.RequireEqualProto(t, &buildqueuestate.PlatformQueueName{ - InstanceNamePrefix: "hello", - Platform: &remoteexecution.Platform{}, - }, key.GetPlatformQueueName()) - }) -} diff --git a/pkg/scheduler/platform/configuration.go b/pkg/scheduler/platform/configuration.go index 1b0a403f..634c2924 100644 --- a/pkg/scheduler/platform/configuration.go +++ b/pkg/scheduler/platform/configuration.go @@ -17,8 +17,6 @@ func NewKeyExtractorFromConfiguration(configuration *pb.PlatformKeyExtractorConf switch kind := configuration.Kind.(type) { case *pb.PlatformKeyExtractorConfiguration_Action: return ActionKeyExtractor, nil - case *pb.PlatformKeyExtractorConfiguration_ActionAndCommand: - return NewActionAndCommandKeyExtractor(contentAddressableStorage, maximumMessageSizeBytes), nil case *pb.PlatformKeyExtractorConfiguration_Static: return NewStaticKeyExtractor(kind.Static), nil default: From fbba2749e47d1c6a636fe4bfb16940440d972ddc Mon Sep 17 00:00:00 2001 From: Ed Schouten Date: Fri, 7 Nov 2025 06:06:22 +0100 Subject: [PATCH 3/3] Bump the deprecated API version to REv2.2 With support for REv2.0 and REv2.1 specific features removed, we should go ahead and bump the deprecated API version to REv2.2. This will force clients to stop using these old features, or simply stop working altogether. --- pkg/scheduler/in_memory_build_queue.go | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/pkg/scheduler/in_memory_build_queue.go b/pkg/scheduler/in_memory_build_queue.go index 37eea4f8..ff54260c 100644 --- a/pkg/scheduler/in_memory_build_queue.go +++ b/pkg/scheduler/in_memory_build_queue.go @@ -288,14 +288,13 @@ var inMemoryBuildQueueCapabilitiesProvider = capabilities.NewStaticProvider(&rem // - REv2.2 moves the platform properties from Command to Action. // - REv2.3 changes the way Command.arguments[0] is interpreted. // - // For some of these features we still support the old behavior - // by enabling configuration options in the scheduler and - // worker. However, these options will be removed after - // 2026-04-01. 
+	// We should set the deprecated API version to REv2.3 as well,
+	// as we always process Command.arguments[0] according to the
+	// new semantics. However, that would break compatibility with
+	// Bazel 8.x and below.
 	//
-	// TODO: Bump the deprecated API version when the legacy options
-	// are removed.
-	DeprecatedApiVersion: &semver.SemVer{Major: 2, Minor: 0},
+	// TODO: Bump the deprecated API version to REv2.3 on 2027-07-01.
+	DeprecatedApiVersion: &semver.SemVer{Major: 2, Minor: 2},
 	LowApiVersion:        &semver.SemVer{Major: 2, Minor: 3},
 	HighApiVersion:       &semver.SemVer{Major: 2, Minor: 11},
 })
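
For reference (not part of the patches above): a minimal Go sketch of the request shape that remains supported once the deprecated API version is raised to REv2.2. With the action_and_command key extractor removed, the scheduler only reads platform properties from the Action message, so clients must populate Action.platform themselves rather than relying on a fallback to Command.platform. The sketch uses the remote-apis Go bindings already used elsewhere in this repository; the main function and the example property values are illustrative only.

package main

import (
	"fmt"

	remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2"
)

func main() {
	// REv2.2 and later: platform properties live on the Action itself.
	// The remaining "action" key extractor expects them to be
	// lexicographically sorted by property name.
	action := &remoteexecution.Action{
		Platform: &remoteexecution.Platform{
			Properties: []*remoteexecution.Platform_Property{
				{Name: "arch", Value: "aarch64"},
				{Name: "os", Value: "linux"},
			},
		},
	}

	// REv2.1 and earlier placed the properties only on the Command
	// message. After this series the scheduler no longer falls back to
	// that location, so such requests are treated as having no
	// platform properties at all.
	fmt.Println(action.GetPlatform())
}

Requests from clients that still follow the REv2.1 convention will therefore be scheduled against the empty set of platform properties, which is the intended effect of bumping the deprecated API version.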