[fleet] add install_script e2e test setup #31791

Merged · 11 commits · Dec 10, 2024
Changes from 8 commits
1 change: 1 addition & 0 deletions .gitlab-ci.yml
@@ -147,6 +147,7 @@ variables:
S3_SBOM_STORAGE_URI: s3://sbom-root-us1-ddbuild-io/$CI_PROJECT_NAME/$CI_PIPELINE_ID
S3_RELEASE_ARTIFACTS_URI: s3://dd-release-artifacts/$CI_PROJECT_NAME/$CI_PIPELINE_ID
S3_RELEASE_INSTALLER_ARTIFACTS_URI: s3://dd-release-artifacts/datadog-installer/$CI_PIPELINE_ID
S3_TESTING_INSTALLER_ARTIFACTS_URI: s3://installtesting.datad0g.com
## comment out both lines below (S3_OMNIBUS_CACHE_BUCKET and USE_S3_CACHING) to allow
## build to succeed with S3 caching disabled.
S3_OMNIBUS_CACHE_BUCKET: dd-ci-datadog-agent-omnibus-cache-build-stable
1 change: 1 addition & 0 deletions .gitlab/JOBOWNERS
@@ -116,6 +116,7 @@ publish_winget* @DataDog/windows-agent
powershell_script_deploy @DataDog/windows-agent
windows_bootstrapper_deploy @DataDog/windows-agent
qa_*_oci @DataDog/agent-delivery
qa_installer_script @DataDog/agent-delivery

# Deploy containers
deploy_containers* @Datadog/agent-delivery
14 changes: 14 additions & 0 deletions .gitlab/deploy_packages/e2e.yml
@@ -25,3 +25,17 @@ qa_installer_oci:
IMG_REGISTRIES: agent-qa
IMG_SOURCES: registry.ddbuild.io/ci/remote-updates/datadog-installer:pipeline-${CI_PIPELINE_ID}
IMG_DESTINATIONS: installer-package:pipeline-${CI_PIPELINE_ID}

qa_installer_script:
image: registry.ddbuild.io/ci/datadog-agent-buildimages/gitlab_agent_deploy$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES
stage: deploy_packages
tags: ["arch:amd64"]
rules:
- !reference [.on_installer_or_e2e_changes]
- !reference [.manual]
needs:
- installer-install-scripts
before_script:
- ls $OMNIBUS_PACKAGE_DIR
script:
- $S3_CP_CMD --recursive --exclude "*" --include "install-*.sh" "$OMNIBUS_PACKAGE_DIR" "${S3_TESTING_INSTALLER_ARTIFACTS_URI}/${CI_COMMIT_SHA}/scripts/"
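
Reviewer note: with this job, the install-*.sh scripts land under ${S3_TESTING_INSTALLER_ARTIFACTS_URI}/<commit sha>/scripts/, i.e. they are served at https://installtesting.datad0g.com/<commit sha>/scripts/<name>.sh. A minimal Go sketch of the URL the new e2e test (databricks_test.go, later in this diff) builds from the commit sha — standard library only, nothing else assumed:

package main

import (
	"fmt"
	"os"
)

func main() {
	// CI_COMMIT_SHA is exported by GitLab CI; the e2e suite below reads it the same way.
	sha := os.Getenv("CI_COMMIT_SHA")
	url := fmt.Sprintf("https://installtesting.datad0g.com/%s/scripts/install-databricks.sh", sha)
	fmt.Println(url) // the test curls this URL and runs the script with sudo -E
}
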
12 changes: 11 additions & 1 deletion .gitlab/deploy_packages/oci.yml
@@ -13,7 +13,7 @@ include:
- !reference [.manual]
before_script:
- ls $OMNIBUS_PACKAGE_DIR
script:
script: &deploy_packages_script
- set +x
- !reference [.retrieve_linux_go_tools_deps]
- VERSION="$(inv agent.version --url-safe)-1" || exit $?
@@ -32,9 +32,19 @@ deploy_agent_oci:
needs: [ "agent_oci", "go_tools_deps"]
variables:
OCI_PRODUCT: "datadog-agent"
script:
- *deploy_packages_script
# Used for install scripts e2e tests
- datadog-package replicate-s3 registry.ddbuild.io/ci/remote-updates/${OCI_PRODUCT}:pipeline-${CI_PIPELINE_ID} us-east-1 installtesting.datad0g.com agent-package ${VERSION}
- datadog-package replicate-s3 registry.ddbuild.io/ci/remote-updates/${OCI_PRODUCT}:pipeline-${CI_PIPELINE_ID} us-east-1 installtesting.datad0g.com agent-package ${CI_COMMIT_SHA}

deploy_installer_oci:
extends: ".deploy_packages_oci"
needs: [ "installer_oci", "go_tools_deps" ]
variables:
OCI_PRODUCT: "datadog-installer"
script:
- !reference [.deploy_packages_oci, script]
# Used for install scripts e2e tests
- datadog-package replicate-s3 registry.ddbuild.io/ci/remote-updates/${OCI_PRODUCT}:pipeline-${CI_PIPELINE_ID} us-east-1 installtesting.datad0g.com installer-package ${VERSION}
- datadog-package replicate-s3 registry.ddbuild.io/ci/remote-updates/${OCI_PRODUCT}:pipeline-${CI_PIPELINE_ID} us-east-1 installtesting.datad0g.com installer-package ${CI_COMMIT_SHA}
14 changes: 14 additions & 0 deletions .gitlab/e2e/e2e.yml
@@ -369,6 +369,20 @@ new-e2e-apm:
- EXTRA_PARAMS: --run TestVMFakeintakeSuiteUDS
- EXTRA_PARAMS: --run TestVMFakeintakeSuiteTCP

new-e2e-installer-script:
extends: .new_e2e_template
rules:
- !reference [.on_installer_or_e2e_changes]
- !reference [.manual]
needs:
- !reference [.needs_new_e2e_template]
- qa_installer_oci
- qa_installer_script
variables:
TARGETS: ./tests/installer/script
TEAM: fleet
FLEET_INSTALL_METHOD: "install_script"

new-e2e-installer:
extends: .new_e2e_template
rules:
14 changes: 7 additions & 7 deletions pkg/fleet/installer/setup/common/config.go
@@ -27,7 +27,7 @@ var (
datadogConfFile = filepath.Join(configDir, "datadog.yaml")
logsConfFile = filepath.Join(configDir, "conf.d/configured_at_install_logs.yaml")
sparkConfigFile = filepath.Join(configDir, "conf.d/spark.d/spark.yaml")
injectTracerConfigFile = filepath.Join(configDir, "/etc/datadog-agent/inject/tracer.yaml")
injectTracerConfigFile = filepath.Join(configDir, "inject/tracer.yaml")
)

// HostInstaller is a struct that represents the agent configuration
@@ -61,21 +61,21 @@ type logsConfig struct {
type LogConfig struct {
Type string `yaml:"type"`
Path string `yaml:"path"`
Service string `yaml:"service"`
Source string `yaml:"source"`
Service string `yaml:"service,omitempty"`
Source string `yaml:"source,omitempty"`
}

type sparkConfig struct {
InitConfig interface{} `yaml:"init_config"`
InitConfig interface{} `yaml:"init_config,omitempty"`
Instances []SparkInstance `yaml:"instances"`
}

// SparkInstance is a struct that represents a single spark instance
type SparkInstance struct {
SparkURL string `yaml:"spark_url"`
SparkClusterMode string `yaml:"spark_cluster_mode"`
SparkClusterMode string `yaml:"spark_cluster_mode,omitempty"`
ClusterName string `yaml:"cluster_name"`
StreamingMetrics bool `yaml:"streaming_metrics"`
StreamingMetrics bool `yaml:"streaming_metrics,omitempty"`
}

type injectorConfig struct {
@@ -100,7 +100,7 @@ func NewHostInstaller(env *env.Env) (*HostInstaller, error) {
}

func newHostInstaller(env *env.Env, ddUID, ddGID int) (*HostInstaller, error) {
i := &HostInstaller{}
i := &HostInstaller{agentConfig: make(map[string]interface{})}
if env.APIKey == "" {
return nil, fmt.Errorf("DD_API key is required")
}
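
Reviewer note on the omitempty changes above: fields tagged `yaml:"...,omitempty"` are dropped from the marshaled output when they hold their zero value, which keeps the generated conf.d files minimal and is what the new test below asserts. A minimal sketch, assuming gopkg.in/yaml.v3 (the yaml package used in the repo may differ):

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// logConfig mirrors the LogConfig struct from this diff, with omitempty on the optional fields.
type logConfig struct {
	Type    string `yaml:"type"`
	Path    string `yaml:"path"`
	Service string `yaml:"service,omitempty"`
	Source  string `yaml:"source,omitempty"`
}

func main() {
	out, _ := yaml.Marshal([]logConfig{{Type: "file", Path: "/var/log/app.log", Service: "app"}})
	fmt.Print(string(out))
	// Prints:
	// - type: file
	//   path: /var/log/app.log
	//   service: app
	// No "source:" line is emitted because Source is empty and tagged omitempty.
}
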
61 changes: 61 additions & 0 deletions pkg/fleet/installer/setup/common/config_test.go
@@ -0,0 +1,61 @@
// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-present Datadog, Inc.

//go:build !windows

// Package common contains the HostInstaller struct which is used to write the agent configuration to disk
package common

import (
"os"
"path/filepath"
"testing"

"github.com/stretchr/testify/assert"

"github.com/DataDog/datadog-agent/pkg/fleet/installer/env"
)

func assertFileContent(t *testing.T, file, content string) {
b, err := os.ReadFile(file)
assert.NoError(t, err)
assert.Equal(t, content, string(b))
}

func TestAgentConfigs(t *testing.T) {
configDir = t.TempDir()
datadogConfFile = filepath.Join(configDir, "datadog.yaml")
logsConfFile = filepath.Join(configDir, "conf.d/configured_at_install_logs.yaml")
sparkConfigFile = filepath.Join(configDir, "conf.d/spark.d/spark.yaml")

i, err := newHostInstaller(&env.Env{APIKey: "a"}, 0, 0)
assert.NotNil(t, i)
assert.Nil(t, err)

i.AddAgentConfig("key", "value")
i.AddLogConfig(LogConfig{Type: "file", Path: "/var/log/app.log", Service: "app"})
i.AddHostTag("k1", "v1")
i.AddHostTag("k2", "v2")
i.AddSparkInstance(SparkInstance{ClusterName: "cluster", SparkURL: "http://localhost:8080"})

assert.NoError(t, i.writeConfigs())
assertFileContent(t, datadogConfFile, `api_key: a
key: value
logs_enabled: true
tags:
- k1:v1
- k2:v2
`)

assertFileContent(t, logsConfFile, `logs:
- type: file
path: /var/log/app.log
service: app
`)
assertFileContent(t, sparkConfigFile, `instances:
- spark_url: http://localhost:8080
cluster_name: cluster
`)
}
8 changes: 7 additions & 1 deletion pkg/fleet/installer/setup/setup.go
@@ -25,7 +25,13 @@ const (
func Setup(ctx context.Context, env *env.Env, flavor string) error {
switch flavor {
case FlavorDatabricks:
return djm.SetupDatabricks(ctx, env)
// temporary until the whole e2e test pipeline is set up
if err := djm.SetupDatabricks(ctx, env); err != nil {
fmt.Printf("Databricks setup failed: %v\n", err)
} else {
fmt.Println("Databricks setup completed")
}
return nil
default:
return fmt.Errorf("unknown setup flavor %s", flavor)
}
7 changes: 6 additions & 1 deletion tasks/installer.py
@@ -10,6 +10,7 @@
from invoke.exceptions import Exit

from tasks.build_tags import filter_incompatible_tags, get_build_tags, get_default_build_tags
from tasks.libs.common.git import get_commit_sha
from tasks.libs.common.utils import REPO_PATH, bin_name, get_build_flags
from tasks.libs.releasing.version import get_version

@@ -106,12 +107,16 @@ def build_linux_script(
with open(INSTALL_SCRIPT_TEMPLATE) as f:
install_script = f.read()

# "nightly-a7" is the default version on pipelines; use the commit sha instead
if version == "nightly-a7":
version = get_commit_sha(ctx)

archs = ['amd64', 'arm64']
for arch in archs:
build_downloader(ctx, flavor=flavor, version=version, os='linux', arch=arch)
with open(DOWNLOADER_BIN, 'rb') as f:
encoded_bin = base64.encodebytes(f.read()).decode('utf-8')
install_script = install_script.replace(f'DOWNLOADER_BIN_{arch.upper()}', encoded_bin)
install_script = install_script.replace(f'DOWNLOADER_BIN_LINUX_{arch.upper()}', encoded_bin)

commit_sha = ctx.run('git rev-parse HEAD', hide=True).stdout.strip()
install_script = install_script.replace('INSTALLER_COMMIT', commit_sha)
34 changes: 34 additions & 0 deletions test/new-e2e/tests/installer/script/databricks_test.go
@@ -0,0 +1,34 @@
// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-present Datadog, Inc.

package installscript

import (
"fmt"
"os"
"testing"

"github.com/DataDog/datadog-agent/test/new-e2e/pkg/e2e"
"github.com/DataDog/datadog-agent/test/new-e2e/pkg/environments"
awshost "github.com/DataDog/datadog-agent/test/new-e2e/pkg/environments/aws/host"
osdesc "github.com/DataDog/test-infra-definitions/components/os"
"github.com/DataDog/test-infra-definitions/scenarios/aws/ec2"
)

type vmUpdaterSuite struct {
commitHash string
e2e.BaseSuite[environments.Host]
}

func (s *vmUpdaterSuite) TestInstallScript() {
url := fmt.Sprintf("https://installtesting.datad0g.com/%s/scripts/install-databricks.sh", s.commitHash)
s.Env().RemoteHost.MustExecute(fmt.Sprintf("curl -L %s > install_script; export DD_INSTALLER_REGISTRY_URL_INSTALLER_PACKAGE=installtesting.datad0g.com; sudo -E bash install_script", url))
}

func TestUpdaterSuite(t *testing.T) {
e2e.Run(t, &vmUpdaterSuite{commitHash: os.Getenv("CI_COMMIT_SHA")}, e2e.WithProvisioner(awshost.ProvisionerNoFakeIntake(
awshost.WithEC2InstanceOptions(ec2.WithOSArch(osdesc.UbuntuDefault, osdesc.ARM64Arch)),
)))
}