diff --git a/README.md b/README.md
index d125db17b2..915d904010 100644
--- a/README.md
+++ b/README.md
@@ -370,6 +370,30 @@ Use this command to install the package in Kibana.
 
 The command uses Kibana API to install the package in Kibana. The package must be exposed via the Package Registry or built locally in zip format so they can be installed using --zip parameter. Zip packages can be installed directly in Kibana >= 8.7.0. More details in this [HOWTO guide](https://github.com/elastic/elastic-package/blob/main/docs/howto/install_package.md).
 
+### `elastic-package links`
+
+_Context: global_
+
+Use this command to manage linked files in the repository.
+
+### `elastic-package links check`
+
+_Context: global_
+
+Use this command to check if linked files references inside the current directory are up to date.
+
+### `elastic-package links list`
+
+_Context: global_
+
+Use this command to list all packages that have linked file references that include the current directory.
+
+### `elastic-package links update`
+
+_Context: global_
+
+Use this command to update all linked files references inside the current directory.
+
 ### `elastic-package lint`
 
 _Context: package_
diff --git a/cmd/links.go b/cmd/links.go
new file mode 100644
index 0000000000..843e366106
--- /dev/null
+++ b/cmd/links.go
@@ -0,0 +1,146 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package cmd
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+
+	"github.com/spf13/cobra"
+
+	"github.com/elastic/elastic-package/internal/cobraext"
+	"github.com/elastic/elastic-package/internal/files"
+)
+
+const (
+	linksLongDescription       = `Use this command to manage linked files in the repository.`
+	linksCheckLongDescription  = `Use this command to check if linked files references inside the current directory are up to date.`
+	linksUpdateLongDescription = `Use this command to update all linked files references inside the current directory.`
+	linksListLongDescription   = `Use this command to list all packages that have linked file references that include the current directory.`
+)
+
+func setupLinksCommand() *cobraext.Command {
+	cmd := &cobra.Command{
+		Use:   "links",
+		Short: "Manage linked files",
+		Long:  linksLongDescription,
+		RunE: func(parent *cobra.Command, args []string) error {
+			return cobraext.ComposeCommandsParentContext(parent, args, parent.Commands()...)
+		},
+	}
+
+	cmd.AddCommand(getLinksCheckCommand())
+	cmd.AddCommand(getLinksUpdateCommand())
+	cmd.AddCommand(getLinksListCommand())
+
+	return cobraext.NewCommand(cmd, cobraext.ContextGlobal)
+}
+
+func getLinksCheckCommand() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:   "check",
+		Short: "Check for linked files changes",
+		Long:  linksCheckLongDescription,
+		Args:  cobra.NoArgs,
+		RunE:  linksCheckCommandAction,
+	}
+	return cmd
+}
+
+func linksCheckCommandAction(cmd *cobra.Command, args []string) error {
+	cmd.Printf("Check for linked files changes\n")
+	pwd, err := os.Getwd()
+	if err != nil {
+		return fmt.Errorf("reading current working directory failed: %w", err)
+	}
+
+	linkedFiles, err := files.AreLinkedFilesUpToDate(pwd)
+	if err != nil {
+		return fmt.Errorf("checking linked files are up-to-date failed: %w", err)
+	}
+	for _, f := range linkedFiles {
+		if !f.UpToDate {
+			cmd.Printf("%s is outdated.\n", filepath.Join(f.WorkDir, f.LinkFilePath))
+		}
+	}
+	if len(linkedFiles) > 0 {
+		return fmt.Errorf("linked files are outdated")
+	}
+	return nil
+}
+
+func getLinksUpdateCommand() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:   "update",
+		Short: "Update linked files checksums if needed.",
+		Long:  linksUpdateLongDescription,
+		Args:  cobra.NoArgs,
+		RunE:  linksUpdateCommandAction,
+	}
+	return cmd
+}
+
+func linksUpdateCommandAction(cmd *cobra.Command, args []string) error {
+	cmd.Printf("Update linked files checksums if needed.\n")
+	pwd, err := os.Getwd()
+	if err != nil {
+		return fmt.Errorf("reading current working directory failed: %w", err)
+	}
+
+	updatedLinks, err := files.UpdateLinkedFilesChecksums(pwd)
+	if err != nil {
+		return fmt.Errorf("updating linked files checksums failed: %w", err)
+	}
+
+	for _, l := range updatedLinks {
+		cmd.Printf("%s was updated.\n", l.LinkFilePath)
+	}
+
+	return nil
+}
+
+func getLinksListCommand() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:   "list",
+		Short: "List packages linking files from this path",
+		Long:  linksListLongDescription,
+		Args:  cobra.NoArgs,
+		RunE:  linksListCommandAction,
+	}
+	cmd.Flags().BoolP(cobraext.PackagesFlagName, "", false, cobraext.PackagesFlagDescription)
+	return cmd
+}
+
+func linksListCommandAction(cmd *cobra.Command, args []string) error {
+	onlyPackages, err := cmd.Flags().GetBool(cobraext.PackagesFlagName)
+	if err != nil {
+		return cobraext.FlagParsingError(err, cobraext.PackagesFlagName)
+	}
+
+	pwd, err := os.Getwd()
+	if err != nil {
+		return fmt.Errorf("reading current working directory failed: %w", err)
+	}
+
+	byPackage, err := files.LinkedFilesByPackageFrom(pwd)
+	if err != nil {
+		return fmt.Errorf("listing linked packages failed: %w", err)
+	}
+
+	for i := range byPackage {
+		for p, links := range byPackage[i] {
+			if onlyPackages {
+				cmd.Println(p)
+				continue
+			}
+			for _, l := range links {
+				cmd.Println(l)
+			}
+		}
+	}
+
+	return nil
+}
diff --git a/cmd/lint.go b/cmd/lint.go
index a2a26025fe..bba2188f9d 100644
--- a/cmd/lint.go
+++ b/cmd/lint.go
@@ -45,7 +45,6 @@ func setupLintCommand() *cobraext.Command {
 
 func lintCommandAction(cmd *cobra.Command, args []string) error {
 	cmd.Println("Lint the package")
-
 	readmeFiles, err := docs.AreReadmesUpToDate()
 	if err != nil {
 		for _, f := range readmeFiles {
diff --git a/cmd/root.go b/cmd/root.go
index 52478b8b2b..e449ff5169 100644
--- a/cmd/root.go
+++ b/cmd/root.go
@@ -28,6 +28,7 @@ var commands = []*cobraext.Command{
 	setupExportCommand(),
 	setupFormatCommand(),
 	setupInstallCommand(),
+	setupLinksCommand(),
 	setupLintCommand(),
 	setupProfilesCommand(),
 	setupReportsCommand(),
diff --git a/docs/howto/links.md b/docs/howto/links.md
new file mode 100644
index 0000000000..35a9ad6cda
--- /dev/null
+++ b/docs/howto/links.md
@@ -0,0 +1,34 @@
+# HOWTO: Use links to reuse common files.
+
+## Introduction
+
+Many packages have files that are equal between them. This is more common in pipelines, 
+input configurations, and field definitions.
+
+In order to help developers, there is the ability to define links, so a file that might be reused needs to only be defined once, and can be reused from any other packages.
+
+
+## Links
+
+Currently, there are some specific places where links can be defined:
+
+- `elasticsearch/ingest_pipeline`
+- `data_stream/**/elasticsearch/ingest_pipeline`
+- `agent/input`
+- `data_stream/**/agent/stream`
+- `data_stream/**/fields`
+
+A link consists of a file with a `.link` extension that contains a path, relative to its location, to the file that it will be replaced with. It also consists of a checksum to validate the linked file is up to date with the package expectations.
+
+`data_stream/foo/elasticsearch/ingest_pipeline/default.yml.link`
+
+```
+../../../../../testpackage/data_stream/test/elasticsearch/ingest_pipeline/default.yml f7c5f0c03aca8ef68c379a62447bdafbf0dcf32b1ff2de143fd6878ee01a91ad
+```
+
+This will use the contents of the linked file during validation, tests, and building of the package, so functionally nothing changes from the package point of view.
+
+## The `_dev/shared` folder
+
+As a convenience, shared files can be placed under `_dev/shared` if they are going to be
+reused from several places. They can even be added outside of any package, in any place in the repository.
diff --git a/go.mod b/go.mod
index fd99f84359..1a7046cf90 100644
--- a/go.mod
+++ b/go.mod
@@ -2,6 +2,8 @@ module github.com/elastic/elastic-package
 
 go 1.24.2
 
+replace github.com/elastic/package-spec/v3 => github.com/elastic/package-spec/v3 v3.0.0-20250409140721-851b65d4339d
+
 require (
 	github.com/AlecAivazis/survey/v2 v2.3.7
 	github.com/Masterminds/semver/v3 v3.3.1
diff --git a/go.sum b/go.sum
index 330fb2c426..26ecced323 100644
--- a/go.sum
+++ b/go.sum
@@ -84,8 +84,8 @@ github.com/elastic/gojsonschema v1.2.1 h1:cUMbgsz0wyEB4x7xf3zUEvUVDl6WCz2RKcQPul
 github.com/elastic/gojsonschema v1.2.1/go.mod h1:biw5eBS2Z4T02wjATMRSfecfjCmwaDPvuaqf844gLrg=
 github.com/elastic/kbncontent v0.1.4 h1:GoUkJkqkn2H6iJTnOHcxEqYVVYyjvcebLQVaSR1aSvU=
 github.com/elastic/kbncontent v0.1.4/go.mod h1:kOPREITK9gSJsiw/WKe7QWSO+PRiZMyEFQCw+CMLAHI=
-github.com/elastic/package-spec/v3 v3.3.5 h1:D0AXRiTNcF8Ue8gLIafF/BLOk7V2yqSFVUy/p0fwArM=
-github.com/elastic/package-spec/v3 v3.3.5/go.mod h1:+q7JpjqBFnNVMmh9VAVfZdOxQ3EmdCD+KM8Cg6VhKgg=
+github.com/elastic/package-spec/v3 v3.0.0-20250409140721-851b65d4339d h1:jg8qN/0ZAxbo65coqJUFx01OC2PMkWc+6kaf9labTkc=
+github.com/elastic/package-spec/v3 v3.0.0-20250409140721-851b65d4339d/go.mod h1:+q7JpjqBFnNVMmh9VAVfZdOxQ3EmdCD+KM8Cg6VhKgg=
 github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
 github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
 github.com/evanphx/json-patch v5.9.0+incompatible h1:fBXyNpNMuTTDdquAq/uisOr2lShz4oaXpDTX2bLe7ls=
diff --git a/internal/builder/packages.go b/internal/builder/packages.go
index 94c1b89b77..6293740628 100644
--- a/internal/builder/packages.go
+++ b/internal/builder/packages.go
@@ -184,6 +184,15 @@ func BuildPackage(options BuildOptions) (string, error) {
 		return "", fmt.Errorf("adding dynamic mappings: %w", err)
 	}
 
+	logger.Debug("Include linked files")
+	links, err := files.IncludeLinkedFiles(options.PackageRoot, destinationDir)
+	if err != nil {
+		return "", fmt.Errorf("including linked files failed: %w", err)
+	}
+	for _, l := range links {
+		logger.Debugf("Linked file included (path: %s)", l.TargetFilePath(destinationDir))
+	}
+
 	if options.CreateZip {
 		return buildZippedPackage(options, destinationDir)
 	}
diff --git a/internal/cobraext/flags.go b/internal/cobraext/flags.go
index c8f19eff22..fda51d0de0 100644
--- a/internal/cobraext/flags.go
+++ b/internal/cobraext/flags.go
@@ -138,6 +138,9 @@ const (
 	GenerateTestResultFlagName        = "generate"
 	GenerateTestResultFlagDescription = "generate test result file"
 
+	PackagesFlagName        = "packages"
+	PackagesFlagDescription = "whether to return package names or complete paths for the linked files found"
+
 	ProfileFlagName        = "profile"
 	ProfileFlagDescription = "select a profile to use for the stack configuration. Can also be set with %s"
 
diff --git a/internal/elasticsearch/ingest/datastream.go b/internal/elasticsearch/ingest/datastream.go
index 6d4848446c..acab9590cc 100644
--- a/internal/elasticsearch/ingest/datastream.go
+++ b/internal/elasticsearch/ingest/datastream.go
@@ -19,13 +19,16 @@ import (
 	"gopkg.in/yaml.v3"
 
 	"github.com/elastic/elastic-package/internal/elasticsearch"
+	"github.com/elastic/elastic-package/internal/files"
 	"github.com/elastic/elastic-package/internal/packages"
 )
 
 var (
-	ingestPipelineTag   = regexp.MustCompile(`{{\s*IngestPipeline.+}}`)
-	defaultPipelineJSON = "default.json"
-	defaultPipelineYML  = "default.yml"
+	ingestPipelineTag       = regexp.MustCompile(`{{\s*IngestPipeline.+}}`)
+	defaultPipelineJSON     = "default.json"
+	defaultPipelineJSONLink = "default.json.link"
+	defaultPipelineYML      = "default.yml"
+	defaultPipelineYMLLink  = "default.yml.link"
 )
 
 type Rule struct {
@@ -71,7 +74,7 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
 	elasticsearchPath := filepath.Join(dataStreamPath, "elasticsearch", "ingest_pipeline")
 
 	var pipelineFiles []string
-	for _, pattern := range []string{"*.json", "*.yml"} {
+	for _, pattern := range []string{"*.json", "*.yml", "*.link"} {
 		files, err := filepath.Glob(filepath.Join(elasticsearchPath, pattern))
 		if err != nil {
 			return nil, fmt.Errorf("listing '%s' in '%s': %w", pattern, elasticsearchPath, err)
@@ -79,9 +82,10 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
 		pipelineFiles = append(pipelineFiles, files...)
 	}
 
+	linksFS := files.NewLinksFS(elasticsearchPath)
 	var pipelines []Pipeline
 	for _, path := range pipelineFiles {
-		c, err := os.ReadFile(path)
+		c, err := linksFS.ReadFile(path)
 		if err != nil {
 			return nil, fmt.Errorf("reading ingest pipeline failed (path: %s): %w", path, err)
 		}
@@ -108,7 +112,7 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
 		pipelines = append(pipelines, Pipeline{
 			Path:            path,
 			Name:            getPipelineNameWithNonce(name[:strings.Index(name, ".")], nonce),
-			Format:          filepath.Ext(path)[1:],
+			Format:          filepath.Ext(strings.TrimSuffix(path, ".link"))[1:],
 			Content:         cWithRerouteProcessors,
 			ContentOriginal: c,
 		})
@@ -119,7 +123,8 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
 func addRerouteProcessors(pipeline []byte, dataStreamPath, path string) ([]byte, error) {
 	// Only attach routing_rules.yml reroute processors after the default pipeline
 	filename := filepath.Base(path)
-	if filename != defaultPipelineJSON && filename != defaultPipelineYML {
+	if filename != defaultPipelineJSON && filename != defaultPipelineYML &&
+		filename != defaultPipelineJSONLink && filename != defaultPipelineYMLLink {
 		return pipeline, nil
 	}
 
diff --git a/internal/files/copy.go b/internal/files/copy.go
index 2220cf2a63..9e0733f0c4 100644
--- a/internal/files/copy.go
+++ b/internal/files/copy.go
@@ -13,7 +13,7 @@ import (
 
 var (
 	defaultFoldersToSkip   = []string{"_dev", "build", ".git"}
-	defaultFileGlobsToSkip = []string{".DS_Store", ".*.swp"}
+	defaultFileGlobsToSkip = []string{".DS_Store", ".*.swp", "*.link"}
 )
 
 // CopyAll method copies files from the source to the destination skipping empty directories.
diff --git a/internal/files/linkedfiles.go b/internal/files/linkedfiles.go
new file mode 100644
index 0000000000..d384b1a352
--- /dev/null
+++ b/internal/files/linkedfiles.go
@@ -0,0 +1,408 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package files
+
+import (
+	"bufio"
+	"bytes"
+	"crypto/sha256"
+	"encoding/hex"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"slices"
+	"strings"
+
+	"github.com/elastic/elastic-package/internal/logger"
+	"github.com/elastic/elastic-package/internal/packages"
+)
+
+const linkExtension = ".link"
+
+var _ fs.FS = (*LinksFS)(nil)
+
+// LinksFS is a filesystem that handles linked files.
+// It wraps another filesystem and checks for linked files with the ".link" extension.
+// If a linked file is found, it reads the link file to determine the target file
+// and its checksum. If the target file is up to date, it returns the target file.
+// Otherwise, it returns an error.
+type LinksFS struct {
+	workDir string
+	inner   fs.FS
+}
+
+// NewLinksFS creates a new LinksFS.
+func NewLinksFS(workDir string) *LinksFS {
+	return &LinksFS{workDir: workDir, inner: os.DirFS(workDir)}
+}
+
+// Open opens a file in the filesystem.
+func (lfs *LinksFS) Open(name string) (fs.File, error) {
+	name, err := filepath.Rel(lfs.workDir, name)
+	if err != nil {
+		return nil, fmt.Errorf("could not get relative path: %w", err)
+	}
+	// The name is now relative to workDir, matching the layout of the inner fs.FS.
+	if filepath.Ext(name) != linkExtension {
+		return lfs.inner.Open(name)
+	}
+	pathName := filepath.Join(lfs.workDir, name)
+	l, err := NewLinkedFile(pathName)
+	if err != nil {
+		return nil, err
+	}
+	if !l.UpToDate {
+		return nil, fmt.Errorf("linked file %s is not up to date", name)
+	}
+	includedPath := filepath.Join(lfs.workDir, filepath.Dir(name), l.IncludedFilePath)
+	return os.Open(includedPath)
+}
+
+// ReadFile reads a file from the filesystem.
+func (lfs *LinksFS) ReadFile(name string) ([]byte, error) {
+	f, err := lfs.Open(name)
+	if err != nil {
+		return nil, err
+	}
+	defer f.Close()
+	b, err := io.ReadAll(f)
+	if err != nil {
+		return nil, err
+	}
+	return b, nil
+}
+
+// A Link represents a linked file.
+// It contains the path to the link file, the checksum of the linked file,
+// the path to the target file, and the checksum of the included file contents.
+// It also contains a boolean indicating whether the link is up to date.
+type Link struct {
+	WorkDir string
+
+	LinkFilePath    string
+	LinkChecksum    string
+	LinkPackageName string
+
+	IncludedFilePath             string
+	IncludedFileContentsChecksum string
+	IncludedPackageName          string
+
+	UpToDate bool
+}
+
+// NewLinkedFile creates a new Link from the given link file path.
+func NewLinkedFile(linkFilePath string) (Link, error) {
+	var l Link
+	l.WorkDir = filepath.Dir(linkFilePath)
+	if linkPackageRoot, _, _ := packages.FindPackageRootFrom(l.WorkDir); linkPackageRoot != "" {
+		l.LinkPackageName = filepath.Base(linkPackageRoot)
+	}
+
+	firstLine, err := readFirstLine(linkFilePath)
+	if err != nil {
+		return Link{}, err
+	}
+	l.LinkFilePath, err = filepath.Rel(l.WorkDir, linkFilePath)
+	if err != nil {
+		return Link{}, fmt.Errorf("could not get relative path: %w", err)
+	}
+
+	fields := strings.Fields(firstLine)
+	l.IncludedFilePath = fields[0]
+	if len(fields) == 2 {
+		l.LinkChecksum = fields[1]
+	}
+
+	pathName := filepath.Join(l.WorkDir, filepath.FromSlash(l.IncludedFilePath))
+	if _, err := os.Stat(pathName); err != nil {
+		return Link{}, err
+	}
+
+	inRoot, err := pathIsInRepositoryRoot(pathName)
+	if err != nil {
+		return Link{}, fmt.Errorf("could not check if path %v is in repository root: %w", pathName, err)
+	}
+	if !inRoot {
+		return Link{}, fmt.Errorf("path %v escapes the repository root", pathName)
+	}
+
+	cs, err := getLinkedFileChecksum(pathName)
+	if err != nil {
+		return Link{}, fmt.Errorf("could not collect file %v: %w", l.IncludedFilePath, err)
+	}
+	if l.LinkChecksum == cs {
+		l.UpToDate = true
+	}
+	l.IncludedFileContentsChecksum = cs
+
+	if includedPackageRoot, _, _ := packages.FindPackageRootFrom(filepath.Dir(pathName)); includedPackageRoot != "" {
+		l.IncludedPackageName = filepath.Base(includedPackageRoot)
+	}
+
+	return l, nil
+}
+
+// UpdateChecksum function updates the checksum of the linked file.
+// It returns true if the checksum was updated, false if it was already up-to-date.
+func (l *Link) UpdateChecksum() (bool, error) {
+	if l.UpToDate {
+		return false, nil
+	}
+	if l.IncludedFilePath == "" {
+		return false, fmt.Errorf("included file path is empty for link %v", l.LinkFilePath)
+	}
+	if l.IncludedFileContentsChecksum == "" {
+		return false, fmt.Errorf("checksum is empty for file %v", l.IncludedFilePath)
+	}
+	newContent := fmt.Sprintf("%v %v", filepath.ToSlash(l.IncludedFilePath), l.IncludedFileContentsChecksum)
+	if err := WriteFile(filepath.Join(l.WorkDir, l.LinkFilePath), []byte(newContent)); err != nil {
+		return false, fmt.Errorf("could not update checksum for file %v: %w", l.LinkFilePath, err)
+	}
+	l.LinkChecksum = l.IncludedFileContentsChecksum
+	l.UpToDate = true
+	return true, nil
+}
+
+func (l *Link) TargetFilePath(workDir ...string) string {
+	targetFilePath := filepath.FromSlash(strings.TrimSuffix(l.LinkFilePath, linkExtension))
+	wd := l.WorkDir
+	if len(workDir) > 0 {
+		wd = workDir[0]
+	}
+	return filepath.Join(wd, targetFilePath)
+}
+
+// IncludeLinkedFiles function includes linked files from the source
+// directory to the target directory.
+// It returns a slice of Link structs representing the included files.
+// It also updates the checksum of the linked files.
+// Both directories must be relative to the root.
+func IncludeLinkedFiles(fromDir, toDir string) ([]Link, error) {
+	links, err := ListLinkedFiles(fromDir)
+	if err != nil {
+		return nil, fmt.Errorf("including linked files failed: %w", err)
+	}
+	for _, l := range links {
+		if _, err := l.UpdateChecksum(); err != nil {
+			return nil, fmt.Errorf("could not update checksum for file %v: %w", l.LinkFilePath, err)
+		}
+		targetFilePath := l.TargetFilePath(toDir)
+		if err := CopyFile(
+			filepath.Join(l.WorkDir, filepath.FromSlash(l.IncludedFilePath)),
+			targetFilePath,
+		); err != nil {
+			return nil, fmt.Errorf("could not write file %v: %w", targetFilePath, err)
+		}
+	}
+
+	return links, nil
+}
+
+// ListLinkedFiles function returns a slice of Link structs representing linked files.
+func ListLinkedFiles(fromDir string) ([]Link, error) {
+	var linkFiles []string
+	if err := filepath.Walk(
+		filepath.FromSlash(fromDir),
+		func(path string, info os.FileInfo, err error) error {
+			if err != nil {
+				return err
+			}
+			if !info.IsDir() && strings.HasSuffix(info.Name(), linkExtension) {
+				linkFiles = append(linkFiles, path)
+			}
+			return nil
+		}); err != nil {
+		return nil, err
+	}
+
+	links := make([]Link, len(linkFiles))
+
+	for i, f := range linkFiles {
+		l, err := NewLinkedFile(filepath.FromSlash(f))
+		if err != nil {
+			return nil, fmt.Errorf("could not initialize linked file %v: %w", f, err)
+		}
+		links[i] = l
+	}
+
+	return links, nil
+}
+
+// CopyFile function copies a file from to to inside the root.
+func CopyFile(from, to string) error {
+	from = filepath.FromSlash(from)
+	source, err := os.Open(from)
+	if err != nil {
+		return err
+	}
+	defer source.Close()
+
+	to = filepath.FromSlash(to)
+	dir := filepath.Dir(to)
+	if _, err := os.Stat(dir); os.IsNotExist(err) {
+		if err := os.MkdirAll(dir, 0700); err != nil {
+			return err
+		}
+	}
+	destination, err := os.Create(to)
+	if err != nil {
+		return err
+	}
+	defer destination.Close()
+
+	_, err = io.Copy(destination, source)
+	return err
+}
+
+// WriteFile function writes a byte slice to a file inside the root.
+func WriteFile(to string, b []byte) error {
+	to = filepath.FromSlash(to)
+	if _, err := os.Stat(filepath.Dir(to)); os.IsNotExist(err) {
+		if err := os.MkdirAll(filepath.Dir(to), 0700); err != nil {
+			return err
+		}
+	}
+	return os.WriteFile(to, b, 0644)
+}
+
+// AreLinkedFilesUpToDate function checks if all the linked files are up-to-date.
+func AreLinkedFilesUpToDate(fromDir string) ([]Link, error) {
+	links, err := ListLinkedFiles(fromDir)
+	if err != nil {
+		return nil, fmt.Errorf("listing linked files failed: %w", err)
+	}
+
+	var outdated []Link
+	for _, l := range links {
+		logger.Debugf("Check if %s is up-to-date", l.LinkFilePath)
+		if !l.UpToDate {
+			outdated = append(outdated, l)
+		}
+	}
+
+	return outdated, nil
+}
+
+// UpdateLinkedFilesChecksums function updates the checksums of the linked files.
+// It returns a slice of updated links.
+// If no links were updated, it returns an empty slice.
+func UpdateLinkedFilesChecksums(fromDir string) ([]Link, error) {
+	links, err := ListLinkedFiles(fromDir)
+	if err != nil {
+		return nil, fmt.Errorf("updating linked files checksums failed: %w", err)
+	}
+
+	var updatedLinks []Link
+	for _, l := range links {
+		updated, err := l.UpdateChecksum()
+		if err != nil {
+			return nil, fmt.Errorf("updating linked files checksums failed: %w", err)
+		}
+		if updated {
+			updatedLinks = append(updatedLinks, l)
+		}
+	}
+
+	return updatedLinks, nil
+}
+
+// LinkedFilesByPackageFrom function returns a slice of maps containing linked files grouped by package.
+// Each map contains the package name as the key and a slice of linked file paths as the value.
+func LinkedFilesByPackageFrom(fromDir string) ([]map[string][]string, error) {
+	root, err := FindRepositoryRoot()
+	if err != nil {
+		return nil, err
+	}
+	links, err := ListLinkedFiles(root.Name())
+	if err != nil {
+		return nil, fmt.Errorf("listing linked files failed: %w", err)
+	}
+
+	var packageName string
+	if packageRoot, _, _ := packages.FindPackageRootFrom(fromDir); packageRoot != "" {
+		packageName = filepath.Base(packageRoot)
+	}
+	byPackageMap := map[string][]string{}
+	for _, l := range links {
+		if l.LinkPackageName == l.IncludedPackageName ||
+			packageName != l.IncludedPackageName {
+			continue
+		}
+		byPackageMap[l.LinkPackageName] = append(byPackageMap[l.LinkPackageName], filepath.Join(l.WorkDir, l.LinkFilePath))
+	}
+
+	var packages []string
+	for p := range byPackageMap {
+		packages = append(packages, p)
+	}
+	slices.Sort(packages)
+
+	var byPackage []map[string][]string
+	for _, p := range packages {
+		m := map[string][]string{p: byPackageMap[p]}
+		byPackage = append(byPackage, m)
+	}
+	return byPackage, nil
+}
+
+func getLinkedFileChecksum(path string) (string, error) {
+	b, err := os.ReadFile(filepath.FromSlash(path))
+	if err != nil {
+		return "", err
+	}
+	cs, err := checksum(b)
+	if err != nil {
+		return "", err
+	}
+	return cs, nil
+}
+
+func readFirstLine(filePath string) (string, error) {
+	file, err := os.Open(filepath.FromSlash(filePath))
+	if err != nil {
+		return "", err
+	}
+	defer file.Close()
+
+	scanner := bufio.NewScanner(file)
+	if scanner.Scan() {
+		return scanner.Text(), nil
+	}
+
+	if err := scanner.Err(); err != nil {
+		return "", err
+	}
+
+	return "", fmt.Errorf("file is empty or first line is missing")
+}
+
+func checksum(b []byte) (string, error) {
+	hash := sha256.New()
+	if _, err := io.Copy(hash, bytes.NewReader(b)); err != nil {
+		return "", err
+	}
+	return hex.EncodeToString(hash.Sum(nil)), nil
+}
+
+func pathIsInRepositoryRoot(path string) (bool, error) {
+	path = filepath.FromSlash(path)
+	root, err := FindRepositoryRoot()
+	if err != nil {
+		return false, err
+	}
+	if filepath.IsAbs(path) {
+		path, err = filepath.Rel(root.Name(), path)
+		if err != nil {
+			return false, fmt.Errorf("could not get relative path: %w", err)
+		}
+	}
+
+	if _, err := root.Stat(path); err != nil {
+		return false, nil
+	}
+	return true, nil
+}
diff --git a/internal/files/linkedfiles_test.go b/internal/files/linkedfiles_test.go
new file mode 100644
index 0000000000..05befac9be
--- /dev/null
+++ b/internal/files/linkedfiles_test.go
@@ -0,0 +1,168 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package files
+
+import (
+	"bytes"
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestLinkUpdateChecksum(t *testing.T) {
+	wd, err := os.Getwd()
+	assert.NoError(t, err)
+	basePath := filepath.Join(wd, filepath.FromSlash("testdata/links"))
+	outdatedFile, err := NewLinkedFile(filepath.Join(basePath, "outdated.yml.link"))
+	t.Cleanup(func() {
+		_ = WriteFile(filepath.Join(outdatedFile.WorkDir, outdatedFile.LinkFilePath), []byte(outdatedFile.IncludedFilePath))
+	})
+	assert.NoError(t, err)
+	assert.False(t, outdatedFile.UpToDate)
+	assert.Empty(t, outdatedFile.LinkChecksum)
+	updated, err := outdatedFile.UpdateChecksum()
+	assert.NoError(t, err)
+	assert.True(t, updated)
+	assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", outdatedFile.LinkChecksum)
+	assert.True(t, outdatedFile.UpToDate)
+
+	uptodateFile, err := NewLinkedFile(filepath.Join(basePath, "uptodate.yml.link"))
+	assert.NoError(t, err)
+	assert.True(t, uptodateFile.UpToDate)
+	updated, err = uptodateFile.UpdateChecksum()
+	assert.NoError(t, err)
+	assert.False(t, updated)
+}
+
+func TestListLinkedFiles(t *testing.T) {
+	wd, err := os.Getwd()
+	assert.NoError(t, err)
+	basePath := filepath.Join(wd, filepath.FromSlash("testdata/links"))
+	linkedFiles, err := ListLinkedFiles(basePath)
+	require.NoError(t, err)
+	require.NotEmpty(t, linkedFiles)
+	require.Len(t, linkedFiles, 2)
+	assert.Equal(t, "outdated.yml.link", linkedFiles[0].LinkFilePath)
+	assert.Empty(t, linkedFiles[0].LinkChecksum)
+	assert.Equal(t, "outdated.yml", linkedFiles[0].TargetFilePath(""))
+	assert.Equal(t, "./included.yml", linkedFiles[0].IncludedFilePath)
+	assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[0].IncludedFileContentsChecksum)
+	assert.False(t, linkedFiles[0].UpToDate)
+	assert.Equal(t, "uptodate.yml.link", linkedFiles[1].LinkFilePath)
+	assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[1].LinkChecksum)
+	assert.Equal(t, "uptodate.yml", linkedFiles[1].TargetFilePath(""))
+	assert.Equal(t, "./included.yml", linkedFiles[1].IncludedFilePath)
+	assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[1].IncludedFileContentsChecksum)
+	assert.True(t, linkedFiles[1].UpToDate)
+}
+
+func TestCopyFile(t *testing.T) {
+	fileA := "fileA.txt"
+	fileB := "fileB.txt"
+	t.Cleanup(func() { _ = os.Remove(fileA) })
+	t.Cleanup(func() { _ = os.Remove(fileB) })
+
+	createDummyFile(t, fileA, "This is the content of the file.")
+
+	assert.NoError(t, CopyFile(fileA, fileB))
+
+	equal, err := filesEqual(fileA, fileB)
+	assert.NoError(t, err)
+	assert.True(t, equal, "files should be equal after copying")
+}
+
+func TestAreLinkedFilesUpToDate(t *testing.T) {
+	wd, err := os.Getwd()
+	assert.NoError(t, err)
+	basePath := filepath.Join(wd, filepath.FromSlash("testdata/links"))
+	linkedFiles, err := AreLinkedFilesUpToDate(basePath)
+	assert.NoError(t, err)
+	assert.NotEmpty(t, linkedFiles)
+	assert.Len(t, linkedFiles, 1)
+	assert.Equal(t, "outdated.yml.link", linkedFiles[0].LinkFilePath)
+	assert.Empty(t, linkedFiles[0].LinkChecksum)
+	assert.Equal(t, "outdated.yml", linkedFiles[0].TargetFilePath(""))
+	assert.Equal(t, "./included.yml", linkedFiles[0].IncludedFilePath)
+	assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[0].IncludedFileContentsChecksum)
+	assert.False(t, linkedFiles[0].UpToDate)
+}
+
+func TestUpdateLinkedFilesChecksums(t *testing.T) {
+	wd, err := os.Getwd()
+	assert.NoError(t, err)
+	basePath := filepath.Join(wd, filepath.FromSlash("testdata/links"))
+	updated, err := UpdateLinkedFilesChecksums(basePath)
+	t.Cleanup(func() {
+		_ = WriteFile(filepath.Join(updated[0].WorkDir, updated[0].LinkFilePath), []byte(updated[0].IncludedFilePath))
+	})
+	assert.NoError(t, err)
+	assert.NotEmpty(t, updated)
+	assert.Len(t, updated, 1)
+	assert.True(t, updated[0].UpToDate)
+	assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", updated[0].LinkChecksum)
+
+}
+
+func TestLinkedFilesByPackageFrom(t *testing.T) {
+	wd, err := os.Getwd()
+	assert.NoError(t, err)
+	basePath := filepath.Join(wd, filepath.FromSlash("testdata/links"))
+	m, err := LinkedFilesByPackageFrom(basePath)
+	assert.NoError(t, err)
+	assert.NotEmpty(t, m)
+	assert.Len(t, m, 1)
+	assert.NotEmpty(t, m[0])
+	assert.Len(t, m[0], 1)
+	assert.NotEmpty(t, m[0]["testpackage"])
+	assert.Len(t, m[0]["testpackage"], 1)
+	match := strings.HasSuffix(
+		filepath.ToSlash(m[0]["testpackage"][0]),
+		"/testdata/testpackage/included.yml.link",
+	)
+	assert.True(t, match)
+}
+
+func TestIncludeLinkedFiles(t *testing.T) {
+	wd, err := os.Getwd()
+	assert.NoError(t, err)
+	fromDir := filepath.Join(wd, filepath.FromSlash("testdata/testpackage"))
+	toDir := t.TempDir()
+	linkedFiles, err := IncludeLinkedFiles(fromDir, toDir)
+	assert.NoError(t, err)
+	require.Equal(t, 1, len(linkedFiles))
+	assert.FileExists(t, linkedFiles[0].TargetFilePath(toDir))
+	equal, err := filesEqual(
+		filepath.Join(linkedFiles[0].WorkDir, filepath.FromSlash(linkedFiles[0].IncludedFilePath)),
+		linkedFiles[0].TargetFilePath(toDir),
+	)
+	assert.NoError(t, err)
+	assert.True(t, equal, "files should be equal after copying")
+}
+
+func createDummyFile(t *testing.T, filename, content string) {
+	file, err := os.Create(filename)
+	assert.NoError(t, err)
+	defer file.Close()
+	_, err = file.WriteString(content)
+	assert.NoError(t, err)
+}
+
+func filesEqual(file1, file2 string) (bool, error) {
+	f1, err := os.ReadFile(file1)
+	if err != nil {
+		return false, err
+	}
+
+	f2, err := os.ReadFile(file2)
+	if err != nil {
+		return false, err
+	}
+
+	return bytes.Equal(f1, f2), nil
+}
diff --git a/internal/files/repository.go b/internal/files/repository.go
index 9519e08757..d7ae0bf68d 100644
--- a/internal/files/repository.go
+++ b/internal/files/repository.go
@@ -13,6 +13,21 @@ import (
 	"gopkg.in/yaml.v3"
 )
 
+func FindRepositoryRoot() (*os.Root, error) {
+	rootPath, err := FindRepositoryRootDirectory()
+	if err != nil {
+		return nil, fmt.Errorf("root not found: %w", err)
+	}
+
+	// scope any possible operation to the repository folder
+	dirRoot, err := os.OpenRoot(rootPath)
+	if err != nil {
+		return nil, fmt.Errorf("could not open root: %w", err)
+	}
+
+	return dirRoot, nil
+}
+
 func FindRepositoryRootDirectory() (string, error) {
 	workDir, err := os.Getwd()
 	if err != nil {
diff --git a/internal/files/testdata/links/included.yml b/internal/files/testdata/links/included.yml
new file mode 100644
index 0000000000..923ec99b96
--- /dev/null
+++ b/internal/files/testdata/links/included.yml
@@ -0,0 +1,3 @@
+processors:
+  - test:
+      foo: bar
\ No newline at end of file
diff --git a/internal/files/testdata/links/outdated.yml.link b/internal/files/testdata/links/outdated.yml.link
new file mode 100644
index 0000000000..76781e5392
--- /dev/null
+++ b/internal/files/testdata/links/outdated.yml.link
@@ -0,0 +1 @@
+./included.yml
\ No newline at end of file
diff --git a/internal/files/testdata/links/uptodate.yml.link b/internal/files/testdata/links/uptodate.yml.link
new file mode 100644
index 0000000000..d0a9c517ea
--- /dev/null
+++ b/internal/files/testdata/links/uptodate.yml.link
@@ -0,0 +1 @@
+./included.yml d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e
\ No newline at end of file
diff --git a/internal/files/testdata/testpackage/included.yml.link b/internal/files/testdata/testpackage/included.yml.link
new file mode 100644
index 0000000000..61dbe8caee
--- /dev/null
+++ b/internal/files/testdata/testpackage/included.yml.link
@@ -0,0 +1 @@
+../links/included.yml d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e
\ No newline at end of file
diff --git a/internal/files/testdata/testpackage/manifest.yml b/internal/files/testdata/testpackage/manifest.yml
new file mode 100644
index 0000000000..cef06e0e0f
--- /dev/null
+++ b/internal/files/testdata/testpackage/manifest.yml
@@ -0,0 +1,20 @@
+format_version: 2.3.0
+name: testpackage
+title: "With Includes Tests"
+version: 0.0.1
+description: "These are tests of field validation with includes."
+type: integration
+categories:
+  - custom
+conditions:
+  kibana.version: "^8.0.0"
+policy_templates:
+  - name: sample
+    title: Sample logs
+    description: Collect sample logs
+    inputs:
+      - type: logfile
+        title: Collect sample logs from instances
+        description: Collecting sample logs
+owner:
+  github: elastic/integrations
diff --git a/internal/packages/packages.go b/internal/packages/packages.go
index 30d505f949..7000d4382b 100644
--- a/internal/packages/packages.go
+++ b/internal/packages/packages.go
@@ -239,18 +239,22 @@ func MustFindPackageRoot() (string, error) {
 	return root, nil
 }
 
-// FindPackageRoot finds and returns the path to the root folder of a package.
+// FindPackageRoot finds and returns the path to the root folder of a package from the working directory.
 func FindPackageRoot() (string, bool, error) {
 	workDir, err := os.Getwd()
 	if err != nil {
 		return "", false, fmt.Errorf("locating working directory failed: %w", err)
 	}
+	return FindPackageRootFrom(workDir)
+}
 
+// FindPackageRootFrom finds and returns the path to the root folder of a package from a given directory.
+func FindPackageRootFrom(fromDir string) (string, bool, error) {
 	// VolumeName() will return something like "C:" in Windows, and "" in other OSs
 	// rootDir will be something like "C:\" in Windows, and "/" everywhere else.
-	rootDir := filepath.VolumeName(workDir) + string(filepath.Separator)
+	rootDir := filepath.VolumeName(fromDir) + string(filepath.Separator)
 
-	dir := workDir
+	dir := fromDir
 	for dir != "." {
 		path := filepath.Join(dir, PackageManifestFile)
 		fileInfo, err := os.Stat(path)
diff --git a/test/packages/other/with_links/_dev/build/build.yml b/test/packages/other/with_links/_dev/build/build.yml
new file mode 100644
index 0000000000..8a08f65dea
--- /dev/null
+++ b/test/packages/other/with_links/_dev/build/build.yml
@@ -0,0 +1,4 @@
+dependencies:
+  ecs:
+    reference: git@v8.5.2
+    import_mappings: true
diff --git a/test/packages/other/with_links/_dev/build/docs/README.md b/test/packages/other/with_links/_dev/build/docs/README.md
new file mode 100644
index 0000000000..591d5fa57c
--- /dev/null
+++ b/test/packages/other/with_links/_dev/build/docs/README.md
@@ -0,0 +1,9 @@
+# With Links Tests
+
+{{event "first"}}
+
+{{fields "first"}}
+
+{{event "second"}}
+
+{{fields "second"}}
\ No newline at end of file
diff --git a/test/packages/other/with_links/_dev/build/shared/stream.yml.hbs b/test/packages/other/with_links/_dev/build/shared/stream.yml.hbs
new file mode 100644
index 0000000000..5845510de8
--- /dev/null
+++ b/test/packages/other/with_links/_dev/build/shared/stream.yml.hbs
@@ -0,0 +1,7 @@
+paths:
+{{#each paths as |path i|}}
+  - {{path}}
+{{/each}}
+exclude_files: [".gz$"]
+processors:
+  - add_locale: ~
diff --git a/test/packages/other/with_links/changelog.yml b/test/packages/other/with_links/changelog.yml
new file mode 100644
index 0000000000..bb0320a524
--- /dev/null
+++ b/test/packages/other/with_links/changelog.yml
@@ -0,0 +1,6 @@
+# newer versions go on top
+- version: "0.0.1"
+  changes:
+    - description: Initial draft of the package
+      type: enhancement
+      link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link
diff --git a/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log
new file mode 100644
index 0000000000..c8c9ffe960
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log
@@ -0,0 +1 @@
+1.2.3.4 - - [25/Oct/2016:14:49:34 +0200] "GET /favicon.ico HTTP/1.1" 404 571 "http://localhost:8080/" "skip-this-one/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36"
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-config.yml b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-config.yml
new file mode 100644
index 0000000000..958d74a23e
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-config.yml
@@ -0,0 +1,4 @@
+multiline:
+  first_line_pattern: "^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}"
+fields:
+  "@timestamp": "2020-04-28T11:07:58.223Z"
diff --git a/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-expected.json b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-expected.json
new file mode 100644
index 0000000000..1c2f884a44
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-expected.json
@@ -0,0 +1,5 @@
+{
+    "expected": [
+        null
+    ]
+}
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/first/agent/stream/stream.yml.hbs.link b/test/packages/other/with_links/data_stream/first/agent/stream/stream.yml.hbs.link
new file mode 100644
index 0000000000..ed8d9065d2
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/agent/stream/stream.yml.hbs.link
@@ -0,0 +1 @@
+../../../../_dev/build/shared/stream.yml.hbs 069381d45bffbd532a4af8953766a053e75a2aceebdafdffc2264e800fcd1363
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/first/elasticsearch/ingest_pipeline/default.yml.link b/test/packages/other/with_links/data_stream/first/elasticsearch/ingest_pipeline/default.yml.link
new file mode 100644
index 0000000000..c8e0005e25
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/elasticsearch/ingest_pipeline/default.yml.link
@@ -0,0 +1 @@
+../../../../../pipeline_tests/data_stream/test/elasticsearch/ingest_pipeline/default.yml f7c5f0c03aca8ef68c379a62447bdafbf0dcf32b1ff2de143fd6878ee01a91ad
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/first/fields/base-fields.yml b/test/packages/other/with_links/data_stream/first/fields/base-fields.yml
new file mode 100644
index 0000000000..7c798f4534
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/fields/base-fields.yml
@@ -0,0 +1,12 @@
+- name: data_stream.type
+  type: constant_keyword
+  description: Data stream type.
+- name: data_stream.dataset
+  type: constant_keyword
+  description: Data stream dataset.
+- name: data_stream.namespace
+  type: constant_keyword
+  description: Data stream namespace.
+- name: '@timestamp'
+  type: date
+  description: Event timestamp.
diff --git a/test/packages/other/with_links/data_stream/first/fields/histogram-fields.yml b/test/packages/other/with_links/data_stream/first/fields/histogram-fields.yml
new file mode 100644
index 0000000000..128a0cb1ca
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/fields/histogram-fields.yml
@@ -0,0 +1,3 @@
+- name: service.status.*.histogram
+  type: object
+  object_type: histogram
diff --git a/test/packages/other/with_links/data_stream/first/manifest.yml b/test/packages/other/with_links/data_stream/first/manifest.yml
new file mode 100644
index 0000000000..979ef29d64
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/manifest.yml
@@ -0,0 +1,13 @@
+title: "First"
+type: logs
+streams:
+  - input: logfile
+    title: Sample logs
+    description: Collect sample logs
+    vars:
+      - name: paths
+        type: text
+        title: Paths
+        multi: true
+        default:
+          - /var/log/*.log
diff --git a/test/packages/other/with_links/data_stream/first/sample_event.json b/test/packages/other/with_links/data_stream/first/sample_event.json
new file mode 100644
index 0000000000..a242024f51
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/sample_event.json
@@ -0,0 +1,26 @@
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/second/agent/stream/stream.yml.hbs.link b/test/packages/other/with_links/data_stream/second/agent/stream/stream.yml.hbs.link
new file mode 100644
index 0000000000..ed8d9065d2
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/agent/stream/stream.yml.hbs.link
@@ -0,0 +1 @@
+../../../../_dev/build/shared/stream.yml.hbs 069381d45bffbd532a4af8953766a053e75a2aceebdafdffc2264e800fcd1363
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/second/elasticsearch/ingest_pipeline/default.yml b/test/packages/other/with_links/data_stream/second/elasticsearch/ingest_pipeline/default.yml
new file mode 100644
index 0000000000..81221adf3f
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/elasticsearch/ingest_pipeline/default.yml
@@ -0,0 +1,10 @@
+---
+description: Pipeline for processing sample logs
+processors:
+- set:
+    field: sample_field
+    value: "1"
+on_failure:
+- set:
+    field: error.message
+    value: '{{ _ingest.on_failure_message }}'
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/second/fields/base-fields.yml b/test/packages/other/with_links/data_stream/second/fields/base-fields.yml
new file mode 100644
index 0000000000..7c798f4534
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/fields/base-fields.yml
@@ -0,0 +1,12 @@
+- name: data_stream.type
+  type: constant_keyword
+  description: Data stream type.
+- name: data_stream.dataset
+  type: constant_keyword
+  description: Data stream dataset.
+- name: data_stream.namespace
+  type: constant_keyword
+  description: Data stream namespace.
+- name: '@timestamp'
+  type: date
+  description: Event timestamp.
diff --git a/test/packages/other/with_links/data_stream/second/fields/geo-fields.yml b/test/packages/other/with_links/data_stream/second/fields/geo-fields.yml
new file mode 100644
index 0000000000..a618607d34
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/fields/geo-fields.yml
@@ -0,0 +1,2 @@
+- name: destination.geo.location
+  external: ecs
diff --git a/test/packages/other/with_links/data_stream/second/fields/histogram-fields.yml b/test/packages/other/with_links/data_stream/second/fields/histogram-fields.yml
new file mode 100644
index 0000000000..128a0cb1ca
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/fields/histogram-fields.yml
@@ -0,0 +1,3 @@
+- name: service.status.*.histogram
+  type: object
+  object_type: histogram
diff --git a/test/packages/other/with_links/data_stream/second/manifest.yml b/test/packages/other/with_links/data_stream/second/manifest.yml
new file mode 100644
index 0000000000..979ef29d64
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/manifest.yml
@@ -0,0 +1,13 @@
+title: "Second"
+type: logs
+streams:
+  - input: logfile
+    title: Sample logs
+    description: Collect sample logs
+    vars:
+      - name: paths
+        type: text
+        title: Paths
+        multi: true
+        default:
+          - /var/log/*.log
diff --git a/test/packages/other/with_links/data_stream/second/sample_event.json b/test/packages/other/with_links/data_stream/second/sample_event.json
new file mode 100644
index 0000000000..a242024f51
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/sample_event.json
@@ -0,0 +1,26 @@
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
\ No newline at end of file
diff --git a/test/packages/other/with_links/docs/README.md b/test/packages/other/with_links/docs/README.md
new file mode 100644
index 0000000000..6f99a892f2
--- /dev/null
+++ b/test/packages/other/with_links/docs/README.md
@@ -0,0 +1,85 @@
+# With Links Tests
+
+An example event for `first` looks as following:
+
+```json
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
+```
+
+**Exported fields**
+
+| Field | Description | Type |
+|---|---|---|
+| @timestamp | Event timestamp. | date |
+| data_stream.dataset | Data stream dataset. | constant_keyword |
+| data_stream.namespace | Data stream namespace. | constant_keyword |
+| data_stream.type | Data stream type. | constant_keyword |
+| service.status.\*.histogram |  | object |
+
+
+An example event for `second` looks as following:
+
+```json
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
+```
+
+**Exported fields**
+
+| Field | Description | Type |
+|---|---|---|
+| @timestamp | Event timestamp. | date |
+| data_stream.dataset | Data stream dataset. | constant_keyword |
+| data_stream.namespace | Data stream namespace. | constant_keyword |
+| data_stream.type | Data stream type. | constant_keyword |
+| destination.geo.location | Longitude and latitude. | geo_point |
+| service.status.\*.histogram |  | object |
diff --git a/test/packages/other/with_links/manifest.yml b/test/packages/other/with_links/manifest.yml
new file mode 100644
index 0000000000..3b0d50f567
--- /dev/null
+++ b/test/packages/other/with_links/manifest.yml
@@ -0,0 +1,20 @@
+format_version: 2.3.0
+name: with_links
+title: "With Links Tests"
+version: 0.0.1
+description: "These are tests of field validation with links."
+type: integration
+categories:
+  - custom
+conditions:
+  kibana.version: "^8.0.0"
+policy_templates:
+  - name: sample
+    title: Sample logs
+    description: Collect sample logs
+    inputs:
+      - type: logfile
+        title: Collect sample logs from instances
+        description: Collecting sample logs
+owner:
+  github: elastic/integrations