Feature: Support Andes Targets #82

Draft · wants to merge 3 commits into base: main
46 changes: 46 additions & 0 deletions mlonmcu/feature/features.py
@@ -1352,3 +1352,49 @@ def get_platform_config(self, platform):
return {
f"{platform}.profile": self.enabled,
}


@register_feature("andes_libnn")
class AndesLibNN(SetupFeature, FrameworkFeature, PlatformFeature):
"""AndesLib NN wrappers for TFLite Micro"""

DEFAULTS = {
**FeatureBase.DEFAULTS,
"core": "d25", # Options: d25, d45, nv27v
}

REQUIRED = ["andes_libnn.src_dir"]

def __init__(self, features=None, config=None):
super().__init__("andes_libnn", features=features, config=config)

@property
def andes_libnn_dir(self):
return str(self.config["andes_libnn.src_dir"])

@property
def core(self):
return self.config["core"]

def add_framework_config(self, framework, config):
assert framework == "tflm", f"Unsupported feature '{self.name}' for framework '{framework}'"
if f"{framework}.optimized_kernel" in config and config[f"{framework}.optimized_kernel"] not in [
None,
"andes_libnn",
]:
RuntimeError(f"There is already a optimized_kernel selected for framework '{framework}'")
else:
config[f"{framework}.optimized_kernel"] = "andes_libnn"

def get_platform_defs(self, platform):
assert platform in ["mlif"], f"Unsupported feature '{self.name}' for platform '{platform}'"
return {
"ANDES_LIBNN": self.enabled,
"ANDES_LIBNN_DIR": self.andes_libnn_dir,
}

def get_required_cache_flags(self):
ret = {}

ret["tf.src_dir"] = [f"andescore-{self.core}"]
return ret
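
Reviewer note: the sketch below is an illustration only (plain Python, not importing mlonmcu) of what the new feature contributes. The core option selects which andescore-* flagged tf.src_dir cache entry is requested, while the required andes_libnn.src_dir is produced by the patched TensorFlow clone task later in this PR.

# Illustration only -- mirrors AndesLibNN.get_platform_defs() and get_required_cache_flags().
def andes_libnn_platform_defs(src_dir, enabled=True):
    # Definitions handed to the MLIF platform build.
    return {"ANDES_LIBNN": enabled, "ANDES_LIBNN_DIR": str(src_dir)}

def andes_libnn_cache_flags(core="d25"):
    # Requests the andes-patched TF checkout produced by clone_tensorflow.
    return {"tf.src_dir": [f"andescore-{core}"]}

print(andes_libnn_platform_defs("/path/to/tf/tensorflow/lite/micro/tools/make/targets/andesevb/ae350"))
print(andes_libnn_cache_flags("nx27v"))  # -> {'tf.src_dir': ['andescore-nx27v']}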
2 changes: 1 addition & 1 deletion mlonmcu/flow/tflm/framework.py
@@ -29,7 +29,7 @@ class TFLMFramework(Framework):

name = "tflm"

FEATURES = ["muriscvnn", "cmsisnn"]
FEATURES = ["muriscvnn", "cmsisnn", "andes_libnn"]

DEFAULTS = {
"optimized_kernel": None,
1 change: 1 addition & 0 deletions mlonmcu/platform/mlif.py
@@ -52,6 +52,7 @@ class MlifPlatform(CompilePlatform, TargetPlatform):
"arm_dsp",
"auto_vectorize",
"benchmark",
"andes_libnn",
] # TODO: allow Feature-Features with automatic resolution of initialization order
)

35 changes: 30 additions & 5 deletions mlonmcu/setup/tasks.py
@@ -40,28 +40,52 @@


def _validate_tensorflow(context: MlonMcuContext, params=None):
andes_patch = params.get("andes_patch", False)
if andes_patch:
if not context.environment.has_feature("andes_libnn"):
return False
return context.environment.has_framework("tflm")


@Tasks.provides(["tf.src_dir"])
@Tasks.validate(_validate_tensorflow)
@Tasks.param("andes_patch", [False, "andescore-d25", "andescore-d45", "andescore-nx27v"])
@Tasks.register(category=TaskType.FRAMEWORK)
def clone_tensorflow(
context: MlonMcuContext, params=None, rebuild=False, verbose=False, threads=multiprocessing.cpu_count()
):
"""Clone the TF/TFLM repository."""
tfName = utils.makeDirName("tf")
andes_patch = params.get("andes_patch", False)
flags = utils.makeFlags((andes_patch, andes_patch))
tfName = utils.makeDirName("tf", flags=flags)
tfSrcDir = context.environment.paths["deps"].path / "src" / tfName
if rebuild or not utils.is_populated(tfSrcDir):
if andes_patch:
assert "andescore" in andes_patch
andesSrcDir = tfSrcDir / "tensorflow" / "lite" / "micro" / "tools" / "make" / "targets" / "andesevb" / "ae350"
if rebuild or not utils.is_populated(tfSrcDir) or (andes_patch and not utils.is_populated(andesSrcDir)):
tfRepo = context.environment.repos["tensorflow"]
utils.clone(tfRepo.url, tfSrcDir, branch=tfRepo.ref, refresh=rebuild)
context.cache["tf.src_dir"] = tfSrcDir
if andes_patch:
coreUrl = "https://raw.githubusercontent.com/mlcommons/tiny_results_v0.7/main/closed/Andes/code/"
coreFile = andes_patch.replace("-", "_")
coreExt = ".tgz"
coreArchive = coreFile + coreExt
utils.download_and_extract(coreUrl, coreArchive, tfSrcDir, merge=True, auto=False)
# Workaround for Makefile incompatibility
toRemove = tfSrcDir / "tensorflow" / "lite" / "micro" / "examples" / "mlperf_libnn" / "Makefile.inc"
utils.remove(toRemove)

context.cache["tf.src_dir", flags] = tfSrcDir
if andes_patch:
context.cache["andes_libnn.src_dir", flags] = andesSrcDir


@Tasks.needs(["tf.src_dir"])
@Tasks.provides(["tf.dl_dir", "tf.lib_path"])
# @Tasks.param("dbg", False)
@Tasks.param("dbg", True)
@Tasks.param("andes_patch", [False, "andescore-d25", "andescore-d45", "andescore-nx27v"])
@Tasks.validate(_validate_tensorflow)
@Tasks.register(category=TaskType.FRAMEWORK)
def build_tensorflow(
@@ -70,9 +94,10 @@ def build_tensorflow(
"""Download tensorflow dependencies and build lib."""
if not params:
params = {}
flags = utils.makeFlags((params["dbg"], "dbg"))
flags = utils.makeFlags((params["andes_patch"], params["andes_patch"]), (params["dbg"], "dbg"))
flags_ = utils.makeFlags((params["andes_patch"], params["andes_patch"]))
# tfName = utils.makeDirName("tf", flags=flags)
tfSrcDir = context.cache["tf.src_dir"]
tfSrcDir = context.cache["tf.src_dir", flags_]
tflmDir = Path(tfSrcDir) / "tensorflow" / "lite" / "micro"
tflmBuildDir = tflmDir / "tools" / "make"
tflmDownloadsDir = tflmBuildDir / "downloads"
@@ -94,7 +119,7 @@
cwd=tfSrcDir,
live=verbose,
)
context.cache["tf.dl_dir"] = tflmDownloadsDir
context.cache["tf.dl_dir", flags_] = tflmDownloadsDir
context.cache["tf.lib_path", flags] = tflmLib # ignore!


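For clarity, the andes_patch parameter both flags the cache entries and determines which MLCommons Tiny v0.7 archive is merged into the TF tree. A small standalone sketch of that name mapping (illustrative values only):

# Illustration only -- mirrors the URL/archive construction in clone_tensorflow above.
andes_patch = "andescore-nx27v"  # one of: "andescore-d25", "andescore-d45", "andescore-nx27v"
core_url = "https://raw.githubusercontent.com/mlcommons/tiny_results_v0.7/main/closed/Andes/code/"
core_archive = andes_patch.replace("-", "_") + ".tgz"  # -> "andescore_nx27v.tgz"
print(core_url + core_archive)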
36 changes: 25 additions & 11 deletions mlonmcu/setup/utils.py
@@ -21,6 +21,7 @@
import sys
import multiprocessing
import subprocess
import distutils.dir_util

# import logging
import tarfile
@@ -297,11 +298,16 @@ def remove(path):


def move(src, dest):
shutil.move(src, dest)
shutil.move(str(src), str(dest))


def copy(src, dest):
shutil.copy(src, dest)
def copy(src, dest, recursive=False):
if recursive:
# The following does not allow overwriting
# shutil.copytree(str(src), str(dest))
distutils.dir_util.copy_tree(str(src), str(dest))
else:
shutil.copy(str(src), str(dest))


def is_populated(path):
@@ -310,7 +316,7 @@ def is_populated(path):
return path.is_dir() and os.listdir(path.resolve())


def download_and_extract(url, archive, dest):
def download_and_extract(url, archive, dest, merge=False, auto=True):
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_archive = os.path.join(tmp_dir, archive)
base_name = Path(archive).stem
@@ -323,14 +329,22 @@ def download_and_extract(url, archive, dest):
remove(os.path.join(tmp_dir, tmp_archive))
mkdirs(dest.parent)
if (Path(tmp_dir) / base_name).is_dir(): # Archive contains a subdirectory with the same name
move(os.path.join(tmp_dir, base_name), dest)
if merge:
copy(os.path.join(tmp_dir, base_name), dest, recursive=True)
else:
move(os.path.join(tmp_dir, base_name), dest)
else:
contents = list(Path(tmp_dir).glob("*"))
if len(contents) == 1:
tmp_dir_new = Path(tmp_dir) / contents[0]
if tmp_dir_new.is_dir(): # Archive contains a single subdirectory with a different name
tmp_dir = tmp_dir_new
move(tmp_dir, dest)
if auto:
contents = list(Path(tmp_dir).glob("*"))
if len(contents) == 1:
print("case 2.1")
tmp_dir_new = Path(tmp_dir) / contents[0]
if tmp_dir_new.is_dir(): # Archive contains a single subdirectory with a different name
tmp_dir = tmp_dir_new
if merge:
copy(tmp_dir, dest, recursive=True)
else:
move(tmp_dir, dest)


def patch(path, cwd=None):
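As a usage note for the extended download_and_extract() helper: merge=True copies the extracted tree on top of an existing destination instead of moving it, and auto=False skips the single-subdirectory detection. A minimal sketch mirroring the call made in clone_tensorflow (paths are illustrative):

from pathlib import Path
from mlonmcu.setup import utils

core_url = "https://raw.githubusercontent.com/mlcommons/tiny_results_v0.7/main/closed/Andes/code/"
core_archive = "andescore_d25.tgz"
tf_src_dir = Path("deps/src/tf_andescore-d25")  # assumed path, for illustration only

# Default behaviour (unchanged): the extracted tree is moved to the destination.
# utils.download_and_extract(core_url, core_archive, tf_src_dir)

# With merge=True the archive contents are copied over the existing TF checkout
# (overwriting files); auto=False disables the single-subdirectory heuristic.
utils.download_and_extract(core_url, core_archive, tf_src_dir, merge=True, auto=False)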
129 changes: 129 additions & 0 deletions resources/templates/andes.yml.j2
@@ -0,0 +1,129 @@
---
# The MLONMCU_HOME is filled in automatically when creating the environment
home: "{{ home_dir }}"
logging:
level: DEBUG
to_file: false
rotate: false
cleanup:
auto: true
keep: 50
# Default locations for certain directories can be changed here
# Non-absolute paths will always be treated relative to the MLONMCU_HOME
paths:
# Where the dependencies are downloaded and installed
deps: deps
# If logging to file is used keep logs in this directory
logs: logs
# Location where reports and artifacts are written to
results: results
# Directory where custom extensions can be integrated
plugins: plugins
# Directory for intermediate build products, should be located on a large enough drive
temp: temp
# A collection of models which will be used to look for models
# The paths will be checked in the order defined here stopping at the first match
# Non-existent paths will be skipped without throwing an error
models:
- "{{ home_dir }}/models"
- "{{ config_dir }}/models"
# Default clone URLs for the required repositories are defined here
repos:
tensorflow:
url: "https://github.com/tensorflow/tflite-micro.git"
ref: a30942eb03efc379e9e80279a44e39078344e5fa
muriscvnn:
url: "https://github.com/tum-ei-eda/muriscv-nn.git"
ref: c023b80a51c1b48ec62b9b092d047e9ac0bab3e8
etiss:
url: "https://github.com/tum-ei-eda/etiss.git"
ref: 4d2d26fb1fdb17e1da3a397c35d6f8877bf3ceab
spike:
url: "https://github.com/riscv-software-src/riscv-isa-sim.git"
ref: 0bc176b3fca43560b9e8586cdbc41cfde073e17a
spikepk:
url: "https://github.com/riscv-software-src/riscv-pk.git"
ref: 7e9b671c0415dfd7b562ac934feb9380075d4aa2
cmsis:
url: "https://github.com/PhilippvK/CMSIS_5.git"
ref: ad1c3cad8f1240ef14a2c55381a78d792d76ec4d
mlif:
url: "https://github.com/tum-ei-eda/mlonmcu-sw.git"
ref: 50c09c18ea003de976c5745d06b5f46dbb3f1273
# Here all supported frameworks with their specific features are defined
# Optionally disable unwanted or incompatible backends or features here
# The configured defaults are used if no backend was specified in the command line options
frameworks:
default: tflm
tflm:
enabled: true
backends:
default: tflmi
tflmi:
enabled: true
features:
debug_arena: true
features:
muriscvnn: true
cmsisnn: true
andes_libnn: true
# Some frontends are experimental and therefore disabled here
# Features like packing are only available in certain environments
frontends:
tflite:
enabled: true
features:
validate: true
visualize: true
# Some targets/platforms support multiple toolchains
toolchains:
gcc: true
llvm: true
# Platforms extend the number of supported targets
platforms:
mlif:
enabled: true
features:
debug: true
validate: true
benchmark: true
# List of supported targets in the environment
targets:
default: etiss_pulpino
etiss_pulpino:
enabled: true
features:
gdbserver: true
etissdbg: true
trace: true
log_instrs: true
# vext: true
# pext: true
host_x86:
enabled: true
features:
gdbserver: true
spike:
enabled: true
features:
vext: true
pext: true
cachesim: true
log_instrs: true
corstone300:
enabled: true
features:
ethosu: false
arm_mvei: true
arm_dsp: true
postprocesses:
use: []
# This is where further options such as specific versions of dependencies can be set in the future
vars:
allow_extensions: false
llvm.version: "14.0.0"
runs_per_stage: true
riscv_gcc.dl_url: "https://syncandshare.lrz.de/dl/fiD9HnHYkb2V8kNbVQYv242m/rv32gc.tar.gz"
riscv_gcc.dl_url_vext: "https://syncandshare.lrz.de/dl/fiGp4r3f6SZaC5QyDi6QUiNQ/rv32gcv_new.tar.gz"
riscv_gcc.dl_url_pext: "https://syncandshare.lrz.de/dl/fiNvP4mzVQ8uDvgT9Yf2bqNk/rv32gcp.tar.xz"
flags: []