Add action and scripts for footprint tracking.

Signed-off-by: Anas Nashif <anas.nashif@intel.com>
pull/37055/head
5 changed files with 291 additions and 0 deletions
@@ -0,0 +1,64 @@
name: Footprint Tracking

# Run every 12 hours and on tags
on:
  schedule:
    - cron: '50 1/12 * * *'
  push:
    paths:
      - 'VERSION'
    tags:
      # only publish v* tags, do not care about zephyr-v* which point to the
      # same commit
      - 'v*'

jobs:
  footprint-tracking-cancel:
    runs-on: ubuntu-latest
    steps:
      - name: Cancel Previous Runs
        uses: styfle/cancel-workflow-action@0.6.0
        with:
          access_token: ${{ github.token }}
  footprint-tracking:
    runs-on: ubuntu-latest
    needs: footprint-tracking-cancel
    container:
      image: zephyrprojectrtos/ci:v0.17.1
      options: '--entrypoint /bin/bash'
    strategy:
      fail-fast: false
    env:
      ZEPHYR_SDK_INSTALL_DIR: /opt/toolchains/zephyr-sdk-0.12.4
      CLANG_ROOT_DIR: /usr/lib/llvm-12
      ZEPHYR_TOOLCHAIN_VARIANT: zephyr
    steps:
      - name: Update PATH for west
        run: |
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: checkout
        uses: actions/checkout@v2
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

      - name: west setup
        run: |
          west init -l . || true
          west update

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.FOOTPRINT_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.FOOTPRINT_AWS_ACCESS_KEY }}
          aws-region: us-east-1

      - name: Record Footprint
        env:
          BASE_REF: ${{ github.base_ref }}
        run: |
          export ZEPHYR_BASE=${PWD}
          ./scripts/footprint/track.py -p scripts/footprint/plan.txt
          aws s3 sync --quiet footprint_data/ s3://testing.zephyrproject.org/footprint_data/
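Two details worth decoding here: the cron field '1/12' means hour 1 and every 12th hour after it, so the job fires at 01:50 and 13:50 UTC, and the Record Footprint step is all the job really does; everything before it is environment setup. A quick way to sanity-check the schedule, as a sketch assuming the third-party croniter package:

    # Sketch: confirm when '50 1/12 * * *' fires (assumes: pip install croniter).
    from datetime import datetime
    from croniter import croniter

    it = croniter('50 1/12 * * *', datetime(2021, 7, 1))
    for _ in range(3):
        print(it.get_next(datetime))
    # 2021-07-01 01:50:00 -> 2021-07-01 13:50:00 -> 2021-07-02 01:50:00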
scripts/footprint/plan.txt
@@ -0,0 +1,10 @@
|
||||
footprints,default,frdm_k64f,tests/benchmarks/footprints, |
||||
footprints,userspace,frdm_k64f,tests/benchmarks/footprints,-DCONF_FILE=prj_userspace.conf |
||||
footprints,default,disco_l475_iot1,tests/benchmarks/footprints, |
||||
footprints,userspace,disco_l475_iot1,tests/benchmarks/footprints,-DCONF_FILE=prj_userspace.conf |
||||
footprints,default,nrf5340dk_nrf5340_cpuapp,tests/benchmarks/footprints, |
||||
footprints,default,nrf51dk_nrf51422,tests/benchmarks/footprints, |
||||
footprints,default,altera_max10,tests/benchmarks/footprints, |
||||
footprints,default,hifive1_revb,tests/benchmarks/footprints, |
||||
footprints,default,ehl_crb,tests/benchmarks/footprints, |
||||
footprints,userspace,ehl_crb,tests/benchmarks/footprints,-DCONF_FILE=prj_userspace.conf |
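Each row is name,feature,board,app,extra-build-options (the last field may be empty); track.py below expands a row into a west build invocation. A minimal sketch of that expansion for the first row, mirroring the command construction in track.py:

    # Sketch: how one plan.txt row becomes a west command (mirrors track.py below).
    name, feature, board, app, options = \
        "footprints,default,frdm_k64f,tests/benchmarks/footprints,".split(",")

    cmd = ["west", "build", "-d", f"out/{name}/{feature}/{board}",
           "-b", board, app, "-t", "footprint"]
    if options:  # extra args, e.g. -DCONF_FILE=prj_userspace.conf
        cmd += ["--", options]

    print(" ".join(cmd))
    # -> west build -d out/footprints/default/frdm_k64f -b frdm_k64f
    #    tests/benchmarks/footprints -t footprint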
scripts/footprint/track.py
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
# Copyright (c) 2021 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0

import argparse
import csv
import pathlib
import shutil
import subprocess

from git import Git


def parse_args():
    parser = argparse.ArgumentParser(
        description="Generate footprint data based on a predefined plan.")
    parser.add_argument("-p", "--plan", help="Path of test plan", required=True)

    return parser.parse_args()


def main():
    args = parse_args()
    # Key the output tree by 'git describe' so results map back to a commit.
    g = Git(".")
    version = g.describe("--abbrev=12")
    pathlib.Path(f'footprint_data/{version}').mkdir(exist_ok=True, parents=True)

    with open(args.plan) as csvfile:
        csvreader = csv.reader(csvfile)
        for row in csvreader:
            # Plan rows: name,feature,board,app,extra build options
            name = row[0]
            feature = row[1]
            board = row[2]
            app = row[3]
            options = row[4]

            cmd = ['west', 'build',
                   '-d', f'out/{name}/{feature}/{board}',
                   '-b', board,
                   app,
                   '-t', 'footprint']

            if options != '':
                cmd += ['--', options]

            print(" ".join(cmd))

            try:
                subprocess.check_output(cmd, stderr=subprocess.STDOUT,
                                        timeout=120, universal_newlines=True)
                print("Copying files...")
                dest = f'footprint_data/{version}/{name}/{feature}/{board}'
                pathlib.Path(dest).mkdir(parents=True, exist_ok=True)

                shutil.copy(f'out/{name}/{feature}/{board}/ram.json', dest)
                shutil.copy(f'out/{name}/{feature}/{board}/rom.json', dest)
            except subprocess.CalledProcessError as exc:
                print("Status : FAIL", exc.returncode, exc.output)


if __name__ == "__main__":
    main()
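For every plan row that builds, track.py leaves a ram.json/rom.json pair under footprint_data/<version>/<name>/<feature>/<board>/. The uploader below recovers all metadata purely from those path components, so the data directory must be passed as a relative path with exactly this shape. A small sketch of the consumer side, mirroring the path parsing in process_files():

    # Sketch: walk track.py's output tree the way the uploader below does.
    # Assumes the cwd contains footprint_data/ (a relative path, so that
    # split("/") puts the version string at index 1, as process_files() expects).
    import glob

    for path in glob.glob("footprint_data/*/*/*/*/*.json"):
        _, version, name, feature, board, fname = path.split("/")
        print(version, name, feature, board, fname)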
@@ -0,0 +1,153 @@
#!/usr/bin/env python3
# Copyright (c) 2021 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0

import argparse
import glob
import json
import os
from datetime import datetime

from anytree import PreOrderIter
from anytree.importer import DictImporter
from anytree.search import find
from dateutil.relativedelta import relativedelta
from git import Repo
from git.exc import BadName
from influxdb import InfluxDBClient
from tabulate import tabulate

importer = DictImporter()

TODAY = datetime.utcnow()
two_mon_rel = relativedelta(months=4)

influx_dsn = 'influxdb://localhost:8086/footprint_tracking'


def create_event(data, board, feature, commit, current_time, typ, application):
    # One InfluxDB point per component, tagged so series can be filtered by
    # board, feature, commit, application and ram/rom type.
    footprint_data = []
    client = InfluxDBClient.from_dsn(influx_dsn)
    client.create_database('footprint_tracking')
    for d in data.keys():
        footprint_data.append({
            "measurement": d,
            "tags": {
                "board": board,
                "commit": commit,
                "application": application,
                "type": typ,
                "feature": feature
            },
            "time": current_time,
            "fields": {
                "value": data[d]
            }
        })

    client.write_points(footprint_data, time_precision='s',
                        database='footprint_tracking')


def parse_args():
    global args
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument("-d", "--data", help="Data Directory")
    parser.add_argument("-y", "--dryrun", action="store_true",
                        help="Dry run, do not upload to database")
    parser.add_argument("-z", "--zephyr-base", help="Zephyr tree")
    parser.add_argument("-f", "--file", help="JSON file with footprint data")
    args = parser.parse_args()


def parse_file(json_file):
    with open(json_file, "r") as fp:
        contents = json.load(fp)
        root = importer.import_(contents['symbols'])

    # A workspace build has two top-level trees (ZEPHYR_BASE and WORKSPACE);
    # otherwise everything hangs off the single root node.
    zr = find(root, lambda node: node.name == 'ZEPHYR_BASE')
    ws = find(root, lambda node: node.name == 'WORKSPACE')

    data = {}
    if zr and ws:
        trees = [zr, ws]
    else:
        trees = [root]

    for node in PreOrderIter(root, maxlevel=2):
        if node.name not in ['WORKSPACE', 'ZEPHYR_BASE']:
            if node.name in ['Root', 'Symbols']:
                data['all'] = node.size
            else:
                data[node.name] = node.size

    for t in trees:
        root = t.name
        for node in PreOrderIter(t, maxlevel=2):
            if node.name == root:
                continue
            comp = node.name
            if comp in ['Root', 'Symbols']:
                data['all'] = node.size
            else:
                data[comp] = node.size

    return data


def process_files(data_dir, zephyr_base, dry_run):
    repo = Repo(zephyr_base)

    for hash in os.listdir(f'{data_dir}'):
        if not dry_run:
            client = InfluxDBClient.from_dsn(influx_dsn)
            result = client.query(f"select * from kernel where commit = '{hash}';")
            if result:
                print(f"Skipping {hash}...")
                continue
        print(f"Importing {hash}...")
        for file in glob.glob(f"{args.data}/{hash}/**/*json", recursive=True):
            file_data = file.split("/")
            json_file = os.path.basename(file)
            if 'ram' in json_file:
                typ = 'ram'
            else:
                typ = 'rom'
            commit = file_data[1]
            app = file_data[2]
            feature = file_data[3]
            board = file_data[4]

            data = parse_file(file)

            try:
                gitcommit = repo.commit(f'{commit}')
                current_time = gitcommit.committed_datetime
            except BadName:
                # 'git describe' output looks like v2.6.0-123-g<sha>; fall
                # back to the sha after '-g' when the full string is not a
                # valid revision.
                cidx = commit.find('-g') + 2
                gitcommit = repo.commit(f'{commit[cidx:]}')
                current_time = gitcommit.committed_datetime

            print(current_time)

            if not dry_run:
                create_event(data, board, feature, commit, current_time, typ, app)


def main():
    parse_args()

    if args.data and args.zephyr_base:
        process_files(args.data, args.zephyr_base, args.dryrun)

    if args.file:
        data = parse_file(args.file)
        items = []
        for component, value in data.items():
            items.append([component, value])

        table = tabulate(items, headers=['Component', 'Size'], tablefmt='orgtbl')
        print(table)


if __name__ == "__main__":
    main()
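The ram.json/rom.json inputs are symbol trees with per-node sizes, which parse_file() flattens into a component-to-size dict. A minimal hand-made input that exercises the -f table path, as a sketch; the names and sizes below are illustrative, not real build output:

    # Sketch: the input shape parse_file() expects (illustrative values only).
    import json

    sample = {
        "symbols": {                 # imported with anytree's DictImporter
            "name": "Symbols",       # root node; its size becomes data['all']
            "size": 4096,
            "children": [
                {"name": "kernel", "size": 1024},
                {"name": "drivers", "size": 512},
            ],
        }
    }
    with open("ram.json", "w") as fp:
        json.dump(sample, fp)

    # Running the script above with '-f ram.json' then prints an org-mode
    # table of component sizes via tabulate:
    # | Component   |   Size |
    # |-------------+--------|
    # | all         |   4096 |
    # | kernel      |   1024 |
    # | drivers     |    512 |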