Refactoring
* Split into three separate commands for generating, exporting the dot file, and cleaning
* Add requirements.txt
Alexander Verbitsky committed Dec 27, 2016
1 parent 2389ce5 commit 01ec150
Showing 11 changed files with 386 additions and 225 deletions.
9 changes: 5 additions & 4 deletions .gitignore
@@ -1,8 +1,9 @@
logs
*.pyc
*.dot
*.db
make.png
make.dot
Makefile*
*.mk
*.png
*.dot
config.inc.sh
make_profile.db
make_profile.db
4 changes: 3 additions & 1 deletion README.md
@@ -1,5 +1,7 @@

Usage example:

python make.py -f Makefile -o Makefile_work.mk
python preprocess.py -i Makefile -o Makefile_work.mk

python dot_export.py -i Makefile -db profile.db > make.dot
dot -Tpng make.dot > make.png
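
The clean command added by this commit follows the same pattern (a sketch based on clean.py's argument parser: `-i` names the Makefile, targets are positional and default to `all`):

python clean.py -i Makefile all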
35 changes: 35 additions & 0 deletions clean.py
@@ -1,7 +1,11 @@
#!/usr/bin/python3

import argparse
import logging
import os
import shutil
import sys

from lib.parser import parse, get_dependencies_influences, Tokens


def rm_node(node):
if not os.path.exists(node):
@@ -19,3 +23,34 @@ def clean_target(t, deps):
for sub_t in deps[t]:
rm_node(sub_t)
clean_target(sub_t, deps)

def main(argv):
    options = argparse.ArgumentParser(
        description='clean targets and their dependencies built from a Makefile')
options.add_argument(
'-i',
action='store',
dest='in_filename',
type=str,
default=None,
help='Makefile to read (default stdin)')
options.add_argument(
'targets',
default=['all'],
metavar='target',
type=str,
nargs='*',
help='Targets to process')

args = options.parse_args(argv)
    in_file = open(args.in_filename, 'r') if args.in_filename else sys.stdin

ast = parse(in_file)
deps, influences, order_only = get_dependencies_influences(ast)

    for target in args.targets:
        clean_target(target, deps)


if __name__ == '__main__':
main(sys.argv[1:])
57 changes: 56 additions & 1 deletion dot_export.py
@@ -1,7 +1,14 @@
#!/usr/bin/python3

import argparse
import os
import sys
import datetime

import collections
import pprint

from lib.parser import parse, get_dependencies_influences, Tokens
from lib.timing import parse_timing_db


def classify_target(name, influences, dependencies, inputs, order_only):
@@ -95,3 +102,51 @@ def export_dot(f, influences, dependencies, order_only, performance):
f.write('cluster_inputs_DUMMY -> cluster_not_implemented_DUMMY -> cluster_tools_DUMMY [ style=invis ];')
f.write('cluster_result_DUMMY -> cluster_not_implemented_DUMMY -> cluster_order_only_DUMMY [ style=invis ];')
f.write('}')
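
# Note: the style=invis edges above are a standard Graphviz technique; they
# impose layout constraints between the clusters' dummy nodes without drawing
# visible arrows, which keeps the clusters in a stable relative placement.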


def main(argv):
options = argparse.ArgumentParser(
description='export graph of targets from Makefile')
options.add_argument(
'-i',
action='store',
dest='in_filename',
type=str,
default=None,
help='Makefile to read (default stdin)')
options.add_argument(
'-db',
action='store',
dest='db_filename',
type=str,
default='make_profile.db',
help='Profile with timings')
options.add_argument(
'-o',
action='store',
dest='out_filename',
type=str,
default=None,
        help='dot file to write (default: stdout)')

args = options.parse_args(argv)

    in_file = open(args.in_filename, 'r') if args.in_filename else sys.stdin
    out_file = open(args.out_filename, 'w') if args.out_filename else sys.stdout

ast = parse(in_file)

performance = parse_timing_db(args.db_filename)
deps, influences, order_only = get_dependencies_influences(ast)

export_dot(
out_file,
influences,
deps,
order_only,
performance
)


if __name__ == '__main__':
main(sys.argv[1:])
109 changes: 109 additions & 0 deletions lib/parser.py
@@ -0,0 +1,109 @@
import collections

from more_itertools import peekable


class Tokens:
target = 'target'
command = 'command'
expression = 'expression'


def tokenizer(fd):
it = enumerate(fd)

    def glue_multiline(line):
        # join a backslash-continued line with its following physical lines
        lines = []
strip_line = line.strip()
while strip_line[-1] == '\\':
lines.append(strip_line.rstrip('\\').strip())
line_num, line = next(it)
strip_line = line.strip()
lines.append(strip_line.rstrip('\\').strip())
return ' '.join(lines)

for line_num, line in it:
strip_line = line.strip()
# skip empty lines
if not strip_line:
continue
        # skip comments
        if strip_line[0] == '#':
            continue
        # recipe (command) lines start with a tab
        elif line[0] == '\t':
            yield (Tokens.command, glue_multiline(line))
        # rule lines contain ':' but no '=' (which would mark an assignment)
        elif ':' in line and '=' not in line:
            yield (Tokens.target, glue_multiline(line))
        else:
            yield (Tokens.expression, line.strip())
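

# Illustrative example (hypothetical input): for a Makefile containing
#   all: build | logs
#   <TAB>gcc -o app main.c
#   CC = gcc
# the tokenizer yields:
#   (Tokens.target, 'all: build | logs')
#   (Tokens.command, 'gcc -o app main.c')
#   (Tokens.expression, 'CC = gcc')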


def parse(fd):
ast = []
it = peekable(tokenizer(fd))

def parse_target(token):
line = token[1]
        target, deps = line.split(':', 1)
        # everything after '|' is an order-only prerequisite
        raw_deps = deps.strip().split('|', 1)
        deps = raw_deps[0]
        order_deps = raw_deps[1] if raw_deps[1:] else ''
body = parse_body()
ast.append((
token[0],
{
'target': target.strip(),
'deps': [
sorted(deps.split()) if deps else [],
list(order_deps.split()) if order_deps else []
],
'body': body
})
)

    def parse_body():
        # collect command/expression tokens until the next target (or EOF)
        body = []
        try:
            while it.peek()[0] != Tokens.target:
                body.append(next(it))
        except StopIteration:
            pass
        return body

for token in it:
if token[0] == Tokens.target:
parse_target(token)
else:
# expression
ast.append(token)

return ast
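

# For the tokenizer example above, parse() returns an AST shaped like:
#   [('target', {'target': 'all',
#                'deps': [['build'], ['logs']],
#                'body': [('command', 'gcc -o app main.c'),
#                         ('expression', 'CC = gcc')]})]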


def get_dependencies_influences(ast):
dependencies = {}
influences = collections.defaultdict(set)
order_only = set()

for item_t, item in ast:
if item_t != Tokens.target:
continue
target = item['target']
deps, order_deps = item['deps']

if target not in ('.PHONY',):
dependencies[target] = [deps, order_deps]

        # influences: touch the defaultdict so the target node exists even
        # if it influences nothing
        influences[target]
        for k in deps:
            influences[k].add(target)
        for k in order_deps:
            # order-only prerequisites are recorded but add no influence edge
            influences[k]
order_only.update(order_deps)
return (dependencies, influences, order_only)
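

# Continuing the example, get_dependencies_influences returns:
#   dependencies: {'all': [['build'], ['logs']]}
#   influences:   {'all': set(), 'build': {'all'}, 'logs': set()}
#   order_only:   {'logs'}
# influences holds the reverse edge direction of dependencies; order-only
# prerequisites never propagate influence.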

File renamed without changes.
