-#! /usr/bin/env python2.7
+#! /usr/bin/env python3
# Copyright (c) 2010 Advanced Micro Devices, Inc.
# All rights reserved.
interval = options.interval
if os.path.exists(options.directory):
- print 'Error: test directory', options.directory, 'exists'
- print ' Tester needs to create directory from scratch'
+ print('Error: test directory', options.directory, 'exists')
+ print(' Tester needs to create directory from scratch')
sys.exit(1)
top_dir = options.directory
os.mkdir(top_dir)
cmd_echo = open(os.path.join(top_dir, 'command'), 'w')
-print >>cmd_echo, ' '.join(sys.argv)
+print(' '.join(sys.argv), file=cmd_echo)
cmd_echo.close()
m5_binary = args[0]
cptdir = os.path.join(top_dir, 'm5out')
-print '===> Running initial simulation.'
+print('===> Running initial simulation.')
subprocess.call([m5_binary] + ['-red', cptdir] + options + initial_args)
dirs = os.listdir(cptdir)
# original checkpoint N+1. Thus the number of tests we can run is one
# less than tha number of checkpoints.
for i in range(1, len(cpts)):
- print '===> Running test %d of %d.' % (i, len(cpts)-1)
+ print('===> Running test %d of %d.' % (i, len(cpts)-1))
mydir = os.path.join(top_dir, 'test.%d' % i)
subprocess.call([m5_binary] + ['-red', mydir] + options + initial_args +
['--max-checkpoints' , '1', '--checkpoint-dir', cptdir,
diffout.close()
# print out the diff
diffout = open(diff_name)
- print diffout.read(),
+ print(diffout.read(), end=' ')
diffout.close()
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import gzip
import sys, re, os
num_digits = len(str(len(cpts)-1))
for (i, arg) in enumerate(cpts):
- print arg
+ print(arg)
merged_config = myCP()
config = myCP()
config.readfp(open(cpts[i] + "/m5.cpt"))
### memory stuff
pages = int(config.get("system", "pagePtr"))
page_ptr = page_ptr + pages
- print "pages to be read: ", pages
+ print("pages to be read: ", pages)
f = open(cpts[i] + "/system.physmem.store0.pmem", "rb")
gf = gzip.GzipFile(fileobj=f, mode="rb")
file_size += 4 * 1024
page_ptr += 1
- print "WARNING: "
- print "Make sure the simulation using this checkpoint has at least ",
- print page_ptr, "x 4K of memory"
+ print("WARNING: ")
+ print("Make sure the simulation using this checkpoint has at least ", end=' ')
+ print(page_ptr, "x 4K of memory")
merged_config.set("system.physmem.store0", "range_size", page_ptr * 4 * 1024)
merged_config.add_section("Globals")
# Assume x86 ISA. Any other ISAs would need extra stuff in this script
# to appropriately parse their page tables and understand page sizes.
options = parser.parse_args()
- print options.cpts, len(options.cpts)
+ print(options.cpts, len(options.cpts))
if len(options.cpts) <= 1:
parser.error("You must specify atleast two checkpoint files that "\
"need to be combined.")
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# upgrader. This can be especially valuable when maintaining private
# upgraders in private branches.
-from __future__ import print_function
+
from six.moves import configparser
import glob, types, sys, os
i = i + 1
# resolve forward dependencies and audit normal dependencies
- for tag, upg in Upgrader.by_tag.items():
+ for tag, upg in list(Upgrader.by_tag.items()):
for fd in upg.fwd_depends:
if fd not in Upgrader.by_tag:
print("Error: '{}' cannot (forward) depend on "
# Legacy linear checkpoint version
# convert to list of tags before proceeding
tags = set([])
- for i in xrange(2, cpt_ver+1):
+ for i in range(2, cpt_ver+1):
tags.add(Upgrader.legacy[i].tag)
verboseprint("performed legacy version -> tags conversion")
change = True
print("// this file is auto-generated by util/cpt_upgrader.py")
print("#include <string>")
print("#include <set>")
- print
+ print()
print("std::set<std::string> version_tags = {")
for tag in Upgrader.tag_set:
print(" \"{}\",".format(tag))
b_intPriority = cpt.get(sec, '*bankedIntPriority').split()
cpt.remove_option(sec, '*bankedIntPriority')
- for cpu in xrange(0, 255):
+ for cpu in range(255):
if cpuEnabled[cpu] == 'true':
intPriority = b_intPriority[cpu*32 : (cpu+1)*32]
new_sec = "%s.bankedRegs%u" % (sec, cpu)
for sec in cpt.sections():
if re.search('.*\.hdlcd$', sec):
options = {}
- for new, old in option_names.items():
+ for new, old in list(option_names.items()):
options[new] = cpt.get(sec, old)
cpt.remove_section(sec)
cpt.add_section(sec)
- for key, value in options.items():
+ for key, value in list(options.items()):
cpt.set(sec, key, value)
# Create a DMA engine section. The LCD controller will
# v8 has 128 normal fp and 32 special fp regs compared
# to v7's 64 normal fp and 8 special fp regs.
# Insert the extra normal fp registers at end of v7 normal fp regs
- for x in xrange(64):
+ for x in range(64):
fpr.insert(64, "0")
# Append the extra special registers
- for x in xrange(24):
+ for x in range(24):
fpr.append("0")
cpt.set(sec, 'floatRegs.i', ' '.join(str(x) for x in fpr))
# splice in the new misc registers, ~200 -> 605 registers,
# ordering does not remain consistent
mr_old = cpt.get(sec, 'miscRegs').split()
- mr_new = [ '0' for x in xrange(605) ]
+ mr_new = [ '0' for x in range(605) ]
# map old v7 miscRegs to new v8 miscRegs
mr_new[0] = mr_old[0] # CPSR
# find C/C++ sources
okfiles = [f for f in files if oksuffix(f)]
if okfiles:
- print >> file_list, \
- '\n'.join([os.path.join(dirpath, f) for f in okfiles])
+ print('\n'.join([os.path.join(dirpath, f) for f in okfiles]),
+ file=file_list)
file_list.close()
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2013 - 2015 ARM Limited
# All rights reserved
try:
import inst_dep_record_pb2
except:
- print "Did not find proto definition, attempting to generate"
+ print("Did not find proto definition, attempting to generate")
from subprocess import call
error = call(['protoc', '--python_out=util', '--proto_path=src/proto',
'src/proto/inst_dep_record.proto'])
if not error:
import inst_dep_record_pb2
- print "Generated proto definitions for instruction dependency record"
+ print("Generated proto definitions for instruction dependency record")
else:
- print "Failed to import proto definitions"
+ print("Failed to import proto definitions")
exit(-1)
def main():
if len(sys.argv) != 3:
- print "Usage: ", sys.argv[0], " <protobuf input> <ASCII output>"
+ print("Usage: ", sys.argv[0], " <protobuf input> <ASCII output>")
exit(-1)
# Open the file on read mode
try:
ascii_out = open(sys.argv[2], 'w')
except IOError:
- print "Failed to open ", sys.argv[2], " for writing"
+ print("Failed to open ", sys.argv[2], " for writing")
exit(-1)
# Read the magic number in 4-byte Little Endian
magic_number = proto_in.read(4)
-if magic_number != "gem5":
+if magic_number != b"gem5":
- print "Unrecognized file"
+ print("Unrecognized file")
exit(-1)
- print "Parsing packet header"
+ print("Parsing packet header")
# Add the packet header
header = inst_dep_record_pb2.InstDepRecordHeader()
protolib.decodeMessage(proto_in, header)
- print "Object id:", header.obj_id
- print "Tick frequency:", header.tick_freq
+ print("Object id:", header.obj_id)
+ print("Tick frequency:", header.tick_freq)
- print "Parsing packets"
+ print("Parsing packets")
- print "Creating enum value,name lookup from proto"
+ print("Creating enum value,name lookup from proto")
enumNames = {}
desc = inst_dep_record_pb2.InstDepRecord.DESCRIPTOR
- for namestr, valdesc in desc.enum_values_by_name.items():
- print '\t', valdesc.number, namestr
+ for namestr, valdesc in list(desc.enum_values_by_name.items()):
+ print('\t', valdesc.number, namestr)
enumNames[valdesc.number] = namestr
num_packets = 0
try:
ascii_out.write(',%s' % enumNames[packet.type])
except KeyError:
- print "Seq. num", packet.seq_num, "has unsupported type", \
- packet.type
+ print("Seq. num", packet.seq_num, "has unsupported type", \
+ packet.type)
exit(-1)
# New line
ascii_out.write('\n')
- print "Parsed packets:", num_packets
- print "Packets with at least 1 reg dep:", num_regdeps
- print "Packets with at least 1 rob dep:", num_robdeps
+ print("Parsed packets:", num_packets)
+ print("Packets with at least 1 reg dep:", num_regdeps)
+ print("Packets with at least 1 rob dep:", num_robdeps)
# We're done
ascii_out.close()
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2013-2014 ARM Limited
# All rights reserved
try:
import inst_pb2
except:
- print "Did not find protobuf inst definitions, attempting to generate"
+ print("Did not find protobuf inst definitions, attempting to generate")
from subprocess import call
error = call(['protoc', '--python_out=util', '--proto_path=src/proto',
'src/proto/inst.proto'])
if not error:
- print "Generated inst proto definitions"
+ print("Generated inst proto definitions")
try:
import google.protobuf
except:
- print "Please install Python protobuf module"
+ print("Please install Python protobuf module")
exit(-1)
import inst_pb2
else:
- print "Failed to import inst proto definitions"
+ print("Failed to import inst proto definitions")
exit(-1)
def main():
if len(sys.argv) != 3:
- print "Usage: ", sys.argv[0], " <protobuf input> <ASCII output>"
+ print("Usage: ", sys.argv[0], " <protobuf input> <ASCII output>")
exit(-1)
# Open the file in read mode
try:
ascii_out = open(sys.argv[2], 'w')
except IOError:
- print "Failed to open ", sys.argv[2], " for writing"
+ print("Failed to open ", sys.argv[2], " for writing")
exit(-1)
# Read the magic number in 4-byte Little Endian
magic_number = proto_in.read(4)
-if magic_number != "gem5":
+if magic_number != b"gem5":
- print "Unrecognized file", sys.argv[1]
+ print("Unrecognized file", sys.argv[1])
exit(-1)
- print "Parsing instruction header"
+ print("Parsing instruction header")
# Add the packet header
header = inst_pb2.InstHeader()
protolib.decodeMessage(proto_in, header)
- print "Object id:", header.obj_id
- print "Tick frequency:", header.tick_freq
- print "Memory addresses included:", header.has_mem
+ print("Object id:", header.obj_id)
+ print("Tick frequency:", header.tick_freq)
+ print("Memory addresses included:", header.has_mem)
if header.ver != 0:
- print "Warning: file version newer than decoder:", header.ver
- print "This decoder may not understand how to decode this file"
+ print("Warning: file version newer than decoder:", header.ver)
+ print("This decoder may not understand how to decode this file")
- print "Parsing instructions"
+ print("Parsing instructions")
num_insts = 0
inst = inst_pb2.Inst()
ascii_out.write('\n')
num_insts += 1
- print "Parsed instructions:", num_insts
+ print("Parsed instructions:", num_insts)
# We're done
ascii_out.close()
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2013-2014 ARM Limited
# All rights reserved
def main():
if len(sys.argv) != 3:
- print "Usage: ", sys.argv[0], " <protobuf input> <ASCII output>"
+ print("Usage: ", sys.argv[0], " <protobuf input> <ASCII output>")
exit(-1)
# Open the file in read mode
try:
ascii_out = open(sys.argv[2], 'w')
except IOError:
- print "Failed to open ", sys.argv[2], " for writing"
+ print("Failed to open ", sys.argv[2], " for writing")
exit(-1)
# Read the magic number in 4-byte Little Endian
magic_number = proto_in.read(4)
-if magic_number != "gem5":
+if magic_number != b"gem5":
- print "Unrecognized file", sys.argv[1]
+ print("Unrecognized file", sys.argv[1])
exit(-1)
- print "Parsing packet header"
+ print("Parsing packet header")
# Add the packet header
header = packet_pb2.PacketHeader()
protolib.decodeMessage(proto_in, header)
- print "Object id:", header.obj_id
- print "Tick frequency:", header.tick_freq
+ print("Object id:", header.obj_id)
+ print("Tick frequency:", header.tick_freq)
for id_string in header.id_strings:
- print 'Master id %d: %s' % (id_string.key, id_string.value)
+ print('Master id %d: %s' % (id_string.key, id_string.value))
- print "Parsing packets"
+ print("Parsing packets")
num_packets = 0
packet = packet_pb2.Packet()
else:
ascii_out.write('\n')
- print "Parsed packets:", num_packets
+ print("Parsed packets:", num_packets)
# We're done
ascii_out.close()
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2015 ARM Limited
# All rights reserved
try:
import inst_dep_record_pb2
except:
- print "Did not find proto definition, attempting to generate"
+ print("Did not find proto definition, attempting to generate")
from subprocess import call
error = call(['protoc', '--python_out=util', '--proto_path=src/proto',
'src/proto/inst_dep_record.proto'])
if not error:
import inst_dep_record_pb2
- print "Generated proto definitions for instruction dependency record"
+ print("Generated proto definitions for instruction dependency record")
else:
- print "Failed to import proto definitions"
+ print("Failed to import proto definitions")
exit(-1)
DepRecord = inst_dep_record_pb2.InstDepRecord
def main():
if len(sys.argv) != 3:
- print "Usage: ", sys.argv[0], " <ASCII input> <protobuf output>"
+ print("Usage: ", sys.argv[0], " <ASCII input> <protobuf output>")
exit(-1)
# Open the file in write mode
try:
ascii_in = open(sys.argv[1], 'r')
except IOError:
- print "Failed to open ", sys.argv[1], " for reading"
+ print("Failed to open ", sys.argv[1], " for reading")
exit(-1)
# Write the magic number in 4-byte Little Endian, similar to what
header.window_size = 120
protolib.encodeMessage(proto_out, header)
- print "Creating enum name,value lookup from proto"
+ print("Creating enum name,value lookup from proto")
enumValues = {}
for namestr, valdesc in DepRecord.DESCRIPTOR.enum_values_by_name.items():
- print '\t', namestr, valdesc.number
+ print('\t', namestr, valdesc.number)
enumValues[namestr] = valdesc.number
num_records = 0
inst_info_list = inst_info_str.split(',')
dep_record = DepRecord()
- dep_record.seq_num = long(inst_info_list[0])
- dep_record.pc = long(inst_info_list[1])
- dep_record.weight = long(inst_info_list[2])
+ dep_record.seq_num = int(inst_info_list[0])
+ dep_record.pc = int(inst_info_list[1])
+ dep_record.weight = int(inst_info_list[2])
# If the type is not one of the enum values, it should be a key error
try:
dep_record.type = enumValues[inst_info_list[3]]
except KeyError:
- print "Seq. num", dep_record.seq_num, "has unsupported type", \
- inst_info_list[3]
+ print("Seq. num", dep_record.seq_num, "has unsupported type", \
+ inst_info_list[3])
exit(-1)
if dep_record.type == DepRecord.INVALID:
- print "Seq. num", dep_record.seq_num, "is of INVALID type"
+ print("Seq. num", dep_record.seq_num, "is of INVALID type")
exit(-1)
# If the instruction is a load or store record the physical addr,
# size flags in addition to recording the computation delay
if dep_record.type in [DepRecord.LOAD, DepRecord.STORE]:
p_addr, size, flags, comp_delay = inst_info_list[4:8]
- dep_record.p_addr = long(p_addr)
+ dep_record.p_addr = int(p_addr)
dep_record.size = int(size)
dep_record.flags = int(flags)
- dep_record.comp_delay = long(comp_delay)
+ dep_record.comp_delay = int(comp_delay)
else:
comp_delay = inst_info_list[4]
- dep_record.comp_delay = long(comp_delay)
+ dep_record.comp_delay = int(comp_delay)
# Parse the register and order dependencies both of which are
# repeated fields. An empty list is valid.
# if the string is ",4", split(',') returns 2 items: '', '4'
# long('') gives error, so check if the item is non-empty
if a_dep:
- dep_record.rob_dep.append(long(a_dep))
+ dep_record.rob_dep.append(int(a_dep))
reg_deps = reg_dep_str.split(',')
for a_dep in reg_deps:
if a_dep:
- dep_record.reg_dep.append(long(a_dep))
+ dep_record.reg_dep.append(int(a_dep))
protolib.encodeMessage(proto_out, dep_record)
num_records += 1
- print "Converted", num_records, "records."
+ print("Converted", num_records, "records.")
# We're done
ascii_in.close()
proto_out.close()
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2013-2014 ARM Limited
# All rights reserved
try:
import packet_pb2
except:
- print "Did not find packet proto definitions, attempting to generate"
+ print("Did not find packet proto definitions, attempting to generate")
from subprocess import call
error = call(['protoc', '--python_out=util', '--proto_path=src/proto',
'src/proto/packet.proto'])
if not error:
- print "Generated packet proto definitions"
+ print("Generated packet proto definitions")
try:
import google.protobuf
except:
- print "Please install the Python protobuf module"
+ print("Please install the Python protobuf module")
exit(-1)
import packet_pb2
else:
- print "Failed to import packet proto definitions"
+ print("Failed to import packet proto definitions")
exit(-1)
def main():
if len(sys.argv) != 3:
- print "Usage: ", sys.argv[0], " <ASCII input> <protobuf output>"
+ print("Usage: ", sys.argv[0], " <ASCII input> <protobuf output>")
exit(-1)
try:
ascii_in = open(sys.argv[1], 'r')
except IOError:
- print "Failed to open ", sys.argv[1], " for reading"
+ print("Failed to open ", sys.argv[1], " for reading")
exit(-1)
try:
proto_out = open(sys.argv[2], 'wb')
except IOError:
- print "Failed to open ", sys.argv[2], " for writing"
+ print("Failed to open ", sys.argv[2], " for writing")
exit(-1)
# Write the magic number in 4-byte Little Endian, similar to what
for line in ascii_in:
cmd, addr, size, tick = line.split(',')
packet = packet_pb2.Packet()
- packet.tick = long(tick)
+ packet.tick = int(tick)
# ReadReq is 1 and WriteReq is 4 in src/mem/packet.hh Command enum
packet.cmd = 1 if cmd == 'r' else 4
- packet.addr = long(addr)
+ packet.addr = int(addr)
packet.size = int(size)
protolib.encodeMessage(proto_out, packet)
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
import os
import re
return
else:
- raise AttributeError, "Could not handle language %s" % lang_type
+ raise AttributeError("Could not handle language %s" % lang_type)
date_range_re = re.compile(r'([0-9]{4})\s*-\s*([0-9]{4})')
def process_dates(dates):
match = date_range_re.match(date)
if match:
f,l = [ int(d) for d in match.groups() ]
- for i in xrange(f, l+1):
+ for i in range(f, l+1):
output.add(i)
else:
try:
try:
dates = process_dates(dates)
except Exception:
- print dates
- print owner
+ print(dates)
+ print(owner)
raise
authors = []
- for i in xrange(start,end+1):
+ for i in range(start,end+1):
line = lines[i]
if not authors:
match = authors_re.search(line)
else:
match = more_authors_re.search(line)
if not match:
- for j in xrange(i, end+1):
+ for j in range(i, end+1):
line = lines[j].strip()
if not line:
end = j
%s [-v] <directory>"""
def usage(exitcode):
- print usage_str % sys.argv[0]
+ print(usage_str % sys.argv[0])
if exitcode is not None:
sys.exit(exitcode)
elif os.path.isdir(base):
files += find_files(base)
else:
- raise AttributeError, "can't access '%s'" % base
+ raise AttributeError("can't access '%s'" % base)
copyrights = {}
counts = {}
lt = lang_type(filename, lines[0])
try:
data = get_data(lt, lines)
- except Exception, e:
+ except Exception as e:
if verbose:
if len(e.args) == 1:
e.args = ('%s (%s))' % (e, filename), )
- print "could not parse %s: %s" % (filename, e)
+ print("could not parse %s: %s" % (filename, e))
continue
for owner, dates, authors, start, end in data:
copyrights[owner] |= dates
counts[owner] += 1
- info = [ (counts[o], d, o) for o,d in copyrights.items() ]
+ info = [ (counts[o], d, o) for o,d in list(copyrights.items()) ]
for count,dates,owner in sorted(info, reverse=True):
if show_counts:
owner = '%s (%s files)' % (owner, count)
- print 'Copyright (c) %s %s' % (datestr(dates), owner)
+ print('Copyright (c) %s %s' % (datestr(dates), owner))
-#!/usr/bin/python2.7
+#!/usr/bin/python3
#
# Copyright 2020 Google, Inc.
#
def chsFromSize(sizeInBlocks):
if sizeInBlocks >= MaxLBABlocks:
sizeInMBs = (sizeInBlocks * BlockSize) / MB
- print '%d MB is too big for LBA, truncating file.' % sizeInMBs
+ print('%d MB is too big for LBA, truncating file.' % sizeInMBs)
return (MaxLBACylinders, MaxLBAHeads, MaxLBASectors)
sectors = sizeInBlocks
if not hasattr(needSudo, 'notRoot'):
needSudo.notRoot = (os.geteuid() != 0)
if needSudo.notRoot:
- print 'You are not root. Using sudo.'
+ print('You are not root. Using sudo.')
return needSudo.notRoot
# Run an external command.
def runCommand(command, inputVal=''):
- print "%>", ' '.join(command)
+ print("%>", ' '.join(command))
proc = Popen(command, stdin=PIPE)
proc.communicate(inputVal)
return proc.returncode
def getOutput(command, inputVal=''):
global debug
if debug:
- print "%>", ' '.join(command)
+ print("%>", ' '.join(command))
proc = Popen(command, stderr=STDOUT,
stdin=PIPE, stdout=PIPE)
(out, err) = proc.communicate(inputVal)
assert not self.devFile
(out, returncode) = privOutput([findProg('losetup'), '-f'])
if returncode != 0:
- print out
+ print(out)
return returncode
-self.devFile = string.strip(out)
+self.devFile = out.strip()
command = [findProg('losetup'), self.devFile, fileName]
command = [findProg('sfdisk'), '-d', dev.devFile]
(out, returncode) = privOutput(command)
if returncode != 0:
- print out
+ print(out)
exit(returncode)
lines = out.splitlines()
# Make sure the first few lines of the output look like what we expect.
def mountPointToDev(mountPoint):
(mountTable, returncode) = getOutput([findProg('mount')])
if returncode != 0:
- print mountTable
+ print(mountTable)
exit(returncode)
mountTable = mountTable.splitlines()
for line in mountTable:
def mountComFunc(options, args):
(path, mountPoint) = args
if not os.path.isdir(mountPoint):
- print "Mount point %s is not a directory." % mountPoint
+ print("Mount point %s is not a directory." % mountPoint)
dev = LoopbackDevice()
if dev.setup(path, offset=True) != 0:
def umountComFunc(options, args):
(mountPoint,) = args
if not os.path.isdir(mountPoint):
- print "Mount point %s is not a directory." % mountPoint
+ print("Mount point %s is not a directory." % mountPoint)
exit(1)
dev = mountPointToDev(mountPoint)
if not dev:
- print "Unable to find mount information for %s." % mountPoint
+ print("Unable to find mount information for %s." % mountPoint)
# Unmount the loopback device.
if runPriv([findProg('umount'), mountPoint]) != 0:
# Figure out what command was requested and execute it.
if len(argv) < 2 or argv[1] not in commands:
- print 'Usage: %s [command] <command arguments>'
- print 'where [command] is one of '
+ print('Usage: %s [command] <command arguments>')
+ print('where [command] is one of ')
for name in commandOrder:
command = commands[name]
- print ' %s: %s' % (command.name, command.description)
- print 'Watch for orphaned loopback devices and delete them with'
- print 'losetup -d. Mounted images will belong to root, so you may need'
- print 'to use sudo to modify their contents.'
+ print(' %s: %s' % (command.name, command.description))
+ print('Watch for orphaned loopback devices and delete them with')
+ print('losetup -d. Mounted images will belong to root, so you may need')
+ print('to use sudo to modify their contents.')
exit(1)
command = commands[argv[1]]
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2020 ARM Limited
# All rights reserved.
import os
def run_cmd(explanation, working_dir, cmd, stdout = None):
- print "Running phase '%s'" % explanation
+ print("Running phase '%s'" % explanation)
sys.stdout.flush()
# some of the commands need $PWD to be properly set
if return_code == 0:
return
- print "Error running phase %s. Returncode: %d" % (explanation, return_code)
+ print("Error running phase %s. Returncode: %d" % (explanation, return_code))
sys.exit(1)
def linux_clone():
help = "Number of jobs to use with the 'make' commands. Default value: "
"%default")
parser.add_option("-b", "--fs-binaries", action="append",
- choices=all_binaries.keys(), default=[],
+ choices=list(all_binaries.keys()), default=[],
help = "List of FS files to be generated. Defaulting to all")
(options, args) = parser.parse_args()
if args:
- print "Unrecognized argument(s) %s." % args
+ print("Unrecognized argument(s) %s." % args)
sys.exit(1)
if not os.path.isdir(options.dest_dir):
- print "Error: %s is not a directory." % options.dest_dir
+ print("Error: %s is not a directory." % options.dest_dir)
sys.exit(1)
if not os.path.isdir(options.gem5_dir):
- print "Error: %s is not a directory." % options.gem5_dir
+ print("Error: %s is not a directory." % options.gem5_dir)
sys.exit(1)
if machine() != "x86_64":
- print "Error: This script should run in a x86_64 machine"
+ print("Error: This script should run in a x86_64 machine")
sys.exit(1)
binaries_dir = options.dest_dir + "/binaries"
if os.path.exists(binaries_dir):
- print "Error: %s already exists." % binaries_dir
+ print("Error: %s already exists." % binaries_dir)
sys.exit(1)
revisions_dir = options.dest_dir + "/revisions"
if os.path.exists(revisions_dir):
- print "Error: %s already exists." %revisions_dir
+ print("Error: %s already exists." %revisions_dir)
sys.exit(1)
os.mkdir(binaries_dir);
rev_file)
rev_file.close()
-binaries = options.fs_binaries if options.fs_binaries else all_binaries.keys()
+binaries = options.fs_binaries if options.fs_binaries else list(all_binaries.keys())
for fs_binary in binaries:
all_binaries[fs_binary]()
-print "Done! All the generated files can be found in %s" % binaries_dir
+print("Done! All the generated files can be found in %s" % binaries_dir)
sys.exit(0)
print(error_message)
print("The commit has been cancelled, but a copy of it can be found in "
- + sys.argv[1] + " : ")
+ + sys.argv[1] + " : ")
print("""
--------------------------------------------------------------------------
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-from __future__ import print_function
+
from tempfile import TemporaryFile
import os
-# Suppress detection of leaks from within the python 2.7 interpreter.
-leak:libpython2.7.so
+# Suppress detection of leaks from within the python3 interpreter.
+leak:libpython3
def __init__(self, ydict: Mapping[str, Any]):
self._subsystems = {}
- for tag, maint in ydict.items():
+ for tag, maint in list(ydict.items()):
self._subsystems[tag] = Maintainers._parse_subsystem(tag, maint)
@classmethod
description=ydict.get('desc', ''))
def __iter__(self) -> Iterator[Tuple[str, Subsystem]]:
- return iter(self._subsystems.items())
+ return iter(list(self._subsystems.items()))
def __getitem__(self, key: str) -> Subsystem:
return self._subsystems[key]
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
#
# Copyright (c) 2017-2018 Arm Limited
# All rights reserved
self._tags = None
def _git(self, args):
- return subprocess.check_output([ "git", ] + args)
+ return subprocess.check_output([ "git", ] + args).decode()
@property
def log(self):
changes = subprocess.check_output(
[ "git", "rev-list", query, '--'] + paths
- )
+ ).decode()
if changes == "":
return
upstream_cids = dict([
(c.change_id, c) for c in upstream_revs if c.change_id is not None ])
- incoming = filter(
- lambda r: r.change_id and r.change_id not in feature_cids,
- reversed(upstream_revs))
- outgoing = filter(
- lambda r: r.change_id and r.change_id not in upstream_cids,
- reversed(feature_revs))
- common = filter(
- lambda r: r.change_id in upstream_cids,
- reversed(feature_revs))
- upstream_unknown = filter(
- lambda r: r.change_id is None,
- reversed(upstream_revs))
- feature_unknown = filter(
- lambda r: r.change_id is None,
- reversed(feature_revs))
+ incoming = [r for r in reversed(upstream_revs) \
+ if r.change_id and r.change_id not in feature_cids]
+ outgoing = [r for r in reversed(feature_revs) \
+ if r.change_id and r.change_id not in upstream_cids]
+ common = [r for r in reversed(feature_revs) \
+ if r.change_id in upstream_cids]
+ upstream_unknown = [r for r in reversed(upstream_revs) \
+ if r.change_id is None]
+ feature_unknown = [r for r in reversed(feature_revs) \
+ if r.change_id is None]
return incoming, outgoing, common, upstream_unknown, feature_unknown
list_changes(args.upstream, args.feature, paths=args.paths)
if incoming:
- print "Incoming changes:"
+ print("Incoming changes:")
for rev in incoming:
- print rev
- print
+ print(rev)
+ print()
if args.show_unknown and upstream_unknown:
- print "Upstream changes without change IDs:"
+ print("Upstream changes without change IDs:")
for rev in upstream_unknown:
- print rev
- print
+ print(rev)
+ print()
if outgoing:
- print "Outgoing changes:"
+ print("Outgoing changes:")
for rev in outgoing:
- print rev
- print
+ print(rev)
+ print()
if args.show_common and common:
- print "Common changes:"
+ print("Common changes:")
for rev in common:
- print rev
- print
+ print(rev)
+ print()
if args.show_unknown and feature_unknown:
- print "Outgoing changes without change IDs:"
+ print("Outgoing changes without change IDs:")
for rev in feature_unknown:
- print rev
+ print(rev)
if args.deep_search:
- print "Incorrectly rebased changes:"
+ print("Incorrectly rebased changes:")
all_upstream_revs = list_revs(args.upstream, paths=args.paths)
all_upstream_cids = dict([
(c.change_id, c) for c in all_upstream_revs \
if c.change_id is not None ])
- incorrect_outgoing = filter(
- lambda r: r.change_id in all_upstream_cids,
- outgoing)
+ incorrect_outgoing = [r for r in outgoing if r.change_id in all_upstream_cids]
for rev in incorrect_outgoing:
- print rev
+ print(rev)
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
#
# Copyright (c) 2018 Advanced Micro Devices, Inc.
# All rights reserved.
odd = diff_files(args.upstream, args.feature, paths=args.paths)
- for key, value in odd.iteritems():
- print key
+ for key, value in odd.items():
+ print(key)
for entry in value:
- print " %s" % entry
+ print(" %s" % entry)
path = key + entry
sha = cl_hash(args.upstream, args.feature, path)
for s in sha:
- print "\t%s" % s
- print
+ print("\t%s" % s)
+ print()
if __name__ == "__main__":
_main()
-#! /usr/bin/env python2.7
+#! /usr/bin/env python3
# Copyright (c) 2015 ARM Limited
# All rights reserved
(options, args) = parser.parse_args()
if len(args) != 1:
- print "Error: Expecting a single argument specifying the gem5 binary"
+ print("Error: Expecting a single argument specifying the gem5 binary")
sys.exit(1)
gem5_binary = args[0]
status = subprocess.call([gem5_binary, 'configs/example/memtest.py',
'-r', '-m %d' % (options.ticks)])
if status != 0:
- print "Error: memtest run failed\n"
+ print("Error: memtest run failed\n")
sys.exit(1)
-print "memtest soak finished without errors"
+print("memtest soak finished without errors")
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
#
# Copyright (c) 2013 ARM Limited
# All rights reserved
import re
import math
-from point import Point
-import parse
-import colours
-from colours import backgroundColour, black
-import model
+from .point import Point
+from . import parse
+from . import colours
+from .colours import backgroundColour, black
+from . import model
def centre_size_to_sides(centre, size):
"""Returns a 4-tuple of the relevant ordinates of the left,
cr.line_to(left, top)
stroke_and_fill(cr, colours[0])
# Stripes
- for i in xrange(1, num_colours - 1):
+ for i in range(1, num_colours - 1):
xOffset = x_stripe_width * i
cr.move_to(left + xOffset - half_x_stripe_width, bottom)
cr.line_to(left + xOffset + half_x_stripe_width, bottom)
if len(strips) == 0:
strips = [[colours.errorColour]]
- print 'Problem with the colour of event:', event
+ print('Problem with the colour of event:', event)
num_strips = len(strips)
strip_proportion = 1.0 / num_strips
cr.set_line_width(view.midLineWidth / view.pitch.x)
# Draw the strips and their blocks
- for strip_index in xrange(0, num_strips):
+ for strip_index in range(num_strips):
num_blocks = len(strips[strip_index])
block_proportion = 1.0 / num_blocks
firstBlockOffset = (num_blocks / 2.0) - 0.5
block_centre = (strip_centre + strip_step.scale(strip_index) -
(block_size * block_step_base.scale(firstBlockOffset)))
- for block_index in xrange(0, num_blocks):
+ for block_index in range(num_blocks):
striped_box(cr, block_centre +
block_step.scale(block_index), block_size,
strips[strip_index][block_index])
cr.select_font_face('Helvetica', cairo.FONT_SLANT_NORMAL,
cairo.FONT_WEIGHT_BOLD)
- for i in xrange(0, num_colours):
+ for i in range(num_colours):
centre = first_blob_centre + blob_step.scale(i)
box(cr, centre, real_blob_size)
ret = unknownColour
return ret
-number_colour_code = map(name_to_colour, ['black', 'brown', 'red', 'orange',
- 'yellow', 'green', 'blue', 'violet', 'grey', 'white'])
+number_colour_code = list(map(name_to_colour, ['black', 'brown', 'red',
+ 'orange', 'yellow', 'green', 'blue', 'violet', 'grey', 'white']))
def number_to_colour(num):
"""Convert the last decimal digit of an integer into a resistor
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import parse
-import colours
-from colours import unknownColour
-from point import Point
+from . import parse
+from . import colours
+from .colours import unknownColour
+from .point import Point
import re
-import blobs
+from . import blobs
from time import time as wall_time
import os
return int(string)
if m is None:
- print 'Invalid Id string', string
+ print('Invalid Id string', string)
else:
elems = m.groups()
self.id = special_view_decoder(Id)(id)
# self.branch = special_view_decoder(Branch)(branch)
else:
- print "Bad Branch data:", string
+ print("Bad Branch data:", string)
return self
def to_striped_block(self, select):
self.counts = []
def from_string(self, string):
- self.counts = map(int, re.split('/', string))
+ self.counts = list(map(int, re.split('/', string)))
return self
def to_striped_block(self, select):
- return map(colours.number_to_colour, self.counts)
+ return list(map(colours.number_to_colour, self.counts))
class Colour(BlobVisualData):
"""A fixed colour block, used for special colour decoding"""
"""Factory for making decoders for particular block types"""
def decode(pairs):
if dataName not in pairs:
- print 'TwoDColours: no event data called:', \
- dataName, 'in:', pairs
+ print('TwoDColours: no event data called:', \
+ dataName, 'in:', pairs)
return class_([[Colour(colours.errorColour)]])
else:
parsed = parse.list_parser(pairs[dataName])
at strip=0, elem=1"""
def decode(pairs):
if dataName not in pairs:
- print 'TwoDColours: no event data called:', \
- dataName, 'in:', pairs
+ print('TwoDColours: no event data called:', \
+ dataName, 'in:', pairs)
return class_([[Colour(colours.errorColour)]])
else:
strips = int(picPairs['strips'])
parsed = parse.parse_indexed_list(raw_iv_pairs)
array = [[Colour(colours.emptySlotColour)
- for i in xrange(0, strip_elems)]
- for j in xrange(0, strips)]
+ for i in range(0, strip_elems)]
+ for j in range(0, strips)]
for index, value in parsed:
try:
-                    array[index % strips][index / strips] = \
+                    array[index % strips][index // strips] = \
special_view_decoder(elemClass)(value)
except:
- print "Element out of range strips: %d," \
+ print("Element out of range strips: %d," \
" stripelems %d, index: %d" % (strips,
- strip_elems, index)
+ strip_elems, index))
# return class_(array)
return class_(array)
"""Factory for element type"""
def decode(pairs):
if dataName not in pairs:
- print 'FrameColours: no event data called:', dataName, \
- 'in:', pairs
+ print('FrameColours: no event data called:', dataName, \
+ 'in:', pairs)
return class_([Colour(colours.errorColour)])
else:
parsed = parse.list_parser(pairs[dataName])
'w': '(w)rite'
}
-special_state_chars = special_state_colours.keys()
+special_state_chars = list(special_state_colours.keys())
# The complete set of available block data types
decoder_element_classes = {
else:
addrStr = '0x%x' % self.addr
ret = [addrStr, self.disassembly]
- for name, value in self.pairs.iteritems():
+ for name, value in self.pairs.items():
ret.append("%s=%s" % (name, str(value)))
return ret
line = model.find_line(lineId)
if line is not None:
ret.append(line)
- map(find_inst, blocks)
+ list(map(find_inst, blocks))
return sorted(ret)
class BlobModel(object):
self.lines = {}
self.numEvents = 0
- for unit, events in self.unitEvents.iteritems():
+ for unit, events in self.unitEvents.items():
self.unitEvents[unit] = []
def add_blob(self, blob):
if event.unit in self.unitEvents:
events = self.unitEvents[event.unit]
if len(events) > 0 and events[len(events)-1].time > event.time:
- print "Bad event ordering"
+ print("Bad event ordering")
events.append(event)
self.numEvents += 1
self.lastTime = max(self.lastTime, event.time)
"""Extract a list of all the times from the seen events. Call after
reading events to give a safe index list to use for time indices"""
times = {}
- for unitEvents in self.unitEvents.itervalues():
+ for unitEvents in self.unitEvents.values():
for event in unitEvents:
times[event.time] = 1
- self.times = times.keys()
+ self.times = list(times.keys())
self.times.sort()
def find_line(self, id):
next_progress_print_event_count = 1000
if not os.access(file, os.R_OK):
- print 'Can\'t open file', file
+ print('Can\'t open file', file)
exit(1)
else:
- print 'Opening file', file
+ print('Opening file', file)
f = open(file)
# When the time changes, resolve comments
if event_time != time:
if self.numEvents > next_progress_print_event_count:
- print ('Parsed to time: %d' % event_time)
+ print(('Parsed to time: %d' % event_time))
next_progress_print_event_count = (
self.numEvents + 1000)
update_comments(comments, time)
end_wall_time = wall_time()
- print 'Total events:', minor_trace_line_count, 'unique events:', \
- self.numEvents
- print 'Time to parse:', end_wall_time - start_wall_time
+ print('Total events:', minor_trace_line_count, 'unique events:', \
+ self.numEvents)
+ print('Time to parse:', end_wall_time - start_wall_time)
def add_blob_picture(self, offset, pic, nameDict):
"""Add a parsed ASCII-art pipeline markup to the model"""
direc = direc,
size = (Point(1, 1) + arrow_point - start)))
else:
- print 'Bad arrow', start
+ print('Bad arrow', start)
char = pic_at(start)
if char == '-\\':
elif typ == 'block':
ret = blobs.Block(char, unit, Point(0,0), colour)
else:
- print "Bad picture blog type:", typ
+ print("Bad picture blog type:", typ)
if 'hideId' in pairs:
hide = pairs['hideId']
if decoder is not None:
ret.visualDecoder = decoder
else:
- print 'Bad visualDecoder requested:', decoderName
+ print('Bad visualDecoder requested:', decoderName)
if 'border' in pairs:
border = pairs['border']
macros = {}
if not os.access(filename, os.R_OK):
- print 'Can\'t open file', filename
+ print('Can\'t open file', filename)
exit(1)
else:
- print 'Opening file', filename
+ print('Opening file', filename)
f = open(filename)
l = get_line(f)
# Setup the events structure
self.unitEvents[unit] = []
else:
- print 'Problem with Blob line:', l
+ print('Problem with Blob line:', l)
l = get_line(f)
ret.append([elem])
if len(accum) > 0:
- print 'Non matching brackets in', names
+ print('Non matching brackets in', names)
return ret
def map2(f, ls):
"""map to a depth of 2. That is, given a list of lists, apply
f to those innermost elements """
- return map(lambda l: map(f, l), ls)
+ return [list(map(f, l)) for l in ls]
def remove_trailing_ws(line):
return re.sub('\s*$', '', line)
import cairo
import re
-from point import Point
-import parse
-import colours
-import model
-from model import Id, BlobModel, BlobDataSelect, special_state_chars
-import blobs
+from .point import Point
+from . import parse
+from . import colours
+from . import model
+from .model import Id, BlobModel, BlobDataSelect, special_state_chars
+from . import blobs
class BlobView(object):
"""The canvas view of the pipeline"""
widths = [0] * num_columns
for line in lines:
- for i in xrange(0, len(line)):
+ for i in range(len(line)):
widths[i] = max(widths[i], text_width(line[i]))
# Calculate the size of the speech bubble
id_size = Point(id_width, text_size)
# Draw the rows in the table
- for i in xrange(0, len(insts)):
+ for i in range(0, len(insts)):
row_point = text_point
inst = insts[i]
line = lines[i]
row_point += Point(1.0, 0.0).scale(id_width)
row_point += text_step
# Draw the columns of each row
- for j in xrange(0, len(line)):
+ for j in range(0, len(line)):
row_point += gap_step
cr.move_to(*row_point.to_pair())
cr.show_text(line[j])
self.miniViewHBox = gtk.HBox(homogeneous=True, spacing=2)
# Draw mini views
- for i in xrange(1, self.miniViewCount + 1):
+ for i in range(1, self.miniViewCount + 1):
miniView = BlobView(self.model)
miniView.set_time_index(0)
miniView.masterScale = Point(0.1, 0.1)
self.window.add(self.vbox)
def show_event(picChar, event):
- print '**** Comments for', event.unit, \
- 'at time', self.view.time
- for name, value in event.pairs.iteritems():
- print name, '=', value
+ print('**** Comments for', event.unit, \
+ 'at time', self.view.time)
+ for name, value in event.pairs.items():
+ print(name, '=', value)
for comment in event.comments:
- print comment
+ print(comment)
if picChar in event.visuals:
# blocks = event.visuals[picChar].elems()
- print '**** Colour data'
+ print('**** Colour data')
objs = event.find_ided_objects(self.model, picChar, True)
for obj in objs:
- print ' '.join(obj.table_line())
+ print(' '.join(obj.table_line()))
def clicked_da(da, b):
point = Point(b.x, b.y)
self.view.set_da_size()
self.view.da.add_events(gtk.gdk.BUTTON_PRESS_MASK)
self.view.da.connect('button-press-event', clicked_da)
- self.window.connect('destroy', lambda(widget): gtk.main_quit())
+ self.window.connect('destroy', lambda widget: gtk.main_quit())
def resize(window, event):
"""Resize DrawingArea to match new window size"""
-#! /usr/bin/env python2.7
+#! /usr/bin/env python3
# Copyright (c) 2011 ARM Limited
# All rights reserved
parser.error('invalid range')
sys.exit(1)
# Process trace
- print 'Processing trace... ',
+ print('Processing trace... ', end=' ')
with open(args[0], 'r') as trace:
with open(options.outfile, 'w') as out:
process_trace(trace, out, options.cycle_time, options.width,
options.color, options.timestamps,
options.only_committed, options.store_completions,
*(tick_range + inst_range))
- print 'done!'
+ print('done!')
if __name__ == '__main__':
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import string, sys, subprocess, os
# Compile DSENT to generate the Python module and then import it.
def parseConfig(config_file):
config = ConfigParser()
if not config.read(config_file):
- print("ERROR: config file '", config_file, "' not found")
+        print("ERROR: config file '", config_file, "' not found")
sys.exit(1)
if not config.has_section("system.ruby.network"):
- print("ERROR: Ruby network not found in '", config_file)
+        print("ERROR: Ruby network not found in '", config_file)
sys.exit(1)
if config.get("system.ruby.network", "type") != "GarnetNetwork_d" :
- print("ERROR: Garnet network not used in '", config_file)
+        print("ERROR: Garnet network not used in '", config_file)
sys.exit(1)
number_of_virtual_networks = config.getint("system.ruby.network",
-#! /usr/bin/env python2.7
+#! /usr/bin/env python3
# Copyright (c) 2005 The Regents of The University of Michigan
# All rights reserved.
else:
name = app
- if categories.has_key(name):
+ if name in categories:
return categories[name]
for regexp, cat in categories_re:
if regexp.match(name):
return cat
- print "no match for symbol %s" % name
+ print("no match for symbol %s" % name)
return 'other'
try:
(opts, files) = getopt.getopt(sys.argv[1:], 'i')
except getopt.GetoptError:
- print "usage", sys.argv[0], "[-i] <files>"
+ print("usage", sys.argv[0], "[-i] <files>")
sys.exit(2)
showidle = True
for o,v in opts:
if o == "-i":
showidle = False
-print files
+print(files)
f = open(files.pop())
total = 0
prof = {}
# print "%s -- %5.1f%% " % (prof[i][1], 100 * float(prof[i][0])/float(total))
for d in cats:
- if prof.has_key(d):
- print "%s -- %5.1f%% " % (d, 100 * float(prof[d])/float(total))
+ if d in prof:
+ print("%s -- %5.1f%% " % (d, 100 * float(prof[d])/float(total)))
results[delay][bank_util][seq_bytes][state] = \
int(stime)
#### state energy values ####
- elif line.strip().split()[0] in StatToKey.keys():
+ elif line.strip().split()[0] in list(StatToKey.keys()):
# Example format:
# system.mem_ctrls_0.actEnergy 35392980
statistic, e_val = line.strip().split()[0:2]
fig, ax = plt.subplots()
width = 0.35
ind = np.arange(len(States))
- l1 = ax.bar(ind, map(lambda x : idleResults[x], States), width)
+ l1 = ax.bar(ind, [idleResults[x] for x in States], width)
ax.xaxis.set_ticks(ind + width/2)
ax.xaxis.set_ticklabels(States)
ax.set_ylabel('Time (ps) spent in a power state')
fig.suptitle("Idle 50 us")
- print "saving plot:", idlePlotName(plot_dir)
+ print("saving plot:", idlePlotName(plot_dir))
plt.savefig(idlePlotName(plot_dir), format='eps')
plt.close(fig)
# Must have a bottom of the stack first
state = bottom_state
- l_states[state] = map(lambda x: results[delay][bank_util][x][state],
- seqBytesValues)
+ l_states[state] = [results[delay][bank_util][x][state] \
+ for x in seqBytesValues]
p_states[state] = ax[sub_idx].bar(ind, l_states[state], width,
color=StackColors[state])
time_sum = l_states[state]
for state in states_list[1:]:
- l_states[state] = map(lambda x:
- results[delay][bank_util][x][state],
- seqBytesValues)
+ l_states[state] = [results[delay][bank_util][x][state] \
+ for x in seqBytesValues]
# Now add on top of the bottom = sum of values up until now
p_states[state] = ax[sub_idx].bar(ind, l_states[state], width,
color=StackColors[state],
myFontSize='small'
fontP = FontProperties()
fontP.set_size(myFontSize)
- fig.legend(map(lambda x: p_states[x], states_list), states_list,
+ fig.legend([p_states[x] for x in states_list], states_list,
prop=fontP)
plt.savefig(plot_name, format='eps', bbox_inches='tight')
- print "saving plot:", plot_name
+ print("saving plot:", plot_name)
plt.close(fig)
# These plat name functions are also called in the main script
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2015 ARM Limited
# All rights reserved
import matplotlib as mpl
import numpy as np
except ImportError:
- print "Failed to import matplotlib and numpy"
+ print("Failed to import matplotlib and numpy")
exit(-1)
import sys
def main():
if len(sys.argv) != 2:
- print "Usage: ", sys.argv[0], "<simout directory>"
+ print("Usage: ", sys.argv[0], "<simout directory>")
exit(-1)
try:
stats = open(sys.argv[1] + '/stats.txt', 'r')
except IOError:
- print "Failed to open ", sys.argv[1] + '/stats.txt', " for reading"
+ print("Failed to open ", sys.argv[1] + '/stats.txt', " for reading")
exit(-1)
try:
simout = open(sys.argv[1] + '/simout', 'r')
except IOError:
- print "Failed to open ", sys.argv[1] + '/simout', " for reading"
+ print("Failed to open ", sys.argv[1] + '/simout', " for reading")
exit(-1)
# Get the address ranges
simout.close()
if not got_ranges:
- print "Failed to get address ranges, ensure simout is up-to-date"
+ print("Failed to get address ranges, ensure simout is up-to-date")
exit(-1)
# Now parse the stats
for i in range(iterations):
rd_lat.append(filtered_rd_lat[i::iterations])
- final_rd_lat = map(lambda p: min(p), zip(*rd_lat))
+ final_rd_lat = [min(p) for p in zip(*rd_lat)]
# Sanity check
if not (len(ranges) == len(final_rd_lat)):
- print "Address ranges (%d) and read latency (%d) do not match" % \
- (len(ranges), len(final_rd_lat))
+ print("Address ranges (%d) and read latency (%d) do not match" % \
+ (len(ranges), len(final_rd_lat)))
exit(-1)
for (r, l) in zip(ranges, final_rd_lat):
- print r, round(l, 2)
+ print(r, round(l, 2))
# lazy version to check if an integer is a power of two
def is_pow2(num):
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2014 ARM Limited
# All rights reserved
import matplotlib.pyplot as plt
import numpy as np
except ImportError:
- print "Failed to import matplotlib and numpy"
+ print("Failed to import matplotlib and numpy")
exit(-1)
import sys
def main():
if len(sys.argv) != 3:
- print "Usage: ", sys.argv[0], "-u|p|e <simout directory>"
+ print("Usage: ", sys.argv[0], "-u|p|e <simout directory>")
exit(-1)
if len(sys.argv[1]) != 2 or sys.argv[1][0] != '-' or \
not sys.argv[1][1] in "upe":
- print "Choose -u (utilisation), -p (total power), or -e " \
- "(power efficiency)"
+ print("Choose -u (utilisation), -p (total power), or -e " \
+ "(power efficiency)")
exit(-1)
# Choose the appropriate mode, either utilisation, total power, or
try:
stats = open(sys.argv[2] + '/stats.txt', 'r')
except IOError:
- print "Failed to open ", sys.argv[2] + '/stats.txt', " for reading"
+ print("Failed to open ", sys.argv[2] + '/stats.txt', " for reading")
exit(-1)
try:
simout = open(sys.argv[2] + '/simout', 'r')
except IOError:
- print "Failed to open ", sys.argv[2] + '/simout', " for reading"
+ print("Failed to open ", sys.argv[2] + '/simout', " for reading")
exit(-1)
# Get the burst size, number of banks and the maximum stride from
simout.close()
if not got_sweep:
- print "Failed to establish sweep details, ensure simout is up-to-date"
+ print("Failed to establish sweep details, ensure simout is up-to-date")
exit(-1)
# Now parse the stats
# Sanity check
if not (len(peak_bw) == len(bus_util) and len(bus_util) == len(avg_pwr)):
- print "Peak bandwidth, bus utilisation, and average power do not match"
+ print("Peak bandwidth, bus utilisation, and average power do not match")
exit(-1)
# Collect the selected metric as our Z-axis, we do this in a 2D
# avg_pwr is in mW, peak_bw in MiByte/s, bus_util in percent
z.append(avg_pwr[j] / (bus_util[j] / 100.0 * peak_bw[j] / 1000.0))
else:
- print "Unexpected mode %s" % mode
+ print("Unexpected mode %s" % mode)
exit(-1)
i += 1
# We should have a 2D grid with as many columns as banks
if len(zs) != banks:
- print "Unexpected number of data points in stats output"
+ print("Unexpected number of data points in stats output")
exit(-1)
fig = plt.figure()
# place tex and pdf files in outdir
os.chdir(args.outdir)
texfile_s = 'stacked_lowp_sweep.tex'
- print "\t", texfile_s
+ print("\t", texfile_s)
outfile = open(texfile_s, 'w')
startDocText(outfile)
endDocText(outfile)
outfile.close()
- print "\n Generating pdf file"
- print "*******************************"
- print "\tpdflatex ", texfile_s
+ print("\n Generating pdf file")
+ print("*******************************")
+ print("\tpdflatex ", texfile_s)
# Run pdflatex to generate to pdf
call(["pdflatex", texfile_s])
call(["open", texfile_s.split('.')[0] + '.pdf'])
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2013 ARM Limited
# All rights reserved
except IOError:
proto_in = open(in_file, 'rb')
except IOError:
- print "Failed to open ", in_file, " for reading"
+ print("Failed to open ", in_file, " for reading")
exit(-1)
return proto_in
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
from matplotlib.numerix import array, arange, reshape, shape, transpose, zeros
from matplotlib.numerix import Float
from matplotlib.ticker import NullLocator
+from functools import reduce
matplotlib.interactive(False)
-from chart import ChartOptions
+from .chart import ChartOptions
class BarChart(ChartOptions):
def __init__(self, default=None, **kwargs):
data = array(data)
dim = len(shape(data))
if dim not in (1, 2, 3):
- raise AttributeError, "Input data must be a 1, 2, or 3d matrix"
+ raise AttributeError("Input data must be a 1, 2, or 3d matrix")
self.inputdata = data
# If the input data is a 1d matrix, then it describes a
err = array(err)
dim = len(shape(err))
if dim not in (1, 2, 3):
- raise AttributeError, "Input err must be a 1, 2, or 3d matrix"
+ raise AttributeError("Input err must be a 1, 2, or 3d matrix")
self.inputerr = err
if dim == 1:
#
def graph(self):
if self.chartdata is None:
- raise AttributeError, "Data not set for bar chart!"
+ raise AttributeError("Data not set for bar chart!")
dim = len(shape(self.inputdata))
cshape = shape(self.chartdata)
if self.charterr is not None and shape(self.charterr) != cshape:
- raise AttributeError, 'Dimensions of error and data do not match'
+ raise AttributeError('Dimensions of error and data do not match')
if dim == 1:
colors = self.gen_colors(cshape[2])
if dim == 1:
lbars = bars[0][0]
if dim == 2:
- lbars = [ bars[i][0][0] for i in xrange(len(bars))]
+ lbars = [ bars[i][0][0] for i in range(len(bars))]
if dim == 3:
number = len(bars[0])
- lbars = [ bars[0][number - j - 1][0] for j in xrange(number)]
+ lbars = [ bars[0][number - j - 1][0] for j in range(number)]
if self.fig_legend:
self.figure.legend(lbars, self.legend, self.legend_loc,
# generate a data matrix of the given shape
size = reduce(lambda x,y: x*y, myshape)
#data = [ random.randrange(size - i) + 10 for i in xrange(size) ]
- data = [ float(i)/100.0 for i in xrange(size) ]
+ data = [ float(i)/100.0 for i in range(size) ]
data = reshape(data, myshape)
# setup some test bar charts
chart1.xlabel = 'Benchmark'
chart1.ylabel = 'Bandwidth (GBps)'
- chart1.legend = [ 'x%d' % x for x in xrange(myshape[-1]) ]
- chart1.xticks = [ 'xtick%d' % x for x in xrange(myshape[0]) ]
+ chart1.legend = [ 'x%d' % x for x in range(myshape[-1]) ]
+ chart1.xticks = [ 'xtick%d' % x for x in range(myshape[0]) ]
chart1.title = 'this is the title'
if len(myshape) > 2:
- chart1.xsubticks = [ '%d' % x for x in xrange(myshape[1]) ]
+ chart1.xsubticks = [ '%d' % x for x in range(myshape[1]) ]
chart1.graph()
chart1.savefig('/tmp/test1.png')
chart1.savefig('/tmp/test1.ps')
}
def func_categorize(symbol):
- from categories import func_categories
+ from .categories import func_categories
if symbol in func_categories:
return func_categories[symbol]
return None
]
def pc_categorize(symbol):
- from categories import pc_categories, pc_categories_re
+ from .categories import pc_categories, pc_categories_re
if symbol in pc_categories:
return pc_categories[symbol]
for regexp, category in pc_categories_re:
def update(self, options=None, **kwargs):
if options is not None:
if not isinstance(options, ChartOptions):
- raise AttributeError, \
- 'attribute options of type %s should be %s' % \
- (type(options), ChartOptions)
+ raise AttributeError(
+ 'attribute options of type %s should be %s' %
+ (type(options), ChartOptions))
self.options.update(options.options)
- for key,value in kwargs.iteritems():
+ for key,value in kwargs.items():
if key not in ChartOptions.defaults:
- raise AttributeError, \
- "%s instance has no attribute '%s'" % (type(self), key)
+ raise AttributeError(
+ "%s instance has no attribute '%s'" % (type(self), key))
self.options[key] = value
def __getattr__(self, attr):
if attr in ChartOptions.defaults:
return ChartOptions.defaults[attr]
- raise AttributeError, \
- "%s instance has no attribute '%s'" % (type(self), attr)
+ raise AttributeError("%s instance has no attribute '%s'" % (type(self), attr))
def __setattr__(self, attr, value):
if attr in ChartOptions.defaults:
self.prereq = int(row[5])
self.precision = int(row[6])
- import flags
+ from . import flags
self.flags = 0
if int(row[4]): self.flags |= flags.printable
if int(row[7]): self.flags |= flags.nozero
def __getitem__(self, run):
if run not in self.data:
- self.data[run] = [ [ 0.0 ] * self.y for i in xrange(self.x) ]
+ self.data[run] = [ [ 0.0 ] * self.y for i in range(self.x) ]
return self.data[run]
class Database(object):
if run is None:
return None
- from info import ProxyError, scalar, vector, value, values, total, len
+ from .info import ProxyError, scalar, vector, value, values, total, len
if system is None and hasattr(job, 'system'):
system = job.system
x = self
while len(path) > 1:
name = path.pop(0)
- if not x.__dict__.has_key(name):
+ if name not in x.__dict__:
x.__dict__[name] = Node(fullname + name)
x = x.__dict__[name]
fullname = '%s%s.' % (fullname, name)
self.query('select sd_stat,sd_x,sd_y,sd_name,sd_descr from subdata')
for result in self.cursor.fetchall():
subdata = SubData(result)
- if self.allSubData.has_key(subdata.stat):
+ if subdata.stat in self.allSubData:
self.allSubData[subdata.stat].append(subdata)
else:
self.allSubData[subdata.stat] = [ subdata ]
StatData.db = self
self.query('select * from stats')
- import info
+ from . import info
for result in self.cursor.fetchall():
stat = info.NewStat(self, StatData(result))
self.append(stat)
# Desc: Prints all runs matching a given user, if no argument
# is given all runs are returned
def listRuns(self, user=None):
- print '%-40s %-10s %-5s' % ('run name', 'user', 'id')
- print '-' * 62
+ print('%-40s %-10s %-5s' % ('run name', 'user', 'id'))
+ print('-' * 62)
for run in self.allRuns:
if user == None or user == run.user:
- print '%-40s %-10s %-10d' % (run.name, run.user, run.run)
+ print('%-40s %-10s %-10d' % (run.name, run.user, run.run))
# Name: listTicks
# Desc: Prints all samples for a given run
def listTicks(self, runs=None):
- print "tick"
- print "----------------------------------------"
+ print("tick")
+ print("----------------------------------------")
sql = 'select distinct dt_tick from data where dt_stat=1180 and ('
if runs != None:
first = True
sql += ')'
self.query(sql)
for r in self.cursor.fetchall():
- print r[0]
+ print(r[0])
# Name: retTicks
# Desc: Prints all samples for a given run
# the optional argument is a regular expression that can
# be used to prune the result set
def listStats(self, regex=None):
- print '%-60s %-8s %-10s' % ('stat name', 'id', 'type')
- print '-' * 80
+ print('%-60s %-8s %-10s' % ('stat name', 'id', 'type'))
+ print('-' * 80)
rx = None
if regex != None:
for stat in stats:
stat = self.allStatNames[stat]
if rx == None or rx.match(stat.name):
- print '%-60s %-8s %-10s' % (stat.name, stat.stat, stat.type)
+ print('%-60s %-8s %-10s' % (stat.name, stat.stat, stat.type))
# Name: liststats
# Desc: Prints all statistics that appear in the database,
# the optional argument is a regular expression that can
# be used to prune the result set
def listFormulas(self, regex=None):
- print '%-60s %s' % ('formula name', 'formula')
- print '-' * 80
+ print('%-60s %s' % ('formula name', 'formula'))
+ print('-' * 80)
rx = None
if regex != None:
for stat in stats:
stat = self.allStatNames[stat]
if stat.type == 'FORMULA' and (rx == None or rx.match(stat.name)):
- print '%-60s %s' % (stat.name, self.allFormulas[stat.stat])
+ print('%-60s %s' % (stat.name, self.allFormulas[stat.stat]))
def getStat(self, stats):
if type(stats) is not list:
elif value == 'stdev':
self._method = self.stdev
else:
- raise AttributeError, "can only set get to: sum | avg | stdev"
+ raise AttributeError("can only set get to: sum | avg | stdev")
def data(self, stat, ticks=None):
if ticks is None:
ymax = 0
for x in self.cursor.fetchall():
data = Data(x)
- if not runs.has_key(data.run):
+ if data.run not in runs:
runs[data.run] = {}
- if not runs[data.run].has_key(data.x):
+ if data.x not in runs[data.run]:
runs[data.run][data.x] = {}
xmax = max(xmax, data.x)
runs[data.run][data.x][data.y] = data.data
results = Result(xmax + 1, ymax + 1)
- for run,data in runs.iteritems():
+ for run,data in runs.items():
result = results[run]
- for x,ydata in data.iteritems():
- for y,data in ydata.iteritems():
+ for x,ydata in data.items():
+ for y,data in ydata.items():
result[x][y] = data
return results
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+from functools import reduce
+
class Value:
def __init__(self, value, precision, percent = False):
self.value = float(value)
value = Value(self.value, self.precision)
pdf = ''
cdf = ''
- if self.__dict__.has_key('pdf'):
+ if 'pdf' in self.__dict__:
pdf = Value(self.pdf, 2, True)
- if self.__dict__.has_key('cdf'):
+ if 'cdf' in self.__dict__:
cdf = Value(self.cdf, 2, True)
output = "%-40s %12s %8s %8s" % (self.name, value, pdf, cdf)
- if descriptions and self.__dict__.has_key('desc') and self.desc:
+ if descriptions and 'desc' in self.__dict__ and self.desc:
output = "%s # %s" % (output, self.desc)
return output
def display(self):
if self.doprint():
- print self
+ print(self)
class VectorDisplay:
def display(self):
else:
subnames = [''] * len(value)
- if self.__dict__.has_key('subnames'):
+ if 'subnames' in self.__dict__:
for i,each in enumerate(self.subnames):
if len(each) > 0:
subnames[i] = '.%s' % each
subdescs = [self.desc]*len(value)
- if self.__dict__.has_key('subdescs'):
- for i in xrange(min(len(value), len(self.subdescs))):
+ if 'subdescs' in self.__dict__:
+ for i in range(min(len(value), len(self.subdescs))):
subdescs[i] = self.subdescs[i]
-        for val,sname,sdesc in map(None, value, subnames, subdescs):
+        for val,sname,sdesc in zip(value, subnames, subdescs):
p.display()
if (self.flags & flags_total):
- if (p.__dict__.has_key('pdf')): del p.__dict__['pdf']
- if (p.__dict__.has_key('cdf')): del p.__dict__['cdf']
+ if ('pdf' in p.__dict__): del p.__dict__['pdf']
+ if ('cdf' in p.__dict__): del p.__dict__['cdf']
p.name = self.name + '.total'
p.desc = self.desc
p.value = mytotal
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-from __future__ import division
+
import operator, re, types
+from functools import reduce
class ProxyError(Exception):
pass
def values(stat, run):
stat = unproxy(stat)
result = []
- for i in xrange(len(stat)):
+ for i in range(len(stat)):
val = value(stat, run, i)
if val is None:
return None
class Value(object):
def __scalar__(self):
- raise AttributeError, "must define __scalar__ for %s" % (type (self))
+ raise AttributeError("must define __scalar__ for %s" % (type (self)))
def __vector__(self):
- raise AttributeError, "must define __vector__ for %s" % (type (self))
+ raise AttributeError("must define __vector__ for %s" % (type (self)))
def __add__(self, other):
return BinaryProxy(operator.__add__, self, other)
return False
def __value__(self, run):
- raise AttributeError, '__value__ must be defined'
+ raise AttributeError('__value__ must be defined')
class VectorItemProxy(Value):
def __init__(self, proxy, index):
return True
def __value__(self, run, index):
- raise AttributeError, '__value__ must be defined'
+ raise AttributeError('__value__ must be defined')
def __getitem__(self, index):
return VectorItemProxy(self, index)
return str(self.constant)
def WrapValue(value):
- if isinstance(value, (int, long, float)):
+ if isinstance(value, (int, float)):
return ScalarConstant(value)
if isinstance(value, (list, tuple)):
return VectorConstant(value)
if isinstance(value, Value):
return value
- raise AttributeError, 'Only values can be wrapped'
+ raise AttributeError('Only values can be wrapped')
class Statistic(object):
def __getattr__(self, attr):
def __setattr__(self, attr, value):
if attr == 'stat':
- raise AttributeError, '%s is read only' % stat
+ raise AttributeError('%s is read only' % stat)
if attr in ('source', 'ticks'):
if getattr(self, attr) != value:
if hasattr(self, 'data'):
len1 = len(self.arg1)
if len0 != len1:
- raise AttributeError, \
- "vectors of different lengths %d != %d" % (len0, len1)
+ raise AttributeError(
+ "vectors of different lengths %d != %d" % (len0, len1))
return len0
proxy = unproxy(self.proxy)
try:
attr = getattr(proxy, self.attr)
- except AttributeError, e:
- raise ProxyError, e
+ except AttributeError as e:
+ raise ProxyError(e)
return unproxy(attr)
def __str__(self):
return self.data[run][0][0]
def display(self, run=None):
- import display
+ from . import display
p = display.Print()
p.name = self.name
p.desc = self.desc
return self.x
def display(self, run=None):
- import display
+ from . import display
d = display.VectorDisplay()
d.name = self.name
d.desc = self.desc
- d.value = [ value(self, run, i) for i in xrange(len(self)) ]
+ d.value = [ value(self, run, i) for i in range(len(self)) ]
d.flags = self.flags
d.precision = self.precision
d.display()
self.samples = samples
def display(self, name, desc, flags, precision):
- import display
+ from . import display
p = display.Print()
p.flags = flags
p.precision = precision
self.size = size
def display(self, name, desc, flags, precision):
- import display
+ from . import display
p = display.Print()
p.flags = flags
p.precision = precision
self.minval = min(self.minval, other.minval)
self.maxval = max(self.maxval, other.maxval)
self.under -= under
- self.vec = map(lambda x,y: x - y, self.vec, other.vec)
+ self.vec = list(map(lambda x,y: x - y, self.vec, other.vec))
self.over -= over
return self
self.minval = min(self.minval, other.minval)
self.maxval = max(self.maxval, other.maxval)
self.under += other.under
- self.vec = map(lambda x,y: x + y, self.vec, other.vec)
+ self.vec = list(map(lambda x,y: x + y, self.vec, other.vec))
self.over += other.over
return self
if self.samples:
self.under /= other
- for i in xrange(len(self.vec)):
+ for i in range(len(self.vec)):
self.vec[i] /= other
self.over /= other
return self
class Dist(Statistic):
def display(self):
- import display
+ from . import display
if not display.all and not (self.flags & flags.printable):
return
class VectorDist(Statistic):
def display(self):
- import display
+ from . import display
if not display.all and not (self.flags & flags.printable):
return
def comparable(self, other):
return self.name == other.name and \
alltrue(map(lambda x, y : x.comparable(y),
- self.dist,
- other.dist))
+ self.dist,
+ other.dist))
def __eq__(self, other):
return alltrue(map(lambda x, y : x == y, self.dist, other.dist))
class Vector2d(Statistic):
def display(self):
- import display
+ from . import display
if not display.all and not (self.flags & flags.printable):
return
d = display.VectorDisplay()
d.__dict__.update(self.__dict__)
- if self.__dict__.has_key('ysubnames'):
+ if 'ysubnames' in self.__dict__:
ysubnames = list(self.ysubnames)
slack = self.x - len(ysubnames)
if slack > 0:
ysubnames.extend(['']*slack)
else:
- ysubnames = range(self.x)
+ ysubnames = list(range(self.x))
for x,sname in enumerate(ysubnames):
o = x * self.y
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-from chart import ChartOptions
+from .chart import ChartOptions
class StatOutput(ChartOptions):
def __init__(self, jobfile, info, stat=None):
self.info = info
def display(self, name, printmode = 'G'):
- import info
+ from . import info
if printmode == 'G':
valformat = '%g'
value[i] = 1 / val
valstring = ', '.join([ valformat % val for val in value ])
- print '%-50s %s' % (job.name + ':', valstring)
+ print('%-50s %s' % (job.name + ':', valstring))
def graph(self, name, graphdir, proxy=None):
from os.path import expanduser, isdir, join as joinpath
- from barchart import BarChart
+ from .barchart import BarChart
from matplotlib.numerix import Float, array, zeros
- import os, re, urllib
+ import os, re, urllib.request, urllib.parse, urllib.error
from jobfile import crossproduct
confgroups = self.jobfile.groups()
if baropts:
baropts = [ bar for bar in crossproduct(baropts) ]
else:
- raise AttributeError, 'No group selected for graph bars'
+ raise AttributeError('No group selected for graph bars')
directory = expanduser(graphdir)
if not isdir(directory):
os.mkdir(directory)
-    html = file(joinpath(directory, '%s.html' % name), 'w')
+    html = open(joinpath(directory, '%s.html' % name), 'w')
- print >>html, '<html>'
- print >>html, '<title>Graphs for %s</title>' % name
- print >>html, '<body>'
+ print('<html>', file=html)
+ print('<title>Graphs for %s</title>' % name, file=html)
+ print('<body>', file=html)
html.flush()
for options in self.jobfile.options(groups):
chart = BarChart(self)
- data = [ [ None ] * len(baropts) for i in xrange(len(groupopts)) ]
+ data = [ [ None ] * len(baropts) for i in range(len(groupopts)) ]
enabled = False
stacked = 0
for g,gopt in enumerate(groupopts):
continue
if proxy:
- import db
+ from . import db
proxy.dict['system'] = self.info[job.system]
val = self.info.get(job, self.stat)
if val is None:
- print 'stat "%s" for job "%s" not found' % \
- (self.stat, job)
+ print('stat "%s" for job "%s" not found' % \
+ (self.stat, job))
if isinstance(val, (list, tuple)):
if len(val) == 1:
data[g][b] = val
if stacked == 0:
- for i in xrange(len(groupopts)):
- for j in xrange(len(baropts)):
+ for i in range(len(groupopts)):
+ for j in range(len(baropts)):
if data[i][j] is None:
data[i][j] = 0.0
else:
- for i in xrange(len(groupopts)):
- for j in xrange(len(baropts)):
+ for i in range(len(groupopts)):
+ for j in range(len(baropts)):
val = data[i][j]
if val is None:
data[i][j] = [ 0.0 ] * stacked
elif len(val) != stacked:
- raise ValueError, "some stats stacked, some not"
+ raise ValueError("some stats stacked, some not")
data = array(data)
if data.sum() == 0:
dim = len(data.shape)
x = data.shape[0]
- xkeep = [ i for i in xrange(x) if data[i].sum() != 0 ]
+ xkeep = [ i for i in range(x) if data[i].sum() != 0 ]
y = data.shape[1]
- ykeep = [ i for i in xrange(y) if data[:,i].sum() != 0 ]
+ ykeep = [ i for i in range(y) if data[:,i].sum() != 0 ]
data = data.take(xkeep, axis=0)
data = data.take(ykeep, axis=1)
if not has_group:
try:
chart.legend = self.info.rcategories
except:
- chart.legend = [ str(i) for i in xrange(stacked) ]
+ chart.legend = [ str(i) for i in range(stacked) ]
else:
chart.legend = bdescs
chart.savefig(joinpath(directory, pngname))
chart.savefig(joinpath(directory, epsname))
chart.savefig(joinpath(directory, psname))
- html_name = urllib.quote(pngname)
- print >>html, '''%s<br><img src="%s"><br>''' % (desc, html_name)
+ html_name = urllib.parse.quote(pngname)
+ print('''%s<br><img src="%s"><br>''' % (desc, html_name),
+ file=html)
html.flush()
- print >>html, '</body>'
- print >>html, '</html>'
+ print('</body>', file=html)
+ print('</html>', file=html)
html.close()
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+from functools import reduce
+
all = False
descriptions = False
value = Value(self.value, self.precision)
pdf = ''
cdf = ''
- if self.__dict__.has_key('pdf'):
+ if 'pdf' in self.__dict__:
pdf = Value(self.pdf, 2, True)
- if self.__dict__.has_key('cdf'):
+ if 'cdf' in self.__dict__:
cdf = Value(self.cdf, 2, True)
output = "%-40s %12s %8s %8s" % (self.name, value, pdf, cdf)
- if descriptions and self.__dict__.has_key('desc') and self.desc:
+ if descriptions and 'desc' in self.__dict__ and self.desc:
output = "%s # %s" % (output, self.desc)
return output
def display(self):
if self.doprint():
- print self
+ print(self)
class VectorDisplay:
def display(self):
else:
subnames = [''] * len(value)
- if self.__dict__.has_key('subnames'):
+ if 'subnames' in self.__dict__:
for i,each in enumerate(self.subnames):
if len(each) > 0:
subnames[i] = '.%s' % each
subdescs = [self.desc]*len(value)
- if self.__dict__.has_key('subdescs'):
- for i in xrange(min(len(value), len(self.subdescs))):
+ if 'subdescs' in self.__dict__:
+ for i in range(min(len(value), len(self.subdescs))):
subdescs[i] = self.subdescs[i]
-        for val,sname,sdesc in map(None, value, subnames, subdescs):
+        for val,sname,sdesc in zip(value, subnames, subdescs):
p.display()
if (self.flags & flags_total):
- if (p.__dict__.has_key('pdf')): del p.__dict__['pdf']
- if (p.__dict__.has_key('cdf')): del p.__dict__['cdf']
+ if ('pdf' in p.__dict__): del p.__dict__['pdf']
+ if ('cdf' in p.__dict__): del p.__dict__['cdf']
p.name = self.name + '.total'
p.desc = self.desc
p.value = mytotal
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import output
+from . import output
class FileData(dict):
def __init__(self, filename):
def __getattribute__(self, attr):
if attr == 'total':
total = 0.0
- for value in self.itervalues():
+ for value in self.values():
total += value
return total
return FileData(self.filename)
if attr == 'maxsymlen':
- return max([ len(sym) for sym in self.iterkeys() ])
+ return max([ len(sym) for sym in self.keys() ])
return super(RunData, self).__getattribute__(attr)
total = float(self.total)
# swap (string,count) order so we can sort on count
- symbols = [ (count,name) for name,count in self.iteritems() ]
+ symbols = [ (count,name) for name,count in self.items() ]
symbols.sort(reverse=True)
if limit is not None:
symbols = symbols[:limit]
symbolf = "%-" + str(maxsymlen + 1) + "s %.2f%%"
for number,name in symbols:
- print >>output, symbolf % (name, 100.0 * (float(number) / total))
+ print(symbolf % (name, 100.0 * (float(number) / total)),
+ file=output)
class PCData(RunData):
def __init__(self, filename=None, categorize=None, showidle=True):
nodes = {}
for line in filedata['function data']:
data = line.split(' ')
- node_id = long(data[0], 16)
+ node_id = int(data[0], 16)
node = FuncNode()
node.symbol = data[1]
if node.symbol == '':
node.symbol = 'unknown'
- node.count = long(data[2])
- node.children = [ long(child, 16) for child in data[3:] ]
+ node.count = int(data[2])
+ node.children = [ int(child, 16) for child in data[3:] ]
nodes[node_id] = node
- for node in nodes.itervalues():
+ for node in nodes.values():
children = []
for cid in node.children:
child = nodes[cid]
child.parent = node
node.children = tuple(children)
if not nodes:
- print filedata.filename
- print nodes
+ print(filedata.filename)
+ print(nodes)
return nodes[0]
def total(self):
def dump(self):
kids = [ child.symbol for child in self.children]
- print '%s %d <%s>' % (self.symbol, self.count, ', '.join(kids))
+ print('%s %d <%s>' % (self.symbol, self.count, ', '.join(kids)))
for child in self.children:
child.dump()
import sys
output = sys.stdout
- items = [ (val,key) for key,val in self.iteritems() ]
+ items = [ (val,key) for key,val in self.items() ]
items.sort(reverse=True)
for val,key in items:
if maxcount is not None:
maxcount -= 1
percent = val * 100.0 / self.total
- print >>output, '%-30s %8s' % (key, '%3.2f%%' % percent)
+ print('%-30s %8s' % (key, '%3.2f%%' % percent), file=output)
class Profile(object):
# This list controls the order of values in stacked bar data output
self.data[run] = {}
if cpu in self.data[run]:
- raise AttributeError, \
- 'data already stored for run %s and cpu %s' % (run, cpu)
+ raise AttributeError(
+ 'data already stored for run %s and cpu %s' % (run, cpu))
self.data[run][cpu] = data
try:
return self.data[run][cpu]
except KeyError:
- print run, cpu
+ print(run, cpu)
return None
def alldata(self):
- for run,cpus in self.data.iteritems():
- for cpu,data in cpus.iteritems():
+ for run,cpus in self.data.items():
+ for cpu,data in cpus.items():
yield run,cpu,data
def get(self, job, stat, system=None):
system = job.system
if system is None:
- raise AttributeError, 'The job must have a system set'
+ raise AttributeError('The job must have a system set')
cpu = '%s.run%d' % (system, self.cpu)
for category in self.categories:
val = float(data.get(category, 0.0))
if val < 0.0:
- raise ValueError, 'value is %f' % val
+ raise ValueError('value is %f' % val)
values.append(val)
total = sum(values)
return [ v / total * 100.0 for v in values ]
def dump(self):
for run,cpu,data in self.alldata():
- print 'run %s, cpu %s' % (run, cpu)
+ print('run %s, cpu %s' % (run, cpu))
data.dump()
- print
+ print()
def write_dot(self, threshold, jobfile=None, jobs=None):
import pydot
for job in thejobs:
cpu = '%s.run%d' % (job.system, self.cpu)
symbols = self.getdata(job.name, cpu)
- print job.name
+ print(job.name)
symbols.display(limit=limit, maxsymlen=maxsymlen)
- print
+ print()
-from categories import func_categorize, pc_categorize
+from .categories import func_categorize, pc_categorize
class PCProfile(Profile):
def __init__(self, categorize=pc_categorize):
super(PCProfile, self).__init__(PCData, categorize)
super(FuncProfile, self).__init__(FuncData, categorize)
def usage(exitcode = None):
- print '''\
+ print('''\
Usage: %s [-bc] [-g <dir>] [-j <jobfile>] [-n <num>]
-c groups symbols into categories
-g <d> draw graphs and send output to <d>
-j <jobfile> specify a different jobfile (default is Test.py)
-n <n> selects number of top symbols to print (default 5)
-''' % sys.argv[0]
+''' % sys.argv[0])
if exitcode is not None:
sys.exit(exitcode)
if __name__ == '__main__':
import getopt, re, sys
from os.path import expanduser
- from output import StatOutput
+ from .output import StatOutput
# default option values
numsyms = 10
textout = True
if args:
- print "'%s'" % args, len(args)
+ print("'%s'" % args, len(args))
usage(1)
if inputfile:
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2003-2004 The Regents of The University of Michigan
# All rights reserved.
import re, sys, math
def usage():
- print '''\
+ print('''\
Usage: %s [-E] [-F] [ -G <get> ] [-d <db> ] [-g <graphdir> ] [-h <host>] [-p]
[-s <system>] [-r <runs> ] [-T <samples>] [-u <username>]
<command> [command args]
database <command> Where command is drop, init, or clean
-''' % sys.argv[0]
+''' % sys.argv[0])
sys.exit(1)
def getopts(list, flags):
if command == 'database':
if len(args) == 0: raise CommandException
- import dbinit
+ from . import dbinit
mydb = dbinit.MyDB(options)
if args[0] == 'drop':
raise CommandException
- import db
+ from . import db
source = db.Database()
source.host = options.host
source.db = options.db
source.method = 'sum'
def disp(*args):
- print "%-35s %12s %12s %4s %5s %5s %5s %10s" % args
+ print("%-35s %12s %12s %4s %5s %5s %5s %10s" % args)
# temporary variable containing a bunch of dashes
d = '-' * 100
#loop through all the stats selected
for stat in stats:
- print "%s:" % stat.name
+ print("%s:" % stat.name)
disp("run name", "average", "stdev", ">10%", ">1SDV", ">2SDV",
"SAMP", "CV")
disp(d[:35], d[:12], d[:12], d[:4], d[:5], d[:5], d[:5], d[:10])
if options.ticks:
if not options.graph:
- print 'only displaying sample %s' % options.ticks
+ print('only displaying sample %s' % options.ticks)
source.ticks = [ int(x) for x in options.ticks.split() ]
- from output import StatOutput
+ from .output import StatOutput
output = StatOutput(options.jobfile, source)
output.xlabel = 'System Configuration'
output.colormap = 'RdYlGn'
if len(args):
raise CommandException
- from info import ProxyGroup
+ from .info import ProxyGroup
proxy = ProxyGroup(system = source[options.system])
system = proxy.system
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
# Copyright (c) 2012, 2014 ARM Limited
# All rights reserved
# Subsequent versions should be backward compatible
import re, sys, os
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import gzip
import xml.etree.ElementTree as ET
import xml.dom.minidom as minidom
args = parser.parse_args()
if not re.match("(.*)\.apc", args.output_path):
- print "ERROR: <dest .apc folder> should end with '.apc'!"
+ print("ERROR: <dest .apc folder> should end with '.apc'!")
sys.exit(1)
# gzipped BMP files for visual annotation is supported in Streamline 5.14.
def parseConfig(config_file):
global num_cpus, num_l2
- print "\n==============================="
- print "Parsing gem5 config.ini file..."
- print config_file
- print "===============================\n"
+ print("\n===============================")
+ print("Parsing gem5 config.ini file...")
+ print(config_file)
+ print("===============================\n")
config = ConfigParser()
if not config.read(config_file):
- print "ERROR: config file '", config_file, "' not found"
+ print("ERROR: config file '", config_file, "' not found")
sys.exit(1)
if config.has_section("system.cpu"):
while config.has_section("system.l2_cache" + str(num_l2)):
num_l2 += 1
- print "Num CPUs:", num_cpus
- print "Num L2s:", num_l2
- print ""
+ print("Num CPUs:", num_cpus)
+ print("Num L2s:", num_l2)
+ print("")
return (num_cpus, num_l2)
elif frame_type == "Idle":
code = 9
else:
- print "ERROR: Unknown frame type:", frame_type
+ print("ERROR: Unknown frame type:", frame_type)
sys.exit(1)
packed_code = packed32(code)
####################################################################
def parseProcessInfo(task_file):
- print "\n==============================="
- print "Parsing Task file..."
- print task_file
- print "===============================\n"
+ print("\n===============================")
+ print("Parsing Task file...")
+ print(task_file)
+ print("===============================\n")
global start_tick, end_tick, num_cpus
global process_dict, thread_dict, process_list
else:
process_file = open(task_file, 'rb')
except:
- print "ERROR opening task file:", task_file
- print "Make sure context switch task dumping is enabled in gem5."
+ print("ERROR opening task file:", task_file)
+ print("Make sure context switch task dumping is enabled in gem5.")
sys.exit(1)
process_re = re.compile("tick=(\d+)\s+(\d+)\s+cpu_id=(\d+)\s+" +
if not task_name_failure_warned:
if task_name == "FailureIn_curTaskName":
- print "-------------------------------------------------"
- print "WARNING: Task name not set correctly!"
- print "Process/Thread info will not be displayed correctly"
- print "Perhaps forgot to apply m5struct.patch to kernel?"
- print "-------------------------------------------------"
+ print("-------------------------------------------------")
+ print("WARNING: Task name not set correctly!")
+ print("Process/Thread info will not be displayed correctly")
+ print("Perhaps forgot to apply m5struct.patch to kernel?")
+ print("-------------------------------------------------")
task_name_failure_warned = True
if not tgid in process_dict:
if tgid == pid:
# new task is parent as well
if args.verbose:
- print "new process", uid, pid, tgid, task_name
+ print("new process", uid, pid, tgid, task_name)
if tgid == 0:
# new process is the "idle" task
process = Task(uid, pid, tgid, "idle", True, tick)
if tgid == pid:
if process_dict[tgid].task_name == "_Unknown_":
if args.verbose:
- print "new process", \
- process_dict[tgid].uid, pid, tgid, task_name
+ print("new process", \
+ process_dict[tgid].uid, pid, tgid, task_name)
process_dict[tgid].task_name = task_name
if process_dict[tgid].task_name != task_name and tgid != 0:
process_dict[tgid].task_name = task_name
if not pid in thread_dict:
if args.verbose:
- print "new thread", \
- uid, process_dict[tgid].uid, pid, tgid, task_name
+ print("new thread", \
+ uid, process_dict[tgid].uid, pid, tgid, task_name)
thread = Task(uid, pid, tgid, task_name, False, tick)
uid += 1
thread_dict[pid] = thread
thread_dict[pid].task_name = task_name
if args.verbose:
- print tick, uid, cpu_id, pid, tgid, task_name
+ print(tick, uid, cpu_id, pid, tgid, task_name)
task = thread_dict[pid]
event = Event(tick, task)
unified_event_list.append(event)
if len(unified_event_list) == num_events:
- print "Truncating at", num_events, "events!"
+ print("Truncating at", num_events, "events!")
break
- print "Found %d events." % len(unified_event_list)
+ print("Found %d events." % len(unified_event_list))
for process in process_list:
if process.pid > 9990: # fix up framebuffer ticks
process.tick = start_tick
- print process.uid, process.pid, process.tgid, \
- process.task_name, str(process.tick)
+ print(process.uid, process.pid, process.tgid, \
+ process.task_name, str(process.tick))
for thread in process.children:
if thread.pid > 9990:
thread.tick = start_tick
- print "\t", thread.uid, thread.pid, thread.tgid, \
- thread.task_name, str(thread.tick)
+ print("\t", thread.uid, thread.pid, thread.tgid, \
+ thread.task_name, str(thread.tick))
end_tick = tick
- print "Start tick:", start_tick
- print "End tick: ", end_tick
- print ""
+ print("Start tick:", start_tick)
+ print("End tick: ", end_tick)
+ print("")
return
def ticksToNs(tick):
if ticks_in_ns < 0:
- print "ticks_in_ns not set properly!"
+ print("ticks_in_ns not set properly!")
sys.exit(1)
-    return tick / ticks_in_ns
+    return tick // ticks_in_ns
per_cpu_name = re.sub("#", "", self.name)
self.per_cpu_name.append(per_cpu_name)
- print "\t", per_cpu_name
+ print("\t", per_cpu_name)
self.per_cpu_regex_string.\
append("^" + per_cpu_name + "\s+[\d\.]+")
self.next_key = 1
def register(self, name, group, group_index, per_cpu):
- print "registering stat:", name, "group:", group, group_index
+ print("registering stat:", name, "group:", group, group_index)
self.stats_list.append(StatsEntry(name, group, group_index, per_cpu, \
self.next_key))
self.next_key += 1
# Union of all stats to accelerate parsing speed
def createStatsRegex(self):
regex_strings = [];
- print "\nnum entries in stats_list", len(self.stats_list)
+ print("\nnum entries in stats_list", len(self.stats_list))
for entry in self.stats_list:
if entry.per_cpu:
for i in range(num_cpus):
def registerStats(config_file):
- print "==============================="
- print "Parsing stats config.ini file..."
- print config_file
- print "==============================="
+ print("===============================")
+ print("Parsing stats config.ini file...")
+ print(config_file)
+ print("===============================")
config = ConfigParser()
if not config.read(config_file):
- print "ERROR: config file '", config_file, "' not found!"
+ print("ERROR: config file '", config_file, "' not found!")
sys.exit(1)
- print "\nRegistering Stats..."
+ print("\nRegistering Stats...")
stats = Stats()
# Parse and read in gem5 stats file
# Streamline counters are organized per CPU
def readGem5Stats(stats, gem5_stats_file):
- print "\n==============================="
- print "Parsing gem5 stats file..."
- print gem5_stats_file
- print "===============================\n"
+ print("\n===============================")
+ print("Parsing gem5 stats file...")
+ print(gem5_stats_file)
+ print("===============================\n")
ext = os.path.splitext(gem5_stats_file)[1]
window_start_regex = \
else:
f = open(gem5_stats_file, "r")
except:
- print "ERROR opening stats file", gem5_stats_file, "!"
+ print("ERROR opening stats file", gem5_stats_file, "!")
sys.exit(1)
stats_not_found_list = stats.stats_list[:]
try:
line = f.readline()
except IOError:
- print ""
- print "WARNING: IO error in stats file"
- print "(gzip stream not closed properly?)...continuing for now"
+ print("")
+ print("WARNING: IO error in stats file")
+ print("(gzip stream not closed properly?)...continuing for now")
error = True
if not line:
break
if m:
sim_freq = int(m.group(1)) # ticks in 1 sec
ticks_in_ns = int(sim_freq / 1e9)
- print "Simulation frequency found! 1 tick == %e sec\n" \
- % (1.0 / sim_freq)
+ print("Simulation frequency found! 1 tick == %e sec\n" \
+ % (1.0 / sim_freq))
# Final tick in gem5 stats: current absolute timestamp
m = final_tick_regex.match(line)
if (window_end_regex.match(line) or error):
if args.verbose:
- print "new window"
+ print("new window")
for stat in stats.stats_list:
if stat.per_cpu:
for i in range(num_cpus):
if not stat.per_cpu_found[i]:
if not stat.not_found_at_least_once:
- print "WARNING: stat not found in window #", \
- window_num, ":", stat.per_cpu_name[i]
- print "suppressing further warnings for " + \
- "this stat"
+ print("WARNING: stat not found in window #", \
+ window_num, ":", stat.per_cpu_name[i])
+ print("suppressing further warnings for " + \
+ "this stat")
stat.not_found_at_least_once = True
stat.values[i].append(str(0))
stat.per_cpu_found[i] = False
else:
if not stat.found:
if not stat.not_found_at_least_once:
- print "WARNING: stat not found in window #", \
- window_num, ":", stat.name
- print "suppressing further warnings for this stat"
+ print("WARNING: stat not found in window #", \
+ window_num, ":", stat.name)
+ print("suppressing further warnings for this stat")
stat.not_found_at_least_once = True
stat.values.append(str(0))
stat.found = False
else:
value = str(int(float(m.group(1))))
if args.verbose:
- print stat.per_cpu_name[i], value
+ print(stat.per_cpu_name[i], value)
stat.values[i].append(value)
stat.per_cpu_found[i] = True
all_found = True
if m:
value = str(int(float(m.group(1))))
if args.verbose:
- print stat.name, value
+ print(stat.name, value)
stat.values.append(value)
stat.found = True
stats_not_found_list.remove(stat)
thread_list = []
for process in process_list:
if process.uid > 0:
- print "cookie", process.task_name, process.uid
+ print("cookie", process.task_name, process.uid)
writeBinary(blob, cookieNameFrame(process.uid, process.task_name))
# pid and tgid need to be positive values -- no longer true?
# Threads need to be sorted in timestamp order
thread_list.sort(key = lambda x: x.tick)
for thread in thread_list:
- print "thread", thread.task_name, (ticksToNs(thread.tick)),\
- thread.tgid, thread.pid
+ print("thread", thread.task_name, (ticksToNs(thread.tick)),\
+ thread.tgid, thread.pid)
writeBinary(blob, threadNameFrame(ticksToNs(thread.tick),\
thread.pid, thread.task_name))
timestamp = ticksToNs(event.tick)
pid = event.task.tgid
tid = event.task.pid
- if process_dict.has_key(event.task.tgid):
+ if event.task.tgid in process_dict:
cookie = process_dict[event.task.tgid].uid
else:
cookie = 0
state = 0
if args.verbose:
- print cpu, timestamp, pid, tid, cookie
+ print(cpu, timestamp, pid, tid, cookie)
writeBinary(blob,\
schedSwitchFrame(cpu, timestamp, pid, tid, cookie, state))
writeBinary(blob, annotateFrame(0, annotate_pid, ticksToNs(tick), \
len(userspace_body), userspace_body))
- print "\nfound", frame_count, "frames for visual annotation.\n"
+ print("\nfound", frame_count, "frames for visual annotation.\n")
def createApcProject(input_path, output_path, stats):
writeCookiesThreads(blob)
- print "writing Events"
+ print("writing Events")
writeSchedEvents(blob)
- print "writing Counters"
+ print("writing Counters")
writeCounters(blob, stats)
- print "writing Visual Annotations"
+ print("writing Visual Annotations")
writeVisualAnnotations(blob, input_path, output_path)
doSessionXML(output_path)
# Make sure input path exists
####
if not os.path.exists(input_path):
- print "ERROR: Input path %s does not exist!" % input_path
+ print("ERROR: Input path %s does not exist!" % input_path)
sys.exit(1)
####
# Check if both stats.txt and stats.txt.gz exist and warn if both exist
if os.path.exists(input_path + "/stats.txt") and \
os.path.exists(input_path + "/stats.txt.gz"):
- print "WARNING: Both stats.txt.gz and stats.txt exist. \
- Using stats.txt.gz by default."
+ print("WARNING: Both stats.txt.gz and stats.txt exist. \
+ Using stats.txt.gz by default.")
gem5_stats_file = input_path + "/stats.txt.gz"
if not os.path.exists(gem5_stats_file):
gem5_stats_file = input_path + "/stats.txt"
if not os.path.exists(gem5_stats_file):
- print "ERROR: stats.txt[.gz] file does not exist in %s!" % input_path
+ print("ERROR: stats.txt[.gz] file does not exist in %s!" % input_path)
sys.exit(1)
readGem5Stats(stats, gem5_stats_file)
####
createApcProject(input_path, output_path, stats)
-print "All done!"
+print("All done!")
-#! /usr/bin/env python2.7
+#! /usr/bin/env python3
#
# Copyright (c) 2016 ARM Limited
# All rights reserved
verifiers = style.verifiers.all_verifiers
if verbose:
- print "Verifying %s[%s]..." % (filename, regions)
+ print("Verifying %s[%s]..." % (filename, regions))
for verifier in [ v(ui, opts, base=base) for v in verifiers ]:
if verbose:
- print "Applying %s (%s)" % (
- verifier.test_name, verifier.__class__.__name__)
+ print("Applying %s (%s)" % (
+ verifier.test_name, verifier.__class__.__name__))
if verifier.apply(filename, regions=regions):
return False
return True
def detect_repo():
repo_classes = repo.detect_repo()
if not repo_classes:
- print >> sys.stderr, "Error: Failed to detect repository type, no " \
- "known repository type found."
+ print("Error: Failed to detect repository type, no " \
+ "known repository type found.", file=sys.stderr)
sys.exit(1)
elif len(repo_classes) > 1:
- print >> sys.stderr, "Error: Detected multiple repository types."
+ print("Error: Detected multiple repository types.", file=sys.stderr)
sys.exit(1)
else:
return repo_classes[0]()
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
#
# Copyright (c) 2016 ARM Limited
# All rights reserved
orig_lines = []
# grab all of the lines of the file and strip them of their line ending
- old_lines = list(line.rstrip('\r\n') for line in src.xreadlines())
+ old_lines = list(line.rstrip('\r\n') for line in src)
new_lines = list(mutator(old_lines, src.name, language))
- for line in src.xreadlines():
+ for line in src:
line = line
if inplace:
args = tuple(arg)
if len(args) != 2:
- raise(AttributeError, \
+ raise AttributeError(
"Only one or two arguments allowed, %d provided" % (alen, ))
return tuple.__new__(cls, args)
all_regions = Regions(Region(neg_inf, pos_inf))
if __name__ == '__main__':
- x = Regions(*((i, i + 1) for i in xrange(0,30,2)))
- y = Regions(*((i, i + 4) for i in xrange(0,30,5)))
+ x = Regions(*((i, i + 1) for i in range(0,30,2)))
+ y = Regions(*((i, i + 4) for i in range(0,30,5)))
z = Region(6,7)
n = Region(9,10)
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
#
# Copyright (c) 2016 ARM Limited
# All rights reserved
from .region import *
from .style import modified_regions
-class AbstractRepo(object):
- __metaclass__ = ABCMeta
-
+class AbstractRepo(object, metaclass=ABCMeta):
def file_path(self, fname):
"""Get the absolute path to a file relative within the repository. The
input file name must be a valid path within the repository.
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
#
# Copyright (c) 2014-2015 ARM Limited
# All rights reserved
-#! /usr/bin/env python2.7
+#! /usr/bin/env python3
# Copyright (c) 2014, 2016 ARM Limited
# All rights reserved
#
any_control = re.compile(r'\b(if|while|for)([ \t]*)\(')
-class UserInterface(object):
- __metaclass__ = ABCMeta
-
+class UserInterface(object, metaclass=ABCMeta):
def __init__(self, verbose=False):
self.verbose = verbose
class StdioUI(UserInterface):
def _prompt(self, prompt, results, default):
- return raw_input(prompt) or default
+ return input(prompt) or default
def write(self, string):
sys.stdout.write(string)
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
#
# Copyright (c) 2014, 2016 ARM Limited
# All rights reserved
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-from __future__ import print_function
+
import argparse
import m5