#!/usr/bin/env python3
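"""Gather and convert CPU test data for libvirt's CPU tests.

Summary (based on the code below): the `gather` step collects the CPU model
name, raw CPUID leaves, selected MSRs and QEMU's view of the host CPU model
(via QMP), and dumps everything as JSON to stdout.  The `parse` step turns
that JSON into the XML/JSON files used by the test suite, and `diff` compares
a JSON CPU model description against the features known from the cpu_map.
"""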

import argparse
import fcntl
import json
import os
import re
import struct
import subprocess
import sys
import xml.etree.ElementTree


_KEYS = {
    "cpuid": ["eax_in", "ecx_in"],
    "msr": ["index"],
}

_REGS = {
    "cpuid": ["eax", "ebx", "ecx", "edx"],
    "msr": ["eax", "edx"],
}


def gather_name(args):
    if args.name:
        return args.name

    with open("/proc/cpuinfo", "rt") as f:
        for line in f.readlines():
            if line.startswith("model name"):
                return line.split(":", 2)[1].strip()

    exit("Error: '/proc/cpuinfo' does not contain a model name.\n"
         "Use '--name' to set a model name.")


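# Parser for the output of `cpuid -1r` (raw leaves for one CPU).  Purely for
# illustration, the regular expression below matches lines shaped like:
#
#    0x00000001 0x00: eax=0x000906ea ebx=0x00100800 ecx=0x7ffafbff edx=0xbfebfbff
#
# i.e. "<eax_in> <ecx_in>: eax=... ebx=... ecx=... edx=..." with hex values;
# the two leading numbers are the CPUID inputs, the rest are the outputs.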
def gather_cpuid_leaves_cpuid(output):
    leave_pattern = re.compile(
        "^\\s*"
        "(0x[0-9a-f]+)\\s*"
        "(0x[0-9a-f]+):\\s*"
        "eax=(0x[0-9a-f]+)\\s*"
        "ebx=(0x[0-9a-f]+)\\s*"
        "ecx=(0x[0-9a-f]+)\\s*"
        "edx=(0x[0-9a-f]+)\\s*$")

    for line in output.split("\n"):
        match = leave_pattern.match(line)
        if not match:
            continue
        yield {
            "eax_in": int(match.group(1), 0),
            "ecx_in": int(match.group(2), 0),
            "eax": int(match.group(3), 0),
            "ebx": int(match.group(4), 0),
            "ecx": int(match.group(5), 0),
            "edx": int(match.group(6), 0)}


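# Parser for the output of the kernel's `kcpuid` tool.  The exact output
# format is assumed from the regular expressions below: plain leaves come as
# "0x...: EAX=0x..., EBX=0x..., ECX=0x..., EDX=0x...", while leaves with
# subleaves are announced by a "0x...: subleafs:" header followed by indented
# "<n>: EAX=..., ..." lines, one per ECX input.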
def gather_cpuid_leaves_kcpuid(output):
    leave_pattern = re.compile(
        "^(0x[0-9a-f]+): "
        "EAX=(0x[0-9a-f]+), "
        "EBX=(0x[0-9a-f]+), "
        "ECX=(0x[0-9a-f]+), "
        "EDX=(0x[0-9a-f]+)$")
    branch_pattern_head = re.compile(
        "^(0x[0-9a-f]+): "
        "subleafs:$")
    branch_pattern_body = re.compile(
        "^\\s*([0-9]+): "
        "EAX=(0x[0-9a-f]+), "
        "EBX=(0x[0-9a-f]+), "
        "ECX=(0x[0-9a-f]+), "
        "EDX=(0x[0-9a-f]+)$")

    regs = list()
    eax_in = 0
    for line in output.split("\n"):
        match = branch_pattern_head.match(line)
        if match:
            eax_in = int(match.group(1), 0)
            continue
        match = branch_pattern_body.match(line)
        if match:
            regs.append({
                "eax_in": eax_in,
                "ecx_in": int(match.group(1), 0),
                "eax": int(match.group(2), 0),
                "ebx": int(match.group(3), 0),
                "ecx": int(match.group(4), 0),
                "edx": int(match.group(5), 0)})
            continue
        match = leave_pattern.match(line)
        if match:
            regs.append({
                "eax_in": int(match.group(1), 0),
                "ecx_in": 0,
                "eax": int(match.group(2), 0),
                "ebx": int(match.group(3), 0),
                "ecx": int(match.group(4), 0),
                "edx": int(match.group(5), 0)})
            continue
    return regs


def gather_cpuid_leaves(args):
    def mask(regs, eax_in, ecx_in, eax_mask, ebx_mask, ecx_mask, edx_mask):
        if eax_in != regs["eax_in"]:
            return
        if ecx_in is not None and ecx_in != regs["ecx_in"]:
            return
        regs["eax"] &= eax_mask
        regs["ebx"] &= ebx_mask
        regs["ecx"] &= ecx_mask
        regs["edx"] &= edx_mask

    cpuid = args.path_to_cpuid or "cpuid"
    try:
        output = subprocess.check_output(
            [cpuid, "-r" if "kcpuid" in cpuid else "-1r"],
            universal_newlines=True)
    except FileNotFoundError as e:
        exit("Error: '{}' not found.\n'cpuid' can usually be found in a "
             "package of the same name. If your distro does not provide "
             "such a package, you can find the sources or binary packages "
             "at 'http://www.etallen.com/cpuid.html'.".format(e.filename))

    if "=====" in output:
        reglist = gather_cpuid_leaves_kcpuid(output)
    else:
        reglist = gather_cpuid_leaves_cpuid(output)

    for regs in reglist:
        # Local APIC ID. Pretend to always run on logical processor #0.
        mask(regs, 0x01, 0x00, 0xffffffff, 0x00ffffff, 0xffffffff, 0xffffffff)
        # Leaves 0x0b and 0x1f report the x2APIC ID in EDX; clear it as well.
        mask(regs, 0x0b, None, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000)
        mask(regs, 0x1f, None, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000)

        yield regs


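# MSR values are read through one of two mechanisms, in this order:
#   1. /dev/cpu/0/msr, provided by the `msr` kernel module (may require root);
#   2. the KVM_GET_MSRS ioctl on /dev/kvm, which returns KVM's view of
#      MSR-based feature words (e.g. IA32_ARCH_CAPABILITIES).
# The "=LLLLQ" buffer packed/unpacked below is assumed to mirror
# struct kvm_msrs { nmsrs, pad } followed by a single struct kvm_msr_entry
# { index, reserved, data } for the MSR being queried.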
def gather_msr():
    msrs = dict()
    addresses = [
        0x10a,  # IA32_ARCH_CAPABILITIES_MSR
        0xcf,  # IA32_CORE_CAPABILITY_MSR
    ]
    KVM_GET_MSRS = 0xc008ae88

    try:
        with open("/dev/cpu/0/msr", "rb") as f:
            for addr in addresses:
                f.seek(addr)
                buf = f.read(8)
                msrs[addr] = struct.unpack("=Q", buf)[0]
            return "", msrs
    except IOError as e:
        print("Warning: {}".format(e), file=sys.stderr)

    try:
        with open("/dev/kvm", "rb") as f:
            for addr in addresses:
                bufIn = struct.pack("=LLLLQ", 1, 0, addr, 0, 0)
                bufOut = fcntl.ioctl(f, KVM_GET_MSRS, bufIn)
                msrs[addr] = struct.unpack("=LLLLQ", bufOut)[4]
            return " via KVM", msrs
    except IOError as e:
        print("Warning: {}".format(e), file=sys.stderr)

    return None, {}


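# Run QEMU with QMP on stdio and send it a fixed sequence of commands:
# "qmp_capabilities", the caller-supplied commands, then "quit".  Every
# non-empty response line is parsed as JSON; empty "return" acknowledgements
# and the final SHUTDOWN event are filtered out, everything else is yielded
# to the caller.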
def call_qemu(qemu, qmp_cmds):
    cmd = [
        qemu,
        "-machine", "accel=kvm",
        "-cpu", "host",
        "-nodefaults",
        "-nographic",
        "-qmp", "stdio"]

    stdin = list()
    stdin.append("{\"execute\": \"qmp_capabilities\"}")
    stdin.extend([json.dumps(o) for o in qmp_cmds])
    stdin.append("{\"execute\": \"quit\"}")

    try:
        output = subprocess.check_output(
            cmd,
            universal_newlines=True,
            input="\n".join(stdin))
    except subprocess.CalledProcessError:
        exit("Error: Non-zero exit code from '{}'.".format(qemu))
    except FileNotFoundError:
        exit("Error: File not found: '{}'.".format(qemu))

    for line in output.split("\n"):
        if not line:
            continue
        response = json.loads(line)
        if "return" in response and not response["return"]:
            continue
        if response.get("event") == "SHUTDOWN":
            continue
        yield response


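# Query QEMU for its view of the host CPU model.  First the "host" model is
# expanded statically to get a canonical model name and properties; with
# QEMU >= 6.1 the hv-passthrough property is set so that Hyper-V (hv-*)
# features show up in the subsequent full expansion.  If static expansion is
# not supported (older QEMU), fall back to reading feature-words, family,
# model, stepping and model-id directly via qom-get.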
def gather_model(args):
    output = call_qemu(args.path_to_qemu, [
        {
            "execute": "query-cpu-model-expansion",
            "arguments":
            {
                "type": "static",
                "model": {"name": "host"}
            },
            "id": "model-expansion"
        }])

    version = 0, 0
    static_model = None
    for o in output:
        if o.get("id") == "model-expansion":
            static_model = o["return"]["model"]
        if "QMP" in o:
            version = o["QMP"]["version"]["qemu"]
            version = version["major"], version["minor"]

    if static_model:
        if version[0] > 6 or (version[0] == 6 and version[1] >= 1):
            static_model["props"]["hv-passthrough"] = True

        return call_qemu(args.path_to_qemu, [
            {
                "execute": "query-cpu-model-expansion",
                "arguments":
                {
                    "type": "full",
                    "model": static_model
                },
                "id": "model-expansion"
            },
            {
                "execute": "query-cpu-definitions",
                "id": "definitions"
            }
        ])
    else:
        return call_qemu(args.path_to_qemu, [
            {
                "execute": "qom-get",
                "arguments":
                {
                    "path": "/machine/unattached/device[0]",
                    "property": "feature-words"
                },
                "id": "feature-words"
            },
            {
                "execute": "qom-get",
                "arguments":
                {
                    "path": "/machine/unattached/device[0]",
                    "property": "family"
                },
                "id": "family"
            },
            {
                "execute": "qom-get",
                "arguments":
                {
                    "path": "/machine/unattached/device[0]",
                    "property": "model"
                },
                "id": "model"
            },
            {
                "execute": "qom-get",
                "arguments":
                {
                    "path": "/machine/unattached/device[0]",
                    "property": "stepping"
                },
                "id": "stepping"
            },
            {
                "execute": "qom-get",
                "arguments":
                {
                    "path": "/machine/unattached/device[0]",
                    "property": "model-id"
                },
                "id": "model-id"
            },
            {
                "execute": "query-cpu-definitions",
                "id": "definitions"
            }
        ])


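# Collect everything into a single dictionary.  The resulting JSON document
# (what the `gather` subcommand prints) has roughly this shape:
#   {
#     "name":   "<model name from --name or /proc/cpuinfo>",
#     "leaves": [{"eax_in": ..., "ecx_in": ..., "eax": ..., ...}, ...],
#     "via":    "" | " via KVM" | null,
#     "msr":    {"<address>": <64-bit value>, ...},
#     "model":  [<QMP replies from gather_model()>, ...]
#   }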
def gather(args):
    result = dict()
    result["name"] = gather_name(args)
    result["leaves"] = list(gather_cpuid_leaves(args))
    result["via"], result["msr"] = gather_msr()
    result["model"] = list(gather_model(args))
    return result


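# Derive the output file name from the CPU model name by stripping vendor
# prefixes, trademark signs, frequency suffixes and the like.  As an
# illustrative (not exhaustive) example, a name such as
# "Intel(R) Core(TM) i7-8700 CPU @ 3.20GHz" would become
# "x86_64-cpuid-Core-i7-8700".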
def parse_filename(data):
    filename = data["name"].strip()
    filename = re.sub("[ -]+ +", " ", filename)
    filename = re.sub("\\(([Rr]|[Tt][Mm])\\)", "", filename)
    filename = re.sub(".*(Intel|AMD) ", "", filename)
    filename = re.sub(" (Duo|Quad|II X[0-9]+)", " ", filename)
    filename = re.sub(" (CPU|[Pp]rocessor)", "", filename)
    filename = re.sub(" @.*", "", filename)
    filename = re.sub(" APU .*", "", filename)
    filename = re.sub(" SE$", "", filename)
    filename = re.sub(" ", "-", filename)
    return "x86_64-cpuid-{}".format(filename)


def output_xml(data, filename):
    leave_template = \
        " <cpuid" \
        " eax_in='0x{0[eax_in]:08x}'" \
        " ecx_in='0x{0[ecx_in]:02x}'" \
        " eax='0x{0[eax]:08x}'" \
        " ebx='0x{0[ebx]:08x}'" \
        " ecx='0x{0[ecx]:08x}'" \
        " edx='0x{0[edx]:08x}'" \
        "/>\n"

    msr_template = " <msr index='0x{:x}' edx='0x{:08x}' eax='0x{:08x}'/>\n"

    print(filename)
    with open(filename, "wt") as f:
        f.write("<!-- {} -->\n".format(data["name"]))
        f.write("<cpudata arch='x86'>\n")
        for leave in data["leaves"]:
            f.write(leave_template.format(leave))
        for key, value in sorted(data["msr"].items()):
            f.write(msr_template.format(
                int(key),
                0xffffffff & (value >> 32),
                0xffffffff & (value >> 0)))
        f.write("</cpudata>\n")


def output_json(data, filename):
    replies = list()
    for reply in data["model"]:
        if "QMP" in reply:
            continue
        if "timestamp" in reply:
            continue
        if "return" in reply and not reply["return"]:
            continue
        replies.append(reply)

    if not replies:
        return

    if "model-expansion" not in [reply.get("id") for reply in replies]:
        exit(
            "Error: Missing query-cpu-model-expansion reply in "
            "{}".format(filename))

    print(filename)
    with open(filename, "wt") as f:
        for reply in replies:
            if reply is not replies[0]:
                f.write("\n")
            json.dump(reply, f, indent=2)
            f.write("\n")


def parse(args, data):
    filename = parse_filename(data)
    filename_xml = "{}.xml".format(filename)
    filename_json = "{}.json".format(filename)

    output_xml(data, filename_xml)
    output_json(data, filename_json)

    if not os.path.isfile(filename_json):
        return
    if os.path.getsize(filename_json) == 0:
        return

    args.json_files = getattr(args, "json_files", list()) + [filename_json]


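# The feature comparison below works on nested dictionaries keyed by the
# values listed in _KEYS, with the innermost dict holding register bitmasks,
# e.g. cpuData["cpuid"][eax_in][ecx_in] = {"eax": ..., "ebx": ..., ...} and
# cpuData["msr"][index] = {"eax": ..., "edx": ...}.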
def checkFeature(cpuData, feature):
    for key in ["type"] + _KEYS.get(feature["type"], list()):
        if feature[key] not in cpuData:
            return False
        cpuData = cpuData[feature[key]]

    for reg in _REGS.get(feature["type"], list()):
        if feature[reg] > 0 and feature[reg] == feature[reg] & cpuData[reg]:
            return True
    return False


def addFeature(cpuData, feature):
    for key in ["type"] + _KEYS.get(feature["type"], list()):
        if feature[key] not in cpuData:
            cpuData[feature[key]] = dict()
        cpuData = cpuData[feature[key]]

    for reg in _REGS.get(feature["type"], list()):
        cpuData[reg] = cpuData.get(reg, 0) | feature[reg]


def parseQemu(path, features):
    cpuData = {}
    with open(path, "r") as f:
        data, pos = json.JSONDecoder().raw_decode(f.read())

    for (prop, val) in data["return"]["model"]["props"].items():
        if val and prop in features:
            addFeature(cpuData, features[prop])

    return cpuData


def parseCPUData(path):
    cpuData = dict()
    for f in xml.etree.ElementTree.parse(path).getroot():
        if f.tag not in ("cpuid", "msr"):
            continue

        feature = {"type": f.tag}
        for reg in _KEYS[f.tag] + _REGS[f.tag]:
            feature[reg] = int(f.attrib.get(reg, "0"), 0)
        addFeature(cpuData, feature)
    return cpuData


def parseMap():
    path = os.path.dirname(sys.argv[0])
    path = os.path.join(path, "..", "..", "src", "cpu_map", "x86_features.xml")

    cpuMap = dict()
    for f in xml.etree.ElementTree.parse(path).getroot().iter("feature"):
        data = f.find("cpuid")
        if data is None:
            data = f.find("msr")
        if data is None:
            continue

        feature = {"type": data.tag}
        for reg in _KEYS[data.tag] + _REGS[data.tag]:
            feature[reg] = int(data.attrib.get(reg, "0"), 0)
        cpuMap[f.attrib["name"]] = feature

        for alias in f.iterfind("alias"):
            if alias.attrib["source"] == "qemu":
                cpuMap[alias.attrib["name"]] = feature

    return cpuMap


def formatCPUData(cpuData, path, comment):
    print(path)
    with open(path, "w") as f:
        f.write("<!-- " + comment + " -->\n")
        f.write("<cpudata arch='x86'>\n")

        cpuid = cpuData["cpuid"]
        for eax_in in sorted(cpuid.keys()):
            for ecx_in in sorted(cpuid[eax_in].keys()):
                leaf = cpuid[eax_in][ecx_in]
                line = (" <cpuid eax_in='0x%08x' ecx_in='0x%02x' "
                        "eax='0x%08x' ebx='0x%08x' "
                        "ecx='0x%08x' edx='0x%08x'/>\n")
                f.write(line % (
                    eax_in, ecx_in,
                    leaf["eax"], leaf["ebx"], leaf["ecx"], leaf["edx"]))

        if "msr" in cpuData:
            msr = cpuData["msr"]
            for index in sorted(msr.keys()):
                f.write(" <msr index='0x%x' edx='0x%08x' eax='0x%08x'/>\n" %
                        (index, msr[index]['edx'], msr[index]['eax']))

        f.write("</cpudata>\n")


def diff(args):
    cpuMap = parseMap()

    for jsonFile in args.json_files:
        cpuDataFile = jsonFile.replace(".json", ".xml")
        enabledFile = jsonFile.replace(".json", "-enabled.xml")
        disabledFile = jsonFile.replace(".json", "-disabled.xml")

        cpuData = parseCPUData(cpuDataFile)
        qemu = parseQemu(jsonFile, cpuMap)

        enabled = dict()
        disabled = dict()
        for feature in cpuMap.values():
            if checkFeature(qemu, feature):
                addFeature(enabled, feature)
            elif checkFeature(cpuData, feature):
                addFeature(disabled, feature)

        formatCPUData(enabled, enabledFile, "Features enabled by QEMU")
        formatCPUData(disabled, disabledFile, "Features disabled by QEMU")


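# Typical invocations (illustrative):
#   cpu-data.py                 # same as "gather", dumps JSON to stdout
#   cpu-data.py gather | cpu-data.py parse
#   cpu-data.py full            # gather and parse in one step
#   cpu-data.py diff x86_64-cpuid-*.json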
def main():
    parser = argparse.ArgumentParser(description="Gather cpu test data")
    parser.add_argument(
        "--name",
        help="CPU model name. "
             "If unset, the model name is read from '/proc/cpuinfo'.")
    parser.add_argument(
        "--path-to-cpuid",
        metavar="PATH",
        help="Path to 'cpuid' utility. "
             "If unset, the first executable 'cpuid' in $PATH is used.")
    parser.add_argument(
        "--path-to-qemu",
        metavar="PATH",
        help="Path to qemu. "
             "If unset, will try '/usr/bin/qemu-system-x86_64', "
             "'/usr/bin/qemu-kvm', and '/usr/libexec/qemu-kvm'.")
    subparsers = parser.add_subparsers(dest="action")
    subparsers.add_parser(
        "gather",
        help="Acquire data on the target system and output it to stdout. "
             "This is the default.")
    subparsers.add_parser(
        "parse",
        help="Read data from stdin and parse it for libvirt use.")
    subparsers.add_parser(
        "full",
        help="Equivalent to `cpu-data.py gather | cpu-data.py parse`.")
    diffparser = subparsers.add_parser(
        "diff",
        help="Diff a json description of a CPU model against known features.")
    diffparser.add_argument(
        "json_files",
        nargs="+",
        metavar="FILE",
        type=os.path.realpath,
        help="Path to one or more json CPU model descriptions.")

    args = parser.parse_args()

    if not args.action:
        args.action = "gather"

    if not args.path_to_qemu:
        args.path_to_qemu = "qemu-system-x86_64"
        search = [
            "/usr/bin/qemu-system-x86_64",
            "/usr/bin/qemu-kvm",
            "/usr/libexec/qemu-kvm"]
        for f in search:
            if os.path.isfile(f):
                args.path_to_qemu = f

    if args.action in ["gather", "full"]:
        data = gather(args)
        if args.action == "gather":
            json.dump(data, sys.stdout, indent=2)

    if args.action in ["parse", "full"]:
        if args.action == "parse":
            data = json.load(sys.stdin)
        parse(args, data)

    if "json_files" in args:
        diff(args)


if __name__ == "__main__":
    main()