Patrick Lühne
60128d03ef
The Git history is sufficient for viewing past status messages. Additionally, long status log files take a long time to render. With this commit, only the most recent 100 lines are kept, and this number can be configured.
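The limit is read from the statusLogSize entry of the storage section in config.yaml (used by writeStatus below); for example, statusLogSize: 100 keeps the 100 most recent entries.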
263 lines · 8.7 KiB · Python
#!/usr/bin/python3

import atexit
import os
import re
import subprocess
import sys
import time
import yaml

import pprint

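# run an external command, optionally passing input on stdin and a working
# directory, and return its decoded output, error output, and exit code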
def executeCommand(command, stdin = None, cwd = None):
    with subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE, stdin = (subprocess.PIPE if stdin is not None else None), cwd = cwd) as process:
        stdout, stderr = process.communicate(input = (stdin.encode("utf-8") if stdin is not None else None))
        exitCode = process.returncode

    return stdout.decode("utf-8"), stderr.decode("utf-8"), exitCode

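# determine the version of the configured plasp binary by parsing the output of plasp -v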
def plaspVersion(config):
    version, _, _ = executeCommand([config["executables"]["plasp"]["binary"], "-v"])
    version = version.strip()

    match = re.match(r'plasp version (.*?)$', version, re.M | re.I)

    return match.group(1)

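# determine the version of the configured clingo binary by parsing the output of clingo -v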
def clingoVersion(config):
    version, _, _ = executeCommand([config["executables"]["clingo"]["binary"], "-v"])
    version = version.strip()

    match = re.match(r'clingo version (.*?)$', version, re.M | re.I)

    return match.group(1)

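# determine the planner version as the Git commit hash and commit date of its working copy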
def plannerVersion(config):
    version, _, _ = executeCommand(["git", "rev-parse", "HEAD"], cwd = config["executables"]["planner"]["directory"])
    date, _, _ = executeCommand(["git", "show", "-s", "--format=%ci"], cwd = config["executables"]["planner"]["directory"])

    return version.strip() + " (" + date.strip() + ")"

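# determine the Fast Downward version from the Mercurial revision of its working copy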
def fastDownwardVersion(config):
    version, _, _ = executeCommand(["hg", "log", "-r.", "-T {rev}:{node} ({date|isodate})"], cwd = config["executables"]["fastDownward"]["directory"])

    return version.strip()

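# run a Git command in the given directory and raise an error if it fails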
def git(command, cwd):
    stdout, stderr, exitCode = executeCommand(["git"] + command, cwd = cwd)

    if exitCode != 0:
        print(stderr, file = sys.stderr)
        raise RuntimeError("git error")

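# prepare the local data repository: clone it if necessary, apply the Git settings, and pull all configured branches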
def initRepo(config):
    dataDir = config["storage"]["local"]

    # clone repo if not existing
    if not os.path.isdir(config["storage"]["local"]):
        git(["clone", config["storage"]["remote"], dataDir], None)

    # default settings
    git(["config", "--local", "user.name", config["storage"]["userName"]], dataDir)
    git(["config", "--local", "user.email", config["storage"]["userEMail"]], dataDir)

    if "userSigningKey" in config["storage"]:
        git(["config", "--local", "user.signingkey", config["storage"]["userSigningKey"]], dataDir)
        git(["config", "--local", "commit.gpgsign", "true"], dataDir)
    else:
        git(["config", "--local", "commit.gpgsign", "false"], dataDir)

    # fetch origin
    git(["fetch"], cwd = dataDir)

    # pull all branches
    for key, branch in config["storage"]["branches"].items():
        git(["checkout", branch], cwd = dataDir)
        git(["pull"], cwd = dataDir)

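# read the benchmark instances and the configurations to test from the config branch of the data repository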
def readBenchmarkConfig(config):
    initRepo(config)

    dataDir = config["storage"]["local"]

    # checkout config branch
    git(["checkout", config["storage"]["branches"]["config"]], cwd = dataDir)

    # read instance list
    instancesFile = os.path.join(config["storage"]["local"], "instances.yaml")

    with open(instancesFile, "r") as stream:
        instances = yaml.load(stream, Loader=yaml.CLoader)

    # read configurations to test
    configurationsFile = os.path.join(config["storage"]["local"], "configurations.yaml")

    with open(configurationsFile, "r") as stream:
        configurations = yaml.load(stream, Loader=yaml.CLoader)

    # flatten lists of options
    for configuration in configurations["configurations"]:
        configuration["options"] = [item for sublist in configuration["options"] for item in sublist]

    return {"configurations": configurations, "instances": instances}

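# build the paths to the PDDL domain and instance files of a benchmark instance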
def inputFilenames(instance, config):
    pddlInstancesDir = config["input"]["pddlInstances"]

    domainFile = os.path.join(pddlInstancesDir, instance["ipc"], "domains", instance["domain"], "domain.pddl")
    instanceFile = os.path.join(pddlInstancesDir, instance["ipc"], "domains", instance["domain"], "instances", "instance-" + str(instance["instance"]) + ".pddl")

    return {"domainFile": domainFile, "instanceFile": instanceFile}

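# build the paths (relative to the data repository) of the output, error, and environment files of a benchmark result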
def outputFilenames(configuration, instance, config):
    instanceID = instance["ipc"] + "_" + instance["domain"] + "_" + str(instance["instance"])
    outputFile = os.path.join(configuration["id"], instanceID + ".out")
    errorFile = os.path.join(configuration["id"], instanceID + ".err")
    environmentFile = os.path.join(configuration["id"], instanceID + ".env")

    return {"outputFile": outputFile, "errorFile": errorFile, "environmentFile": environmentFile}

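# find the next unfinished benchmark job, that is, a configuration/instance pair without stored results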
def nextJob(config):
    benchmarkConfig = readBenchmarkConfig(config)

    dataDir = config["storage"]["local"]

    # checkout results branch
    git(["checkout", config["storage"]["branches"]["results"]], cwd = dataDir)

    configurations = benchmarkConfig["configurations"]["configurations"]
    instances = benchmarkConfig["instances"]

    for instanceSetName, instanceSet in instances.items():
        for instance in instanceSet:
            for configuration in configurations:
                filenames = outputFilenames(configuration, instance, config)
                outputFile = os.path.join(config["storage"]["local"], filenames["outputFile"])
                environmentFile = os.path.join(config["storage"]["local"], filenames["environmentFile"])

                if not os.path.exists(outputFile) or not os.path.exists(environmentFile):
                    return {"configuration": configuration, "instance": instance}

    return None

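# prepend a time-stamped message to status.log, truncate the log to the configured number of lines, and commit and push it to the status branch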
def writeStatus(message, config):
    dataDir = config["storage"]["local"]

    # checkout status branch
    git(["checkout", config["storage"]["branches"]["status"]], cwd = dataDir)

    statusFilename = os.path.join(dataDir, "status.log")

    if os.path.exists(statusFilename):
        with open(statusFilename, "r") as statusFile:
            # add the previous status messages, but truncate them
            content = statusFile.readlines()[0:(config["storage"]["statusLogSize"] - 1)]
    else:
        content = ""

    with open(statusFilename, "w") as statusFile:
        print(time.strftime("%Y-%m-%d %H:%M:%S %z") + "\t" + message + "\n" + "".join(content), file = statusFile, end = "")

    git(["add", "status.log"], dataDir)
    git(["commit", "-m Update status: " + message], dataDir)
    git(["push"], dataDir)

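# run the planner on one instance with the given configuration, store its output, error output, and environment, and commit the results to the results branch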
def runJob(configuration, instance, config):
    jobName = "[" + str(configuration["id"]) + " | " + instance["ipc"] + " | " + instance["domain"] + " | " + str(instance["instance"]) + "]"

    writeStatus("started benchmark job " + jobName, config)

    dataDir = config["storage"]["local"]

    inputFiles = inputFilenames(instance, config)

    # checkout results branch
    git(["checkout", config["storage"]["branches"]["results"]], cwd = dataDir)

    command = \
        [
            config["executables"]["timeout"]["binary"],
            "-m=" + str(config["limits"]["memory"]),
            "-t=" + str(config["limits"]["time"]),
            config["executables"]["planner"]["binary"],
            "--domain=" + inputFiles["domainFile"],
            inputFiles["instanceFile"],
        ]

    command += configuration["options"]

    # TODO: verify planner Git hash
    plannerDir = config["executables"]["planner"]["directory"]
    stdout, stderr, exitCode = executeCommand(command, cwd = plannerDir)

    outputFiles = outputFilenames(configuration, instance, config)
    outputDir = os.path.dirname(os.path.join(config["storage"]["local"], outputFiles["outputFile"]))

    if not os.path.isdir(outputDir):
        os.makedirs(outputDir)

    with open(os.path.join(config["storage"]["local"], outputFiles["outputFile"]), "w") as outputFile, \
            open(os.path.join(config["storage"]["local"], outputFiles["errorFile"]), "w") as errorFile, \
            open(os.path.join(config["storage"]["local"], outputFiles["environmentFile"]), "w") as environmentFile:
        print(stdout, file = outputFile)
        print("# configuration: " + str(configuration), file = errorFile)
        print("# instance: " + str(instance), file = errorFile)
        print("# command: " + str(command), file = errorFile)
        print("# working directory: " + plannerDir, file = errorFile)
        print(stderr, file = errorFile)

        if exitCode != 0:
            print(stderr)

        environment = \
            {
                "configuration": configuration,
                "instance": instance,
                "command": command,
                "workingDirectory": plannerDir,
                "versions": \
                    {
                        "clingo": clingoVersion(config),
                        "plasp": plaspVersion(config),
                        "planner": plannerVersion(config),
                        "fastDownward": fastDownwardVersion(config)
                    }
            }

        print(yaml.dump(environment, default_flow_style = False), file = environmentFile)

    git(["add", outputFiles["outputFile"], outputFiles["errorFile"], outputFiles["environmentFile"]], dataDir)
    git(["commit", "-m Add benchmark result " + jobName], dataDir)
    git(["push"], dataDir)

    if exitCode != 0:
        writeStatus("errors reported for benchmark job " + jobName, config)
    else:
        writeStatus("finished benchmark job " + jobName, config)

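# process benchmark jobs until no unfinished job remains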
def main():
    with open("config.yaml", "r") as stream:
        config = yaml.load(stream, Loader=yaml.CLoader)

    atexit.register(writeStatus, "benchmark runner exited", config)

    performedJobs = 0

    while True:
        job = nextJob(config)

        if not job:
            break

        configuration = job["configuration"]
        instance = job["instance"]

        runJob(configuration, instance, config)
        performedJobs += 1

    # if no job was left to perform, the benchmark series is already complete
    if performedJobs == 0:
        writeStatus("finished benchmark series", config)

main()