#!/usr/bin/python3

import atexit
import os
import re
import subprocess
import sys
import time

import yaml
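
# the script reads its settings from config.yml in the working directory; the
# keys accessed below suggest roughly this shape (all values illustrative):
#
#     storage:
#         local: /path/to/local/checkout
#         remote: git@example.org:benchmark-results.git
#         branches: {status: status, results: results, config: config}
#         userName: Benchmark Runner
#         userEMail: benchmark@example.org
#         userSigningKey: ABCD1234    # optional
#         statusLogSize: 100
#     executables:
#         python: {binary: python3}
#         clingo: {binary: clingo}
#         plasp: {binary: plasp}
#         planner: {binary: planner.py, directory: /path/to/planner}
#         fastDownward: {directory: /path/to/fast-downward}
#         timeout: {binary: timeout}
#     limits: {memory: 8192, time: 900}
#     input: {pddlInstances: /path/to/pddl-instances}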

# run a command, optionally passing a string to its standard input, and return
# its decoded standard output, standard error, and exit code
def executeCommand(command, stdin = None, cwd = None):
    with subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE, stdin = (subprocess.PIPE if stdin is not None else None), cwd = cwd) as process:
        stdout, stderr = process.communicate(input = (stdin.encode("utf-8") if stdin is not None else None))
        exitCode = process.returncode

    return stdout.decode("utf-8"), stderr.decode("utf-8"), exitCode
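
# example:
#     stdout, _, exitCode = executeCommand(["echo", "hello"])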

def pythonVersion(config):
    version, _, _ = executeCommand([config["executables"]["python"]["binary"], "--version"])
    version = version.strip()

    match = re.match(r'Python (.*?)$', version, re.M | re.I)

    return match.group(1)

def plaspVersion(config):
    version, _, _ = executeCommand([config["executables"]["plasp"]["binary"], "-v"])
    version = version.strip()

    match = re.match(r'plasp version (.*?)$', version, re.M | re.I)

    return match.group(1)

def clingoVersion(config):
    version, _, _ = executeCommand([config["executables"]["clingo"]["binary"], "-v"])
    version = version.strip()

    match = re.match(r'clingo version (.*?)$', version, re.M | re.I)

    return match.group(1)

def plannerVersion(config):
    version, _, _ = executeCommand(["git", "rev-parse", "HEAD"], cwd = config["executables"]["planner"]["directory"])
    date, _, _ = executeCommand(["git", "show", "-s", "--format=%ci"], cwd = config["executables"]["planner"]["directory"])

    return version.strip() + " (" + date.strip() + ")"

def fastDownwardVersion(config):
    version, _, _ = executeCommand(["hg", "log", "-r.", "-T {rev}:{node} ({date|isodate})"], cwd = config["executables"]["fastDownward"]["directory"])

    return version.strip()

# run a git command in the given directory; report errors and, if enforce is
# set, raise on failure
def git(command, cwd, enforce = False):
    _, stderr, exitCode = executeCommand(["git"] + command, cwd = cwd)

    if exitCode != 0:
        print(stderr, file = sys.stderr)

        if enforce:
            raise RuntimeError("git error")

def getResultsDir(config):
    return os.path.join(config["storage"]["local"], "results")

def getConfigDir(config):
    return os.path.join(config["storage"]["local"], "config")

def getStatusDir(config):
    return os.path.join(config["storage"]["local"], "status")

def configureGit(dir, config):
    # default settings
    git(["config", "--local", "user.name", config["storage"]["userName"]], dir, enforce = True)
    git(["config", "--local", "user.email", config["storage"]["userEMail"]], dir, enforce = True)

    if "userSigningKey" in config["storage"]:
        git(["config", "--local", "user.signingkey", config["storage"]["userSigningKey"]], dir, enforce = True)
        git(["config", "--local", "commit.gpgsign", "true"], dir, enforce = True)
    else:
        git(["config", "--local", "commit.gpgsign", "false"], dir, enforce = True)
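
# the local storage is a single git repository with three worktrees: the status
# branch is cloned to status/, and the results and config branches are attached
# as linked worktrees at results/ and config/ (see initRepo below)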

def initRepo(config):
    resultsDir = getResultsDir(config)
    configDir = getConfigDir(config)
    statusDir = getStatusDir(config)

    # clone repo if not existing
    if not os.path.isdir(statusDir):
        os.makedirs(statusDir)
        git(["clone", config["storage"]["remote"], "--branch=" + config["storage"]["branches"]["status"], statusDir], None, enforce = True)
        git(["worktree", "add", os.path.join("..", "results"), "-b" + config["storage"]["branches"]["results"], "origin/" + config["storage"]["branches"]["results"]], statusDir, enforce = True)
        git(["branch", "--set-upstream-to=" + "origin/" + config["storage"]["branches"]["results"]], resultsDir, enforce = True)
        git(["worktree", "add", os.path.join("..", "config"), "-b" + config["storage"]["branches"]["config"], "origin/" + config["storage"]["branches"]["config"]], statusDir, enforce = True)
        git(["branch", "--set-upstream-to=" + "origin/" + config["storage"]["branches"]["config"]], configDir, enforce = True)

    # default settings (configureGit also covers the GPG signing options)
    configureGit(resultsDir, config)
    configureGit(configDir, config)
    configureGit(statusDir, config)

    # fetch origin
    git(["fetch"], cwd = statusDir)

    # pull all branches
    git(["pull"], cwd = configDir)
    git(["pull"], cwd = statusDir)
    git(["pull"], cwd = resultsDir)

def readBenchmarkConfig(config):
    initRepo(config)

    configDir = getConfigDir(config)

    # read instance list
    instancesFile = os.path.join(configDir, "instances.yml")

    with open(instancesFile, "r") as stream:
        instances = yaml.load(stream, Loader = yaml.CLoader)

    # read configurations to test
    configurationsFile = os.path.join(configDir, "configurations.yml")

    with open(configurationsFile, "r") as stream:
        configurations = yaml.load(stream, Loader = yaml.CLoader)

    # flatten lists of options
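    # e.g., [["--option1"], ["--option2", "value"]] becomes
    # ["--option1", "--option2", "value"]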
    for configuration in configurations["configurations"]:
        configuration["options"] = [item for sublist in configuration["options"] for item in sublist]

    return {"configurations": configurations, "instances": instances}

def inputFilenames(instance, config):
    pddlInstancesDir = config["input"]["pddlInstances"]

    domainFile = os.path.join(pddlInstancesDir, instance["ipc"], "domains", instance["domain"], "domain.pddl")

    # fall back to per-instance domain files if there is no common domain file
    if not os.path.exists(domainFile):
        domainFile = os.path.join(pddlInstancesDir, instance["ipc"], "domains", instance["domain"], "domains", "domain-" + str(instance["instance"]) + ".pddl")

    instanceFile = os.path.join(pddlInstancesDir, instance["ipc"], "domains", instance["domain"], "instances", "instance-" + str(instance["instance"]) + ".pddl")

    return {"domainFile": domainFile, "instanceFile": instanceFile}

def outputFilenames(configuration, instance, config):
    instanceID = instance["ipc"] + "_" + instance["domain"] + "_" + str(instance["instance"])

    outputFile = os.path.join(configuration["id"], instanceID + ".out")
    errorFile = os.path.join(configuration["id"], instanceID + ".err")
    environmentFile = os.path.join(configuration["id"], instanceID + ".env")

    return {"outputFile": outputFile, "errorFile": errorFile, "environmentFile": environmentFile}
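
# e.g., for the hypothetical instance {"ipc": "ipc-2000", "domain": "blocks",
# "instance": 1} and a configuration with ID "c1", the input files are read
# from <pddlInstances>/ipc-2000/domains/blocks/, and the output files are
# c1/ipc-2000_blocks_1.{out,err,env} within the results worktree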

# find the first configuration/instance pair for which no results exist yet
def nextJob(config):
    benchmarkConfig = readBenchmarkConfig(config)
    resultsDir = getResultsDir(config)

    configurations = benchmarkConfig["configurations"]["configurations"]
    instances = benchmarkConfig["instances"]

    for instanceSetID, instanceSet in instances.items():
        for instance in instanceSet:
            for configuration in configurations:
                if instanceSetID not in configuration["instanceSets"]:
                    continue

                filenames = outputFilenames(configuration, instance, config)
                outputFile = os.path.join(resultsDir, filenames["outputFile"])
                errorFile = os.path.join(resultsDir, filenames["errorFile"])
                environmentFile = os.path.join(resultsDir, filenames["environmentFile"])

                # a job is pending if any of its three output files is missing
                if not os.path.exists(outputFile) or not os.path.exists(errorFile) or not os.path.exists(environmentFile):
                    return {"configuration": configuration, "instance": instance}

    return None

# prepend a message to status.log on the status branch, commit, and push
def writeStatus(message, config):
    statusDir = getStatusDir(config)
    statusFilename = os.path.join(statusDir, "status.log")

    if os.path.exists(statusFilename):
        with open(statusFilename, "r") as statusFile:
            # keep the previous status messages, but truncate them
            content = statusFile.readlines()[0:(config["storage"]["statusLogSize"] - 1)]
    else:
        content = []

    with open(statusFilename, "w") as statusFile:
        print(time.strftime("%Y-%m-%d %H:%M:%S %z") + "\t" + message + "\n" + "".join(content), file = statusFile, end = "")

    git(["add", "status.log"], statusDir, enforce = True)
    git(["commit", "-m", "Update status: " + message], statusDir, enforce = True)
    git(["push", "origin", config["storage"]["branches"]["status"]], statusDir)
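
# each status.log line is a timestamp and a message separated by a tab, with
# the newest entry first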

def runJob(configuration, instance, config):
    jobName = "[" + str(configuration["id"]) + " | " + instance["ipc"] + " | " + instance["domain"] + " | " + str(instance["instance"]) + "]"

    writeStatus("started benchmark job " + jobName, config)

    resultsDir = getResultsDir(config)
    inputFiles = inputFilenames(instance, config)

    # run the planner under the external timeout tool to enforce the memory and
    # time limits
    command = \
        [
            config["executables"]["timeout"]["binary"],
            "-m=" + str(config["limits"]["memory"]),
            "-t=" + str(config["limits"]["time"]),
            config["executables"]["python"]["binary"],
            config["executables"]["planner"]["binary"],
            "--domain=" + inputFiles["domainFile"],
            inputFiles["instanceFile"],
        ]

    command += configuration["options"]
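
    # e.g. (illustrative): ["timeout", "-m=8192", "-t=900", "python3",
    # "planner.py", "--domain=/path/to/domain.pddl", "/path/to/instance-1.pddl"]
    # followed by the configuration's options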

    # TODO: verify planner Git hash
    plannerDir = config["executables"]["planner"]["directory"]
    stdout, stderr, exitCode = executeCommand(command, cwd = plannerDir)

    outputFiles = outputFilenames(configuration, instance, config)
    outputDir = os.path.dirname(os.path.join(resultsDir, outputFiles["outputFile"]))

    if not os.path.isdir(outputDir):
        os.makedirs(outputDir)

    with open(os.path.join(resultsDir, outputFiles["outputFile"]), "w") as outputFile, \
            open(os.path.join(resultsDir, outputFiles["errorFile"]), "w") as errorFile, \
            open(os.path.join(resultsDir, outputFiles["environmentFile"]), "w") as environmentFile:
        print(stdout, file = outputFile)

        print("# configuration: " + str(configuration), file = errorFile)
        print("# instance: " + str(instance), file = errorFile)
        print("# command: " + str(command), file = errorFile)
        print("# working directory: " + plannerDir, file = errorFile)
        print("# exit code: " + str(exitCode), file = errorFile)
        print(stderr, file = errorFile)

        if exitCode != 0:
            print(stderr)

        # record the full experimental environment for reproducibility
        environment = \
            {
                "configuration": configuration,
                "instance": instance,
                "command": command,
                "workingDirectory": plannerDir,
                "versions": \
                    {
                        "python": pythonVersion(config),
                        "clingo": clingoVersion(config),
                        "plasp": plaspVersion(config),
                        "planner": plannerVersion(config),
                        "fastDownward": fastDownwardVersion(config),
                    },
                "exitCode": exitCode,
            }

        print(yaml.dump(environment, default_flow_style = False), file = environmentFile)

    git(["add", outputFiles["outputFile"], outputFiles["errorFile"], outputFiles["environmentFile"]], resultsDir, enforce = True)

    if exitCode == 0:
        message = "Add benchmark result"
    else:
        message = "Add corrupt benchmark result"

    git(["commit", "-m", message + " " + jobName], resultsDir, enforce = True)
    git(["push", "origin", config["storage"]["branches"]["results"]], resultsDir)

    if exitCode != 0:
        writeStatus("errors reported for benchmark job " + jobName, config)
    else:
        writeStatus("finished benchmark job " + jobName, config)
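
# the runner is resumable: nextJob rescans the results worktree on each
# iteration, so a restarted runner continues with the first missing result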

def main():
    with open("config.yml", "r") as stream:
        config = yaml.load(stream, Loader = yaml.CLoader)

    atexit.register(writeStatus, "benchmark runner exited", config)

    performedJobs = 0

    while True:
        job = nextJob(config)

        if not job:
            break

        configuration = job["configuration"]
        instance = job["instance"]

        runJob(configuration, instance, config)
        performedJobs += 1

    if performedJobs == 0:
        writeStatus("finished benchmark series", config)

if __name__ == "__main__":
    main()