Patrick Lühne
24b1bf655b
Checking out branches with many files consumes increasing amounts of time. For this reason, this commit introduces worktrees that represent the individual branches in different directories, while still maintaining only one copy of the .git directory.
300 lines
11 KiB
Python
300 lines
11 KiB
Python
#!/usr/bin/python3
|
|
|
|
import atexit
|
|
import os
|
|
import re
|
|
import subprocess
|
|
import sys
|
|
import time
|
|
import yaml
|
|
|
|
import pprint
|
|
|
|
def executeCommand(command, stdin = None, cwd = None):
	"""Run an external command and capture its output.

	command -- list of program arguments (executed without a shell)
	stdin -- optional string piped to the process's standard input
	cwd -- optional working directory for the child process

	Returns a (stdout, stderr, exitCode) triple with both streams decoded
	as UTF-8.
	"""
	# only open a stdin pipe when input is actually supplied
	# (fixed: compare against None with "is not", not "!=")
	with subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE, stdin = (subprocess.PIPE if stdin is not None else None), cwd = cwd) as process:
		stdout, stderr = process.communicate(input = (stdin.encode("utf-8") if stdin is not None else None))
		exitCode = process.returncode

	return stdout.decode("utf-8"), stderr.decode("utf-8"), exitCode
|
|
|
|
def pythonVersion(config):
	"""Return the version string reported by the configured Python binary."""
	output, _, _ = executeCommand([config["executables"]["python"]["binary"], "--version"])

	# output looks like "Python X.Y.Z"; extract the version part
	matched = re.match(r'Python (.*?)$', output.strip(), re.M | re.I)

	return matched.group(1)
|
|
|
|
def plaspVersion(config):
	"""Return the version string reported by the configured plasp binary."""
	output, _, _ = executeCommand([config["executables"]["plasp"]["binary"], "-v"])

	# output looks like "plasp version X.Y.Z"; extract the version part
	matched = re.match(r'plasp version (.*?)$', output.strip(), re.M | re.I)

	return matched.group(1)
|
|
|
|
def clingoVersion(config):
	"""Return the version string reported by the configured clingo binary."""
	output, _, _ = executeCommand([config["executables"]["clingo"]["binary"], "-v"])

	# output looks like "clingo version X.Y.Z"; extract the version part
	matched = re.match(r'clingo version (.*?)$', output.strip(), re.M | re.I)

	return matched.group(1)
|
|
|
|
def plannerVersion(config):
	"""Return the planner's Git revision and commit date as "<sha> (<date>)"."""
	plannerDir = config["executables"]["planner"]["directory"]

	revision, _, _ = executeCommand(["git", "rev-parse", "HEAD"], cwd = plannerDir)
	date, _, _ = executeCommand(["git", "show", "-s", "--format=%ci"], cwd = plannerDir)

	return "{} ({})".format(revision.strip(), date.strip())
|
|
|
|
def fastDownwardVersion(config):
	"""Return the Fast Downward Mercurial revision as "rev:node (date)"."""
	fastDownwardDir = config["executables"]["fastDownward"]["directory"]

	output, _, _ = executeCommand(["hg", "log", "-r.", "-T {rev}:{node} ({date|isodate})"], cwd = fastDownwardDir)

	return output.strip()
|
|
|
|
def git(command, cwd, enforce = False):
	"""Run a git subcommand in the given working directory.

	On failure, git's stderr is printed; if enforce is set, a RuntimeError
	is raised as well. The command's output is otherwise discarded.
	"""
	_, stderr, exitCode = executeCommand(["git"] + command, cwd = cwd)

	if exitCode == 0:
		return

	print(stderr, file = sys.stderr)

	if enforce:
		raise RuntimeError("git error")
|
|
|
|
def getResultsDir(config):
	"""Path of the local worktree tracking the results branch."""
	localDir = config["storage"]["local"]
	return os.path.join(localDir, "results")
|
|
|
|
def getConfigDir(config):
	"""Path of the local worktree tracking the config branch."""
	localDir = config["storage"]["local"]
	return os.path.join(localDir, "config")
|
|
|
|
def getStatusDir(config):
	"""Path of the local clone tracking the status branch."""
	localDir = config["storage"]["local"]
	return os.path.join(localDir, "status")
|
|
|
|
def configureGit(dir, config):
	"""Apply the runner's Git identity (and optional GPG signing) to a repo."""
	storage = config["storage"]

	# identity used for commits made by the benchmark runner
	git(["config", "--local", "user.name", storage["userName"]], dir, enforce = True)
	git(["config", "--local", "user.email", storage["userEMail"]], dir, enforce = True)

	# sign commits only when a signing key is configured
	if "userSigningKey" not in storage:
		git(["config", "--local", "commit.gpgsign", "false"], dir, enforce = True)
		return

	git(["config", "--local", "user.signingkey", storage["userSigningKey"]], dir, enforce = True)
	git(["config", "--local", "commit.gpgsign", "true"], dir, enforce = True)
|
|
|
|
def initRepo(config):
	"""Clone the storage repository and set up per-branch worktrees.

	The status branch is cloned normally; the results and config branches
	are attached as worktrees next to it, so all three directories share a
	single .git directory (checking out branches with many files repeatedly
	would otherwise get increasingly slow). Afterwards, all branches are
	fetched and pulled.
	"""
	resultsDir = getResultsDir(config)
	configDir = getConfigDir(config)
	statusDir = getStatusDir(config)

	branches = config["storage"]["branches"]

	# clone repo if not existing
	if not os.path.isdir(statusDir):
		os.makedirs(statusDir)
		git(["clone", config["storage"]["remote"], "--branch=" + branches["status"], statusDir], None, enforce = True)
		# attach results and config branches as sibling worktrees and track
		# their remote counterparts
		git(["worktree", "add", os.path.join("..", "results"), "-b" + branches["results"], "origin/" + branches["results"]], statusDir, enforce = True)
		git(["branch", "--set-upstream-to=" + "origin/" + branches["results"]], resultsDir, enforce = True)
		git(["worktree", "add", os.path.join("..", "config"), "-b" + branches["config"], "origin/" + branches["config"]], statusDir, enforce = True)
		git(["branch", "--set-upstream-to=" + "origin/" + branches["config"]], configDir, enforce = True)

	# default settings (configureGit already covers the signing-key and
	# gpgsign setup, so no extra per-directory handling is needed)
	configureGit(resultsDir, config)
	configureGit(configDir, config)
	configureGit(statusDir, config)

	# fetch origin
	git(["fetch"], cwd = statusDir)

	# pull all branches
	git(["pull"], cwd = configDir)
	git(["pull"], cwd = statusDir)
	git(["pull"], cwd = resultsDir)
|
|
|
|
def readBenchmarkConfig(config):
	"""Sync the repository, then load instances.yml and configurations.yml.

	Returns {"configurations": ..., "instances": ...} with each
	configuration's nested option lists flattened into one list.
	"""
	initRepo(config)

	configDir = getConfigDir(config)

	# read instance list
	# NOTE(review): yaml.load with CLoader allows arbitrary tags — fine for
	# the repo's own config files, but not for untrusted input
	with open(os.path.join(configDir, "instances.yml"), "r") as stream:
		instances = yaml.load(stream, Loader = yaml.CLoader)

	# read configurations to test
	with open(os.path.join(configDir, "configurations.yml"), "r") as stream:
		configurations = yaml.load(stream, Loader = yaml.CLoader)

	# flatten lists of options
	for configuration in configurations["configurations"]:
		flattened = []
		for sublist in configuration["options"]:
			flattened.extend(sublist)
		configuration["options"] = flattened

	return {"configurations": configurations, "instances": instances}
|
|
|
|
def inputFilenames(instance, config):
	"""Compute the domain and instance PDDL file paths for a benchmark instance."""
	# all files of an instance live under <pddlInstances>/<ipc>/domains/<domain>
	domainDir = os.path.join(config["input"]["pddlInstances"], instance["ipc"], "domains", instance["domain"])

	domainFile = os.path.join(domainDir, "domain.pddl")
	instanceFile = os.path.join(domainDir, "instances", "instance-" + str(instance["instance"]) + ".pddl")

	return {"domainFile": domainFile, "instanceFile": instanceFile}
|
|
|
|
def outputFilenames(configuration, instance, config):
	"""Compute the per-job result file paths, relative to the results directory.

	config is accepted for signature consistency with the other filename
	helpers but is not used.
	"""
	instanceID = "_".join([instance["ipc"], instance["domain"], str(instance["instance"])])

	# all three files share the same base path and differ only in extension
	basePath = os.path.join(configuration["id"], instanceID)

	return {"outputFile": basePath + ".out", "errorFile": basePath + ".err", "environmentFile": basePath + ".env"}
|
|
|
|
def nextJob(config):
	"""Find the first (configuration, instance) pair without complete results.

	A job counts as done only when its output, error, and environment files
	all exist. Returns {"configuration": ..., "instance": ...} or None when
	every scheduled job has results.
	"""
	benchmarkConfig = readBenchmarkConfig(config)
	resultsDir = getResultsDir(config)

	configurations = benchmarkConfig["configurations"]["configurations"]
	instances = benchmarkConfig["instances"]

	for instanceSetID, instanceSet in instances.items():
		for instance in instanceSet:
			for configuration in configurations:
				# skip configurations not scheduled for this instance set
				# (checked first, before computing any paths — the original
				# built all three paths even for skipped configurations)
				if instanceSetID not in configuration["instanceSets"]:
					continue

				filenames = outputFilenames(configuration, instance, config)

				outputFile = os.path.join(resultsDir, filenames["outputFile"])
				errorFile = os.path.join(resultsDir, filenames["errorFile"])
				environmentFile = os.path.join(resultsDir, filenames["environmentFile"])

				if not os.path.exists(outputFile) or not os.path.exists(errorFile) or not os.path.exists(environmentFile):
					return {"configuration": configuration, "instance": instance}

	return None
|
|
|
|
def writeStatus(message, config):
	"""Prepend a timestamped message to status.log, then commit and push it.

	The log is truncated to config["storage"]["statusLogSize"] lines so the
	status branch stays small.
	"""
	statusDir = getStatusDir(config)
	statusFilename = os.path.join(statusDir, "status.log")

	if os.path.exists(statusFilename):
		with open(statusFilename, "r") as statusFile:
			# add the previous status messages, but truncate them
			content = statusFile.readlines()[0:(config["storage"]["statusLogSize"] - 1)]
	else:
		# fixed: use a list, matching the type of the readlines() branch
		# (the original assigned "", which only worked by accident of join)
		content = []

	with open(statusFilename, "w") as statusFile:
		print(time.strftime("%Y-%m-%d %H:%M:%S %z") + "\t" + message + "\n" + "".join(content), file = statusFile, end = "")

	git(["add", "status.log"], statusDir, enforce = True)
	git(["commit", "-m Update status: " + message], statusDir, enforce = True)

	# pushing is best-effort; on failure, git's stderr is merely printed
	git(["push", "origin", config["storage"]["branches"]["status"]], statusDir)
|
|
|
|
def runJob(configuration, instance, config):
	# Run one benchmark job (one configuration on one instance), store the
	# planner's output, stderr, and an environment snapshot in the results
	# worktree, and commit and push the result files. Status messages are
	# written before and after the run.

	# human-readable job identifier used in status messages and commit messages
	jobName = "[" + str(configuration["id"]) + " | " + instance["ipc"] + " | " + instance["domain"] + " | " + str(instance["instance"]) + "]"

	writeStatus("started benchmark job " + jobName, config)

	resultsDir = getResultsDir(config)
	inputFiles = inputFilenames(instance, config)

	# wrap the planner invocation with the external timeout tool, which
	# enforces the configured memory (-m) and time (-t) limits
	command = \
	[
		config["executables"]["timeout"]["binary"],
		"-m=" + str(config["limits"]["memory"]),
		"-t=" + str(config["limits"]["time"]),
		config["executables"]["python"]["binary"],
		config["executables"]["planner"]["binary"],
		"--domain=" + inputFiles["domainFile"],
		inputFiles["instanceFile"],
	]

	# append the configuration-specific planner options
	command += configuration["options"]

	# TODO: verify planner Git hash
	plannerDir = config["executables"]["planner"]["directory"]
	stdout, stderr, exitCode = executeCommand(command, cwd = plannerDir)

	outputFiles = outputFilenames(configuration, instance, config)
	outputDir = os.path.dirname(os.path.join(resultsDir, outputFiles["outputFile"]))

	# result files are grouped per configuration; create the directory lazily
	if not os.path.isdir(outputDir):
		os.makedirs(outputDir)

	with open(os.path.join(resultsDir, outputFiles["outputFile"]), "w") as outputFile, \
		open(os.path.join(resultsDir, outputFiles["errorFile"]), "w") as errorFile, \
		open(os.path.join(resultsDir, outputFiles["environmentFile"]), "w") as environmentFile:
		# .out file: the planner's raw standard output
		print(stdout, file = outputFile)
		# .err file: reproduction metadata as comments, then the raw stderr
		print("# configuration: " + str(configuration), file = errorFile)
		print("# instance: " + str(instance), file = errorFile)
		print("# command: " + str(command), file = errorFile)
		print("# working directory: " + plannerDir, file = errorFile)
		print("# exit code: " + str(exitCode), file = errorFile)
		print(stderr, file = errorFile)

		# echo errors to the console as well
		if exitCode != 0:
			print(stderr)

		# .env file: YAML snapshot of everything needed to reproduce the run,
		# including the versions of all involved tools
		environment = \
		{
			"configuration": configuration,
			"instance": instance,
			"command": command,
			"workingDirectory": plannerDir,
			"versions": \
			{
				"python": pythonVersion(config),
				"clingo": clingoVersion(config),
				"plasp": plaspVersion(config),
				"planner": plannerVersion(config),
				"fastDownward": fastDownwardVersion(config)
			},
			"exitCode": exitCode,
		}

		print(yaml.dump(environment, default_flow_style = False), file = environmentFile)

	# commit all three result files, marking failed runs as corrupt
	git(["add", outputFiles["outputFile"], outputFiles["errorFile"], outputFiles["environmentFile"]], resultsDir, enforce = True)

	if exitCode == 0:
		message = "Add benchmark result"
	else:
		message = "Add corrupt benchmark result"

	git(["commit", "-m " + message + " " + jobName], resultsDir, enforce = True)
	# pushing is best-effort; the commit is preserved locally either way
	git(["push", "origin", config["storage"]["branches"]["results"]], resultsDir)

	if exitCode != 0:
		writeStatus("errors reported for benchmark job " + jobName, config)
	else:
		writeStatus("finished benchmark job " + jobName, config)
|
|
|
|
def main():
	"""Load config.yml and run pending benchmark jobs until none are left."""
	with open("config.yml", "r") as stream:
		config = yaml.load(stream, Loader = yaml.CLoader)

	# always leave a status message, even when the runner crashes or is killed
	atexit.register(writeStatus, "benchmark runner exited", config)

	performedJobs = 0

	while True:
		job = nextJob(config)

		if not job:
			break

		configuration = job["configuration"]
		instance = job["instance"]

		runJob(configuration, instance, config)
		performedJobs += 1

	if performedJobs == 0:
		# nothing was left to do, so the whole series is already complete
		# (fixed: was "writeStats", a NameError — the status message could
		# never actually be written)
		writeStatus("finished benchmark series", config)
|
|
|
|
# run only when executed as a script, not when imported as a module
if __name__ == "__main__":
	main()
|