-
Notifications
You must be signed in to change notification settings - Fork 0
/
run.py
148 lines (130 loc) · 5.95 KB
/
run.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
import copy
import time, datetime, requests, os, json, yaml
import docker
def logThis(msg, end='\n'):
    """Print *msg* to stdout prefixed with a UTC timestamp.

    BUGFIX: the original concatenated *msg* into the strftime() format
    string, so any '%' sequence in the message (e.g. '%d') was expanded
    as a date directive. Format the timestamp alone, then append msg.
    """
    print(datetime.datetime.utcnow().strftime("%x %H:%M:%S | ") + msg, end=end)
class Scraper(object):
    """Polls the OpenTTD release feed, tracks which versions have been
    built, and builds/pushes Docker images for any new releases.

    State is persisted in builds.json between runs:
      - knownBuilds:    target name -> last build target queued
      - finishedBuilds: target name -> last build target pushed OK
    """

    def Scrape(self):
        """Fetch latest.yaml from the OpenTTD CDN and rebuild self.data
        as a list of {'version', 'date', 'tag'} dicts, one per channel.

        On a non-200 response, self.data is left as-is and a failure is
        logged (best-effort: the next poll retries).
        """
        page = requests.get('https://cdn.openttd.org/openttd-releases/latest.yaml')
        if page.status_code == 200:
            self.page = page.text
            self.data = []  # clean house
            latestVersions = yaml.load(self.page, Loader=yaml.FullLoader).get('latest')
            for data in latestVersions:
                thisver = {'version': data.get('version'), 'date': data.get('date'), 'tag': data.get('name')}
                self.data.append(thisver)
            logThis("Scrape succeeded")
        else:
            logThis("Scrape failed!")

    def Process(self):
        """Decide which targets need (re)building and queue them on
        self.jobs.

        Returns True when at least one new job was queued, else False.
        Persists state via SaveState() in either case.
        """
        newJobsFlag = False
        for target, data in self.targets.items():
            # All scraped releases matching this target's channel tag and
            # its version-substring filter (e.g. 'RC' / 'beta').
            allPossibleBuildTargets = list(x for x in self.data
                                           if x.get('tag', None) == data.get('tag')
                                           and data.get('search', '').upper() in x.get('version').upper()
                                           )
            if len(allPossibleBuildTargets) == 0:
                # Nothing published for this channel: fall back to the first
                # 'upgrade' alternative we already know a build for.
                succ = False
                # BUGFIX: 'stable' has no 'upgrade' key; data['upgrade'] raised KeyError.
                for alternate in data.get('upgrade', []):
                    copyBuild = self.knownBuilds.get(alternate, False)
                    allPossibleBuildTargets = [copy.copy(copyBuild)]
                    if allPossibleBuildTargets[0]:
                        # BUGFIX: original referenced buildTarget before assignment (NameError).
                        logThis("Target " + target + ': unavailable, superceded by '
                                + allPossibleBuildTargets[0]['version'])
                        allPossibleBuildTargets[0]['tags'] = data['tags']
                        succ = True
                        break
                if not succ:
                    logThis("Target " + target + ': unavailable and no supercession available, skipping')
                    # BUGFIX: 'break' aborted the whole loop, silently skipping
                    # every remaining target; only this target should be skipped.
                    continue
            buildTarget = max(allPossibleBuildTargets, key=(lambda key: key['date']))
            buildTarget['tags'] = data['tags']  # we tag early so that we can easily compare
            if self.knownBuilds.get(target, {}) == buildTarget:
                # we already have the build, have we processed it?
                if self.finishedBuilds.get(target, {}) == buildTarget:
                    logThis("Target " + target + ': version ' + buildTarget[
                        'version'] + " already built, skipping")
                    continue
                else:
                    logThis("Build target for " + target + ': version ' + buildTarget[
                        'version'] + " detected as failed, requeuing")
            else:
                logThis("New build target for " + target + ': version ' + buildTarget['version'])
            self.knownBuilds[target] = buildTarget
            self.jobs.append(buildTarget)
            newJobsFlag = True
        self.SaveState()
        if not newJobsFlag:
            logThis("No new targets")
        return newJobsFlag

    def DispatchJobs(self):
        """Build a Docker image for every queued job, apply its channel
        tags, push the repo, then move completed jobs into
        self.finishedBuilds and persist state."""
        garbage = []
        for job in self.jobs:
            logThis("Building " + job['version'] + " for " + ','.join(job['tags']))
            image = self.docker.images.build(
                path=os.environ.get('DOCKER_BUILDDIR', '/Users/duck/Documents/Workbench/Docker/OpenTTD'),
                rm=True,
                buildargs={'OPENTTD_VERSION': job['version']},
                tag=self.repo + ':' + job['version'])
            for tag in job['tags']:
                image.tag(self.repo, tag)
            logThis("done!")
            garbage.append(job)
        logThis("Builds complete, uploading (this might take a moment)")
        self.docker.images.push(self.repo)
        for job in garbage:
            # BUGFIX: finishedBuilds was keyed by job['tag'] (the release
            # channel, e.g. 'testing'), but Process() looks it up by target
            # name (e.g. 'testing_rc'), so finished builds were never
            # recognized and got requeued forever. Key by the matching
            # knownBuilds target(s) instead.
            for tgt, known in self.knownBuilds.items():
                if known == job:
                    self.finishedBuilds[tgt] = job
            self.jobs.remove(job)
        logThis("Upload complete")
        self.SaveState()

    def LoadState(self):
        """Restore knownBuilds/finishedBuilds from builds.json.

        Date strings that SaveState() stringified (default=str) are
        revived back into datetime objects; a corrupt file is reset to an
        empty JSON object; a missing file is a normal first run.
        """
        def date_hook(json_dict):
            # Revive any value that parses as "%Y-%m-%d %H:%M:%S%z".
            for (key, value) in json_dict.items():
                try:
                    json_dict[key] = datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S%z")
                except (TypeError, ValueError):
                    # BUGFIX: bare 'except:' also swallowed KeyboardInterrupt
                    # and real bugs; only parse failures are expected here.
                    pass
            return json_dict
        try:
            with open('builds.json') as fp:
                try:
                    filedata = json.load(fp, object_hook=date_hook)
                    self.knownBuilds = filedata.get('known', {})
                    self.finishedBuilds = filedata.get('built', {})
                    logThis("Loaded builds from builds.json")
                except json.decoder.JSONDecodeError:
                    # Corrupt state file: reset it to an empty object.
                    # BUGFIX: original leaked the write handle (open() without with).
                    with open('builds.json', 'w') as out:
                        json.dump({}, out)
        except FileNotFoundError:
            pass  # first run: no state yet

    def SaveState(self):
        """Persist knownBuilds/finishedBuilds to builds.json.

        default=str stringifies the datetime values; LoadState()'s
        date_hook parses them back.
        """
        with open('builds.json', 'w') as fp:
            json.dump({'known': self.knownBuilds, 'built': self.finishedBuilds}, fp, default=str)

    @classmethod
    def Run(cls, scraper):
        """One poll cycle: scrape the feed, work out new jobs, and build
        them if any were queued."""
        cls.Scrape(scraper)
        newjobs = cls.Process(scraper)
        if newjobs:
            # Dey took our jerbs!
            logThis("Processing new jobs")
            cls.DispatchJobs(scraper)

    def __init__(self):
        # Scraped release feed entries (filled by Scrape()).
        self.data = []
        # Build targets: channel tag to match, Docker tags to apply, an
        # optional version-substring filter, and fallback targets to
        # inherit from when the channel has nothing published.
        self.targets = {'stable': {'tag': 'stable', 'tags': ['stable', 'latest']},
                        'testing_rc': {'tag': 'testing', 'tags': ['rc'], 'search': 'RC', 'upgrade': ['stable']},
                        'testing_beta': {'tag': 'testing', 'tags': ['beta'], 'search': 'beta',
                                         'upgrade': ['testing_rc', 'stable']}}
        self.jobs = []
        self.knownBuilds = {}
        self.finishedBuilds = {}
        self.LoadState()
        self.repo = 'redditopenttd/openttd'
        self.docker = docker.from_env()
        if os.environ.get('DOCKER_USER', False):
            try:
                self.docker.login(os.environ.get('DOCKER_USER', None), os.environ.get('DOCKER_PASS', None))
            except docker.errors.DockerException as e:
                print(e)
if __name__ == '__main__':
    # Poll for new releases once a minute, forever.
    bot = Scraper()
    while True:
        Scraper.Run(bot)
        time.sleep(60)