-
Notifications
You must be signed in to change notification settings - Fork 2
/
suite_report.py
executable file
·2308 lines (2093 loc) · 85.3 KB
/
suite_report.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python3
# *****************************COPYRIGHT*******************************
# (C) Crown copyright Met Office. All rights reserved.
# For further details please refer to the file COPYRIGHT.txt
# which you should have received as part of this distribution.
# *****************************COPYRIGHT*******************************
"""
## NOTE ##
This module is one of several for which the Master copy is in the
UM repository. When making changes, please ensure the changes are made in
the UM repository or they will be lost during the release process when the UM
copy is copied over.
Script to process the results of a suite and write a summary to file. The
summary is in Trac wiki mark-up. Any projects that do not have a local
mirror repository are assumed not to be used at that site and are
excluded from the report.
Owner: Scientific Software Development and Deployment team
(formerly : UM System Development Team)
Cylc Suite Syntax: shutdown handler = "suite_report.py"
Command Line syntax:
suite_report.py -S <suite_dir> [-v] [-q] [-N] [-L <log_dir>]
"""
from __future__ import print_function
import glob
import os
import re
import sqlite3
import sys
import traceback
import time
import subprocess
import json
from optparse import OptionParser, OptionGroup
from collections import defaultdict
from fcm_bdiff import get_branch_diff_filenames
# File names used to locate suite configuration and output (Cylc7 vs Cylc8).
CYLC_SUITE_ENV_FILE = "cylc-suite-env"
PROCESSED_SUITE_RC = "suite.rc.processed"
PROCESSED_FLOW_CYLC = "flow-processed.cylc"
ROSE_SUITE_RUN_CONF = "rose-suite-run.conf"
ROSE_SUITE_RUN_CONF_CYLC8 = "-rose-suite.conf"
SUITE_DB_FILENAME = "cylc-suite.db"
SUITE_DB_FILENAME_CYLC8 = "db"
# Name of the Trac wiki mark-up report written by this script.
TRAC_LOG_FILE = "trac.log"
# Default task-filtering level for the generated report (see debug_print_obj
# for what each level hides).
DEFAULT_VERBOSITY = 3
# Trac wiki mark-up used to flag "pink" failures in the task table.
PINK_FAIL_TEXT = "'''[[span(style=color: #FF00FF, pink failure )]]'''"
# Order in which task states are presented in the report.
DESIRED_ORDER = [PINK_FAIL_TEXT, "failed", "succeeded"]
# Report table background colours keyed by primary project.
BACKGROUND_COLOURS = {
    "um": "#FFFFBF",
    "lfric_apps": "#E9D2FF",
    "jules": "#BFD0FF",
    "ukca": "#BFFFD1",
    "unknown": "#BFFFD1",
}
# fcm executable per site ("true" is a no-op command for unknown sites).
FCM = {
    "meto": "fcm",
    "ecmwf": "fcm",
    "nci": "fcm",
    "bom": "fcm",
    "uoe": "fcm",
    "niwa": "fcm",
    "kma": "fcm",
    "vm": "fcm",
    "jasmin": "fcm",
    "cehwl1": "fcm",
    "mss": "fcm",
    "ncas": "fcm",
    "psc": "fcm",
    "uoleeds": "fcm",
    "Unknown": "true",
}
# Jobs whose resource usage is monitored, per site.
RESOURCE_MONITORING_JOBS = {
    "meto": [
        "atmos-xc40_cce_um_fast_omp-seukv-4x9-noios-2t",
    ],
    "ecmwf": [],
    "nci": [],
    "bom": [],
    "uoe": [],
    "niwa": [],
    "kma": [],
    "vm": [],
    "jasmin": [],
    "cehwl1": [],
    "mss": [],
    "ncas": [],
    "psc": [],
    "uoleeds": [],
    "Unknown": [],
}
# Base URL of the Cylc Review (formerly Rose Bush) service, per site.
CYLC_REVIEW_URL = {
    "meto": "http://fcm1/cylc-review",
    "ecmwf": "Unavailable",
    "nci": "http://accessdev.nci.org.au/cylc-review",
    "bom": "http://scs-watchdog-dev/rose-bush",
    "uoe": "Unavailable",
    "niwa": "http://w-rose01.maui.niwa.co.nz/cylc-review",
    "kma": "Unavailable",
    "vm": "http://localhost/cylc-review",
    "jasmin": "Unavailable",
    "cehwl1": "Unavailable",
    "mss": "Unavailable",
    "ncas": "http://puma.nerc.ac.uk/cylc-review",
    "psc": "Unavailable",
    "uoleeds": "Unavailable",
    "Unknown": "Unavailable",
}
# Substrings of rose_ana task names whose failures deserve highlighting.
HIGHLIGHT_ROSE_ANA_FAILS = [
    "_vs_",
    "lrun_crun_atmos",
    "proc",
    "atmos_omp",
    "atmos_nruncrun",
    "atmos_thread",
    "-v-",
]
# Groups regarded as "common" per site; when every group run is common,
# successful tasks may be hidden automatically (see SuiteReport.__init__).
COMMON_GROUPS = {
    "meto": [
        "all",
        "nightly",
        "developer",
        "xc40",
        "ex1a",
        "spice",
        "xc40_nightly",
        "ex1a_nightly",
        "spice_nightly",
        "xc40_developer",
        "ex1a_developer",
        "spice_developer",
        "ukca",
        "recon",
        "jules",
        "xc40_ukca",
        "ex1a_ukca",
        "spice_ukca",
        "xc40_jules",
        "ex1a_jules",
        "spice_jules",
    ],
    "ecmwf": [],
    "nci": [],
    "bom": [],
    "uoe": [],
    "niwa": [],
    "kma": [],
    "vm": [],
    "jasmin": [],
    "cehwl1": [],
    "mss": [],
    "ncas": [],
    "psc": [],
    "Unknown": [],
}
def _read_file(filename):
"""Takes filename (str)
Return contents of a file, as list of strings."""
if os.path.exists(filename):
with open(filename, "r") as filehandle:
lines = filehandle.readlines()
else:
print('[ERROR] Unable to find file :\n "{0:s}"'.format(filename))
raise IOError(
'_read_file got invalid filename : "{0:s}"'.format(filename)
)
return lines
def _write_file(filename, lines, newline=False):
"""Takes filemname and list of strings and opt newline boolean.
Writes array to file of given name.
The optional newline argument adds a newline at the end of each
element of the list.
Returns None"""
retn = "\n" if newline else ""
with open(filename, "w") as filehandle:
for line in lines:
filehandle.write("{0:s}{1:s}".format(line, retn))
def _run_command(command, ignore_fail=False):
"""Takes command and command line options as a list.
Runs the command with subprocess.Popen.
Returns the exit code, standard out and standard error as list.
"""
pobj = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
pobj.wait()
retcode, stdout, stderr = (
pobj.returncode,
pobj.stdout.read().decode("utf-8"),
pobj.stderr.read().decode("utf-8"),
)
if retcode != 0 and not ignore_fail:
print("[ERROR] running {0:s}".format(command))
print("[INFO] RC: {0:}".format(retcode))
print("[INFO] Stdout: {0:s}".format(stdout))
print("[INFO] Stderr: {0:s}".format(stderr))
raise IOError("run_command")
# Reformat stdout into a list
stdout = "".join(stdout)
stdout = stdout.split("\n")
return retcode, stdout, stderr
def _remove_quotes(string):
"""Takes, modifies and returns string.
Removes all quotes from the string.
None input results in None output"""
if string is not None:
string = re.sub(r'"', r"", string)
string = re.sub(r"'", r"", string)
return string
def _dict_merge(main_dict, addon_dict, force=False):
"""Merge addon dictionary into main dictionary.
Takes main_dict, addon_dict and optional bool 'force'
Returns new merged dictionary.
Optional argument force=True allows forced overwrite of existing
value with None from the addon dictionary. Otherwise original
value is preserved when value in addon dict is None.
This preserving behaviour differentiates it from main.update(addon)"""
merged_dict = main_dict.copy()
for key, value in addon_dict.items():
if isinstance(value, dict):
if key not in merged_dict:
merged_dict[key] = {}
merged_dict[key] = _dict_merge(merged_dict[key], value)
else:
# Switch to Force main to take whatever addon has
# No matching key in main - take whatever addon has including None
# Or
# Override main with contents of addon
if force or key not in merged_dict or value is not None:
merged_dict[key] = value
return merged_dict
def _select_preferred(option_list):
"""Takes a list of strings, returns the fist one that is not None.
If the strings are report text in preffered order it essentially
ensures you get the preffered option from a list of choices."""
pref_opt = None
for choice in option_list:
if choice is not None:
pref_opt = choice
break
return pref_opt
def _escape_svn(url):
"""Takes and returns url as string.
Escape 'svn:' urls as Trac tries to convert them to links."""
if not re.search(r"!svn://", url): # Make sure it's not already escaped.
url = re.sub(r"svn://", r"!svn://", url)
return url
def _get_current_head_revision(mirror_url, fcm_exec):
    """Return the head revision of *mirror_url* as a string.

    Runs 'fcm branch-info' on the (local) mirror URL and extracts the
    'Last Changed Rev' value; returns "" when none is reported."""
    pattern = re.compile(r"Last Changed Rev:\s*(\d+)")
    _, stdout, _ = _run_command([fcm_exec, "branch-info", mirror_url])
    for line in stdout:
        match = pattern.search(line)
        if match:
            return str(match.group(1))
    return ""
def _url_to_trac_link(url):
"""Takes a URL as string, edits text to resemble a Trac link for code
on the SRS.
Returns Trac link form of URL or None if 'svn' was absent from the url.
"""
if re.search(r"/svn/", url):
link_2_url = re.sub(r"svn", r"trac", url)
elements = link_2_url.split("/")
elements.insert(elements.index("trac") + 2, "browser")
link_2_url = "/".join(elements)
link_2_url = re.sub(r"@", r"?rev=", link_2_url)
else:
link_2_url = None
return link_2_url
def _parse_string(
varname,
lines,
remove_quotes=True,
split_on_comma=False,
default_unknown=False,
):
"""Given a variable name in the rose-suite-run.conf file, return its
value."""
find_var = re.compile(r"{0}\s*=\s*(.*)".format(varname))
if split_on_comma:
value = [None]
elif default_unknown:
value = "Unknown"
else:
value = None
for line in lines:
result = find_var.search(line)
if result:
value = result.group(1)
if remove_quotes:
value = _remove_quotes(value.rstrip())
if split_on_comma:
# Remove brackets and split on comma
value = re.sub(r"\[", r"", value)
value = re.sub(r"\]", r"", value)
value = value.split(",")
return value
class SuiteReport(object):
"""Object to hold data and methods required to produce a suite report
from a rose-stem suite output."""
def __init__(
    self,
    suite_path,
    log_path=None,
    verbosity=DEFAULT_VERBOSITY,
    sort_by_name=False,
):
    """Requires a path to the suite output directory.
    Takes optional arguments for log_path (output dir), and
    verbosity which dictates what tasks are omitted from the log.
    also the boolean sort_by_name to force sorting by task name over
    status when generating the task table in the report.
    """
    self.suite_path = os.path.abspath(suite_path)
    # A log/config directory only exists for Cylc8 suites; its presence
    # is used throughout as the Cylc7/Cylc8 discriminator.
    self.is_cylc8 = os.path.isdir(
        os.path.join(self.suite_path, "log", "config")
    )
    self.log_path = log_path
    self.sort_by_name = sort_by_name
    self.verbosity = verbosity
    self.creation_time = time.strftime("%Y/%m/%d %X")
    self.uncommitted_changes = 0
    self.site = "Unknown"
    self.rose_orig_host = None
    self.groups = []
    self.job_sources = {}
    self.primary_project = ""
    self.projects = {}
    # Seed "failed" so it always appears in the counts, even at zero.
    self.status_counts = defaultdict(int)
    self.status_counts["failed"] = 0
    try:
        # Resolve "runN" soft link - Required for Cylc8 cylc-review path
        link_target = os.readlink(self.suite_path)
        suitename = os.path.join(os.path.dirname(self.suite_path), link_target)
    except OSError:
        # Not a symlink - use the path as given.
        suitename = self.suite_path
    suite_dir, self.suitename = suitename.split("cylc-run/")
    # Default to userID from suite path unless CYLC_SUITE_OWNER is present
    self.suite_owner = os.environ.get(
        "CYLC_SUITE_OWNER",
        os.path.basename(suite_dir.rstrip("/"))
    )
    # Populate site/groups/versions, keyword->URL map and job sources
    # from the suite's configuration and version files.
    self.parse_rose_suite_run()
    self.initialise_projects()
    self.parse_processed_config_file()
    projects = self.check_versions_files()
    self.job_sources = _dict_merge(self.job_sources, projects)
    # Work out which project this suite is run as - hierarchical structure
    # with lfric_apps at the top, then UM, then the rest
    if "LFRIC_APPS" in self.job_sources.keys():
        self.primary_project = "LFRIC_APPS"
    elif "UM" in self.job_sources.keys():
        self.primary_project = "UM"
    elif "JULES" in self.job_sources.keys():
        self.primary_project = "JULES"
    elif "UKCA" in self.job_sources.keys():
        self.primary_project = "UKCA"
    else:
        self.primary_project = "UNKNOWN"
    self.groups = [_remove_quotes(group) for group in self.groups]
    fcm_exec = FCM[self.site]
    invalid = []
    # Normalise every job source to SRS + mirror URLs, attach revisions,
    # Trac links, keywords, ticket numbers and branch-diff file lists.
    for project in self.job_sources:
        proj_dict = self.job_sources[project]
        proj_dict["tested source"] = _remove_quotes(
            proj_dict["tested source"]
        )
        # Prefer an explicit "repo loc" (from version files); otherwise
        # fall back to the tested source.
        if "repo loc" in proj_dict:
            proj_dict["repo loc"] = self.convert_to_srs(
                proj_dict["repo loc"], self.projects
            )
        else:
            proj_dict["repo loc"] = self.convert_to_srs(
                proj_dict["tested source"], self.projects
            )
        proj_dict["repo mirror"] = self.convert_to_mirror(
            proj_dict["repo loc"], self.projects
        )
        # If the mirror doesn't exist, move on to the next project.
        if not self.check_repository(fcm_exec, proj_dict["repo mirror"]):
            invalid.append(project)
            continue
        proj_dict["parent mirror"] = self.set_parent(
            fcm_exec, proj_dict["repo mirror"]
        )
        proj_dict["parent loc"] = self.convert_to_srs(
            proj_dict["parent mirror"], self.projects
        )
        # Check "repo loc" and "parent loc" have revisions,
        # and if not, try to get a head of 'trunk' one for them.
        for location in ("repo", "parent"):
            url = proj_dict[location + " loc"]
            mirror_url = proj_dict[location + " mirror"]
            if url is None or mirror_url is None:
                continue
            if ":" in url and "@" not in url:
                revision = _get_current_head_revision(mirror_url, fcm_exec)
                proj_dict[location + " loc"] = url + "@" + revision
                proj_dict[location + " mirror"] = (
                    mirror_url + "@" + revision
                )
        proj_dict["repo link"] = self.generate_link(proj_dict["repo loc"])
        proj_dict["parent link"] = self.generate_link(
            proj_dict["parent loc"]
        )
        # If those attempts to generate links didn't work, try the hope
        # and guess approach.
        if proj_dict["repo link"] is None:
            proj_dict["repo link"] = self.link_from_loc_layout(
                proj_dict["repo link"], proj_dict["repo mirror"], fcm_exec
            )
        if proj_dict["parent link"] is None:
            proj_dict["parent link"] = self.link_from_loc_layout(
                proj_dict["parent loc"],
                proj_dict["parent mirror"],
                fcm_exec,
            )
        # Final attempt to ensure the links have revision numbers and not
        # keywords which aren't evaluated in the browser.
        if proj_dict["repo link"] is not None and re.search(
            r"rev=[a-zA-z]", proj_dict["repo link"]
        ):
            revision = self.revision_from_loc_layout(
                proj_dict["repo mirror"], fcm_exec
            )
            proj_dict["repo link"] = re.sub(
                r"rev=[a-zA-z0-9.]+",
                "rev=" + revision,
                proj_dict["repo link"],
            )
        proj_dict["human repo loc"] = self.convert_to_keyword(
            proj_dict["repo loc"], self.projects
        )
        proj_dict["human parent"] = self.convert_to_keyword(
            proj_dict["parent loc"], self.projects
        )
        proj_dict["ticket no"] = self.ascertain_ticket_number(
            proj_dict["repo mirror"], fcm_exec
        )
        proj_dict["bdiff_files"] = self.get_altered_files_list(
            proj_dict["repo mirror"]
        )
    # Check to see if ALL the groups being run fall into the "common groups"
    # category. This is used to control automatic hiding of successful tasks
    # later.
    if self.site == "meto" and "all" in self.groups:
        self.only_common_groups = True
    else:
        self.only_common_groups = all(
            [
                group.strip() in COMMON_GROUPS[self.site]
                for group in self.groups
            ]
        )
    # Finally, remove any projects which were deemed invalid.
    for project in invalid:
        del self.job_sources[project]
def debug_print_obj(self):
    """Debug print method.
    Prints everything in the SuiteReport object."""
    print("-" * 80 + "\nSet up SuiteReport object\n" + "-" * 80 + "\n\n")
    # Several attributes get special-case formatting; everything else is
    # printed with a generic key/value line.
    for key, value in self.__dict__.items():
        if key == "projects":
            # Too large to dump in full - just report the entry count.
            print('{0:s} contains "{1:d}" entries.'.format(key, len(value)))
        elif key == "sort_by_name":
            if value:
                print('{0:s} is :"True"'.format(key))
            else:
                print('{0:s} is :"False"'.format(key))
        elif key == "only_common_groups":
            if value:
                print('{0:s} is :"True"'.format(key))
            else:
                print('{0:s} is :"False"'.format(key))
        elif key == "verbosity":
            # Translate the numeric level into what it hides.
            text = "Verbosity level is set to : "
            if value >= 4:
                print(
                    text
                    + "Hide Housekeeping, Gatekeeping and Successful tasks"
                )
            elif value >= 3:
                print(
                    text
                    + "Hide Housekeeping, Gatekeeping and if all groups run"
                    'were "common" groups also hide Successful tasks'
                )
            elif value >= 2:
                print(text + "Hide Housekeeping and Gatekeeping tasks")
            elif value >= 1:
                print(text + "Hide Housekeeping tasks")
            else:
                print(text + "Forcibly Print Everything.")
        elif key == "job_sources":
            # Nested dictionary - delegate to its own pretty-printer.
            self.print_job_sources(value)
        else:
            print('{0:s} is :"{1:}"'.format(key, value))
    print(
        "\n" + "-" * 80 + "\nEnd of SuiteReport object\n" + "-" * 80 + "\n"
    )
@staticmethod
def print_job_sources(job_srcs_dict):
"""Debug print method.
Prints everything in projects dictionary."""
for key, value in job_srcs_dict.items():
print(" {0:s} :".format(key))
for sub_key, sub_value in value.items():
if isinstance(sub_value, bool):
if sub_value:
print(' {0:s} is :"True"'.format(sub_key))
else:
print(' {0:s} is :"False"'.format(sub_key))
else:
print(' {0:s} is :"{1:}"'.format(sub_key, sub_value))
def parse_processed_config_file(self):
    """Parse the suite.rc.processed file.
    Extract all projects present that begin with a "SOURCE_".
    Allow SOURCE_<project> to override any SOURCE_<project>_<extension>
    entries. Creating a dictionary of format {<project> : <URL>,...}
    Also Extract the host machine rose was launched on.
    Takes full path for suite dir.
    Sets class variables"""
    suite_dir = self.suite_path
    rose_orig_host = "Unknown rose_orig_host"
    srp_file = ""
    if self.is_cylc8:
        # Cylc8: the processed config is a *-rose-suite.conf file in
        # log/config - take the first regular file that matches.
        srp_file = os.path.join(suite_dir, "log", "config")
        for filename in os.listdir(srp_file):
            if (
                os.path.isfile(os.path.join(srp_file, filename))
                and ROSE_SUITE_RUN_CONF_CYLC8 in filename
            ):
                srp_file = os.path.join(srp_file, filename)
                break
        else:
            # for/else: loop completed without finding a match.
            sys.exit(
                "Error: Couldn't find a *-rose-suite.conf file in "
                + f"{srp_file}"
            )
    else:
        srp_file = os.path.join(suite_dir, PROCESSED_SUITE_RC)
    find_orig_host = re.compile(r"ROSE_ORIG_HOST\s*=\s*(.*)")
    # in pattern below, need to include "_REV" after the project name and
    # before the " *=" and then exclude lines with "_REV" later as
    # otherwise the search will identify PROJ_REV as a unique project
    # name. The other option would be to have an alternate 3rd group match
    # of "_.*?" but that would exclude any future project names that might
    # have an underscore in them.
    find_sources = re.compile(
        r"\s*(?:HOST_SOURCE_|SOURCE_)(.*?)(|_BASE|_MIRROR|_REV)\s*=\s*(.*)"
    )
    sources = {}
    multiple_branches = {}
    for line in _read_file(srp_file):
        # check for ROSE_ORIG_HOST
        result = find_orig_host.search(line)
        if result:
            rose_orig_host = result.group(1).rstrip()
        # check for SOURCE_.*
        result = find_sources.match(line)
        # Discard the ones which were SOURCE_PROJ_REV
        if result and result.group(2) != "_REV":
            # Allow SOURCE_PROJ to override any existing entries
            # Otherwise only add new entries
            if result.group(1) not in sources or result.group(2) == "":
                sources[result.group(1)] = {}
                # Space-separated value means several branches: record
                # the full list, but test only the first source.
                if " " in result.group(3):
                    multiple_branches[(result.group(1))] = result.group(3)
                    sources[result.group(1)][
                        "tested source"
                    ] = result.group(3).split()[0]
                else:
                    sources[result.group(1)][
                        "tested source"
                    ] = result.group(3)
    self.rose_orig_host = rose_orig_host
    self.job_sources = sources
    self.multi_branches = multiple_branches
    return
def parse_rose_suite_run(self):
    """Parse rose-suite-run.conf file.
    Takes full path for suite dir.
    Sets class variables (site, groups, fcm/cylc/rose versions,
    required_comparisons, trustzone, host_xcs)."""
    suite_dir = self.suite_path
    rsr_file = ""
    if self.is_cylc8:
        # Cylc8 keeps the conf as log/config/*-rose-suite.conf.
        glob_format = "{0:s}/*{1:s}".format(
            os.path.join(suite_dir, "log", "config"),
            ROSE_SUITE_RUN_CONF_CYLC8,
        )
        rsr_file = glob.glob(glob_format)[0]
    else:
        rsr_file = os.path.join(suite_dir, "log", ROSE_SUITE_RUN_CONF)
    lines = _read_file(rsr_file)
    self.site = _parse_string("SITE", lines, default_unknown=True)
    self.groups = _parse_string(
        "RUN_NAMES", lines, split_on_comma=True, remove_quotes=False
    )
    self.fcm = _parse_string("FCM_VERSION", lines)
    self.cylc = _parse_string("CYLC_VERSION", lines)
    if not self.cylc:
        # CYLC_VERSION absent - infer the major version from the
        # directory layout detected in __init__.
        self.cylc = "8" if self.is_cylc8 is True else "7"
    self.rose = _parse_string("ROSE_VERSION", lines)
    # This test is a little problematic when running this script on a JULES
    # rose-stem suite as JULES has no 'need' of the two compare variables
    # and to prevent the warning their absence would produce from occurring
    # unnecessarily in JULES they have been added to rose-suite.conf for now
    compare_output = _parse_string("COMPARE_OUTPUT", lines)
    compare_wallclock = _parse_string("COMPARE_WALLCLOCK", lines)
    self.required_comparisons = (
        compare_output == "true" and compare_wallclock == "true"
    )
    self.trustzone = os.environ.get("TRUSTZONE", None)
    self.host_xcs = False
    if self.site == "meto":
        # Detect whether the xcs host was selected for the XC40 jobs.
        for line in lines:
            if "HOST_XC40='xcsr'" in line:
                self.host_xcs = True
    return
def initialise_projects(self):
    """Populate self.projects with {keyword: URL} from 'fcm kp'.

    Only primary-location keywords following the Met Office convention
    are kept: '.x' keywords pointing at the SRS, '.xm' keywords
    pointing at the local mirror."""
    fcm_exec = FCM[self.site]
    _, stdout, _ = _run_command([fcm_exec, "kp"])
    primary_loc = re.compile(r"location{primary}")
    keyword_url = re.compile(r"\[(.*)\]\s*=\s*(.*)")
    external_kw = re.compile(r".x$")
    mirror_kw = re.compile(r".xm$")
    srs_url = re.compile(r"https://code.metoffice")
    mirror_url = re.compile(r"svn:|https://")
    projects = {}
    for line in stdout:
        if not primary_loc.search(line):
            continue
        result = keyword_url.search(line)
        if not result:
            continue
        keyword = result.group(1)
        url = result.group(2)
        # Keep '.x' keywords with SRS URLs and '.xm' keywords with
        # mirror URLs; discard anything else.
        is_external = external_kw.search(keyword) and srs_url.match(url)
        is_mirror = mirror_kw.search(keyword) and mirror_url.match(url)
        if is_external or is_mirror:
            projects[keyword] = url
    self.projects = projects
    return
def cylc7_check_versions_file(self, projects):
"""
cylc7 version of the check_versions_files function. Can be deleted once
cylc7 no longer supported.
"""
find_proj_name = re.compile(r"/(\w+)-\d+.version")
version_files = []
version_files = glob.glob(
"{0:s}/*.version".format(os.path.join(self.suite_path, "log"))
)
for vfile in version_files:
if "rose-suite-run.version" in vfile:
continue
result = find_proj_name.search(vfile)
if result:
project = result.group(1).upper()
projects[project] = {}
url, revision, wc_changes = self.parse_versions_file(vfile)
projects[project]["last changed rev"] = revision
projects[project]["working copy changes"] = wc_changes
projects[project]["version file"] = os.path.basename(vfile)
if wc_changes:
self.uncommitted_changes += 1
if url is not None:
if revision is not None:
ending = "@" + revision
else:
ending = ""
projects[project]["repo loc"] = url + ending
return projects
def check_versions_files(self):
"""Locate the log/*.version files.
Call parse_versions_fileto parse the contents of each file.
Recover which projects are being augmented by branch or WC
Takes full path for suite dir.
Returns dictionary of project dictionares and number of projects
with uncommitted changes."""
projects = {}
self.uncommitted_changes = 0
if not self.is_cylc8:
return self.cylc7_check_versions_file(projects)
vcs_path = os.path.join(self.suite_path, "log", "version", "vcs.json")
with open(vcs_path) as vcs_file:
vcs_data = json.load(vcs_file)
if (
"url" not in vcs_data
or "revision" not in vcs_data
or "status" not in vcs_data
):
sys.exit(
f"The version control json file {vcs_file} did not have "
"entries for all of 'url', 'revision' and 'status'"
)
if vcs_data["url"] is not None:
if vcs_data["revision"] is not None:
ending = "@" + vcs_data["revision"]
else:
ending = ""
project = vcs_data["url"]
prefix = "https://code.metoffice.gov.uk/svn/"
prefix_svn = "svn://fcm1/"
if project.startswith(prefix):
project = project[len(prefix) :]
if project.startswith(prefix_svn):
project = project[len(prefix_svn) :]
project = re.split("[/.]", project)[0].upper()
projects[project] = {}
# Use the version control url as the project source
# This url isn't necessarily to top of the working copy so split
# the url around "branches" or "trunk" to ensure the correct url
url = vcs_data["url"]
if "branches" in url:
splitter = "branches"
else:
splitter = "trunk"
start_url, end_url = url.split(f"/{splitter}/", 1)
start_url += f"/{splitter}/"
end_url = end_url.split("/")
if splitter == "branches":
# For branches, format is "/[dev|test]/<username>/<branch-name>"
end_url = f"{end_url[0]}/{end_url[1]}/{end_url[2]}"
else:
# For trunk, format is just "/trunk/"
end_url = ""
projects[project]["repo loc"] = start_url + end_url + ending
for item in vcs_data["status"]:
if not item.startswith("?") and len(item) > 0:
self.uncommitted_changes += 1
return projects
@staticmethod
def parse_versions_file(vfile):
    """Extract the branch URL, revision and uncommitted-change flag
    from a .version file.

    Takes the full path to the file.
    Returns (url, revision, working_copy_changes); url and revision
    are None when not present, the flag is a boolean."""
    url = None
    revision = None
    working_copy_changes = False
    status_marker = re.compile(r"SVN STATUS", re.IGNORECASE)
    url_prefix = re.compile(r"URL:\s*")
    rev_prefix = re.compile(r"Last Changed Rev:\s*")
    for line in _read_file(vfile):
        # A status section means the working copy had local changes.
        if status_marker.search(line):
            working_copy_changes = True
        if url_prefix.match(line):
            url = url_prefix.sub(r"", line).rstrip()
        if rev_prefix.match(line):
            revision = rev_prefix.sub(r"", line).rstrip()
    return url, revision, working_copy_changes
@staticmethod
def set_parent(fcm_exec, mirror_url):
    """Try to ascertain the branch parent of *mirror_url* on the
    internal mirror repository via 'fcm branch-info'.

    Returns the parent URL, or None when no parent is reported."""
    _, stdout, _ = _run_command(
        [fcm_exec, "branch-info", mirror_url], ignore_fail=True
    )
    parent_pattern = re.compile(r"Branch Parent:\s*(.*)")
    parent = None
    for line in stdout:
        match = parent_pattern.search(line)
        if match:
            parent = match.group(1).rstrip()
    return parent
@staticmethod
def check_repository(fcm_exec, url):
    """Return True when *url* (SRS or mirror) is an accessible
    repository, i.e. 'fcm info' exits cleanly; False otherwise."""
    retcode, _, _ = _run_command([fcm_exec, "info", url], ignore_fail=True)
    return retcode == 0
@staticmethod
def export_file(repo_url, fname, outname="~/temp.txt"):
"""
Runs an fcm export on a file and saves it as outname
Attempts to check it out 5 times to account for any network glitches.
Returns None if all attempts fail, otherwise the user expanded path to
the file
Inputs: repo_url, eg. fcm:um.xm_tr
fname: the path of the file in the repo
outname: the path to the output file. Default ~/temp.txt
"""
fname = fname.lstrip("/")
outname = os.path.expanduser(outname)
# Try 5 times, if all fail then use working copy version
for _ in range(5):
try:
subproc = "fcm export -q {}/{} {} --force".format(
repo_url, fname, outname
)
subprocess.check_output(subproc, shell=True)
return outname
except subprocess.CalledProcessError as error:
print(error)
else:
return None
@staticmethod
def clean_tempfile(fname="~/temp.txt"):
"""
Clean up a temp file exported
"""
try:
os.remove(os.path.expanduser(fname))
except EnvironmentError:
pass
def generate_owner_dictionary(self, mode):
    """
    Function that parses an owners file to create a dictionary of owners,
    mapping a configuration/section to an owner
    Input:
    mode - either config or code depending on dictionary created
    Returns {section: [owner, others]} or None on failure/bad mode.
    """
    if mode == "config":
        fname = "ConfigOwners.txt"
        sep = "Configuration"
    elif mode == "code":
        fname = "CodeOwners.txt"
        sep = "Owner"
    else:
        # Unrecognised mode - nothing to parse.
        return None
    # Export the Owners file from the HOT
    exported_file = "~/tmp_owners.txt"
    file_path = self.export_file("fcm:um.xm_tr", fname, exported_file)
    if file_path is None:
        # Couldn't check out file - use working copy Owners file instead
        # NOTE(review): get_working_copy_path is defined elsewhere in
        # this file (not visible in this chunk).
        wc_path = get_working_copy_path(
            self.job_sources["UM"]["tested source"]
        )
        if not wc_path:
            wc_path = ""
        file_path = os.path.join(wc_path, fname)
        print("Using the checked out version of Owners file")
    # Read through file and generate dictionary
    try:
        with open(file_path, "r") as inp_file:
            owners_dict = {}
            inside_listing = False
            for line in inp_file:
                # Owner entries are listed between {{{ and }}} markers.
                if "{{{" in line:
                    inside_listing = True
                    continue
                if "}}}" in line:
                    inside_listing = False
                    continue
                if inside_listing:
                    # Skip blank lines and the column-header line.
                    if line != "\n" and sep not in line:
                        dummy_list = line.split()
                        section = dummy_list[0].strip()
                        owners = dummy_list[1].strip()
                        if "umsysteam" in owners:
                            # Normalise the shared-mailbox owner entry.
                            owners = "!umsysteam@metoffice.gov.uk"
                        try:
                            # Optional third column (deputies/others);
                            # "--" means none.
                            others = dummy_list[2].replace("\n", "")
                            if others == "--":
                                others = ""
                        except IndexError:
                            others = ""
                        owners_dict.update(
                            {section.lower(): [owners, others]}
                        )
    except EnvironmentError:
        print("Can't find working copy for Owners File")
        return None
    # Clean up the checked out copy of the owners file
    self.clean_tempfile(exported_file)
    return owners_dict
def create_approval_table(self, needed_approvals, mode):
"""
Function to write out the trac.log table for config and CO approvals
Input: needed_approvals - dictionary with keys as owners and values,
a list of configs or code sections
mode - either "config" or "code" depending on which type of table
is being created
"""
table = ["'''Required " + mode.capitalize() + " Owner Approvals'''"]
if mode == "config":
table += [" || '''Owner''' || '''Approval''' || '''Configs''' || "]
else:
table += [
" || '''Owner (Deputy)''' || '''Approval''' || "
+ "'''Code Section''' || "
]
if needed_approvals is None:
table += [
" |||||| No UM "