# Doxygen source listing of updateSourceFiles.py (preCICE v3.2.0); navigation chrome removed.
1#!/usr/bin/env python3
2
3import collections
4import glob
5import os
6import pathlib
7import subprocess
8import sys
9
# Files whose directory path contains any of these substrings are filtered out.
IGNORE_PATTERNS = ["drivers", "mapping/device"]

# Configured files, which should be ignored by git, yet installed by CMake.
CONFIGURED_PUBLIC = ["${PROJECT_BINARY_DIR}/src/precice/Version.h"]

# Configured files, which should be ignored by git.
CONFIGURED_SOURCES = [
    "${PROJECT_BINARY_DIR}/src/precice/impl/versions.hpp",
    "${CMAKE_BINARY_DIR}/src/precice/impl/versions.cpp",
]
21
22
def get_gitfiles():
    """Return the paths of all git-tracked files, or None if git fails.

    Runs ``git ls-files --full-name`` in the current working directory; the
    caller is expected to have changed into the repository root beforehand.
    """
    ret = subprocess.run(
        ["git", "ls-files", "--full-name"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    if ret.returncode != 0:
        # Signal "git unavailable" to the caller, which then skips verification.
        return None
    return ret.stdout.decode().split()
34
35
def file_extension(name):
    """Return the extension of *name* including the leading dot ('' if none)."""
    return os.path.splitext(name)[1]
39
40
def get_cmake_file_paths(root):
    """Return the four generated cmake list files under *root*.

    The result is a ``CMakePaths`` namedtuple with the fields
    ``sources``, ``utests``, ``itests`` and ``benchmarks``.
    """
    sources = os.path.join(root, "src", "sources.cmake")
    utests = os.path.join(root, "src", "tests.cmake")
    itests = os.path.join(root, "tests", "tests.cmake")
    benchmarks = os.path.join(root, "benchmarks", "sources.cmake")
    cmakepaths = collections.namedtuple(
        "CMakePaths", "sources utests itests benchmarks"
    )
    return cmakepaths(sources, utests, itests, benchmarks)
50
51
def is_precice_root(root):
    """Return True if all four generated cmake files exist under *root*."""
    paths = get_cmake_file_paths(root)
    return all(map(os.path.exists, paths))
55
56
def get_file_lists(root):
    """Collect source, header, test, and benchmark files under *root*.

    Returns a 5-tuple of sorted path lists relative to *root*:
    (sources, public, utests, itests, benchmarks).  Directories whose path
    contains an IGNORE_PATTERNS entry are skipped; CONFIGURED_PUBLIC and
    CONFIGURED_SOURCES are appended even though they only exist in the
    build directory.
    """
    src_dir = os.path.join(root, "src")
    tests_dir = os.path.join(root, "tests")
    bench_dir = os.path.join(root, "benchmarks")
    exts = [".cpp", ".c", ".hpp", ".h"]

    def _relevant_files(dirpath, filenames):
        # Paths (relative to root) of the entries with an accepted extension.
        return [
            os.path.relpath(os.path.join(dirpath, name), root)
            for name in filenames
            if file_extension(name) in exts
        ]

    def _ignored(dirpath):
        return any(elem in dirpath for elem in IGNORE_PATTERNS)

    # Find interface headers
    public = glob.glob(os.path.join(src_dir, "precice", "*.hpp"))
    public += CONFIGURED_PUBLIC
    public = [os.path.relpath(p, root) for p in public]

    # Split src/ into unit tests (any directory path containing "test")
    # and library sources.
    sources, utests = [], []
    for dirpath, _, filenames in os.walk(src_dir):
        if _ignored(dirpath):
            continue
        files = _relevant_files(dirpath, filenames)
        if "test" in dirpath:
            utests += files
        else:
            sources += files
    sources += CONFIGURED_SOURCES

    itests = []
    for dirpath, _, filenames in os.walk(tests_dir):
        if _ignored(dirpath):
            continue
        itests += _relevant_files(dirpath, filenames)

    benchmarks = []
    for dirpath, _, filenames in os.walk(bench_dir):
        benchmarks += _relevant_files(dirpath, filenames)

    return (
        sorted(sources),
        sorted(public),
        sorted(utests),
        sorted(itests),
        sorted(benchmarks),
    )
111
112
# NOTE(review): the original name of this function was lost in extraction and
# it is not called elsewhere in this file — confirm the name against upstream.
def to_test_suite(path):
    """
    Extracts the test suite from a path and translates it to the boost test name
    """
    # parts[1] is the suite directory, e.g. "tests/serial-coupling/..." -> "serial-coupling".
    suite_dir = pathlib.PurePath(path).parts[1]
    return "".join(part.capitalize() for part in suite_dir.split("-"))
120
121
# Templates for the generated cmake files; each "{}" is filled with a
# newline-joined file list by the generate_* functions below.
SOURCES_BASE = """#
# This file lists all sources that will be compiled into the precice library
#

target_sources(preciceCore
    PRIVATE
    {}
    )

#
# Select headers to install
#

set_property(TARGET precice PROPERTY PUBLIC_HEADER
    {}
    )
"""
TESTS_BASE = """#
# This file lists all test sources that will be compiled into the test executable
#
target_sources(testprecice
    PRIVATE
    {}
    )
"""
ITESTS_BASE = """#
# This file lists all integration test sources and test suites
#
target_sources(testprecice
    PRIVATE
    {}
    )
"""
BENCHMARKS_BASE = """#
# This file lists all benchmarks that will be compiled into precice-bench
#

target_sources(precice-bench
    PRIVATE
    {}
    )
"""
164
165
def generate_lib_sources(sources, public):
    """Render the library sources.cmake content from source and header lists."""
    joined_sources = "\n ".join(sources)
    joined_public = "\n ".join(public)
    return SOURCES_BASE.format(joined_sources, joined_public)
168
169
def generate_unit_tests(utests):
    """Render the src/tests.cmake content from the unit-test file list."""
    return TESTS_BASE.format("\n ".join(utests))
172
173
def generate_integration_tests(itests):
    """Render the tests/tests.cmake content from the integration-test file list."""
    return ITESTS_BASE.format("\n ".join(itests))
176
177
def generate_benchmark_sources(sources):
    """Render the benchmarks/sources.cmake content from the benchmark file list."""
    return BENCHMARKS_BASE.format("\n ".join(sources))
180
181
def _write_generated(path, content):
    # Log the target path, then (over)write one generated cmake file.
    print(" {}".format(path))
    with open(path, "w") as f:
        f.write(content)


def main():
    """Verify git tracking of all sources and regenerate the cmake file lists.

    Must be run from the preCICE repository root.  Returns the process exit
    code: 1 when not run from the repository root, 0 otherwise (a failed git
    verification is reported but does not change the exit code).
    """
    root = os.curdir
    if not is_precice_root(root):
        print("Current dir {} is not the root of the precice repository!".format(root))
        return 1
    sources, public, utests, itests, benchmarks = get_file_lists(root)
    print(
        "Detected files:\n sources: {}\n public headers: {}\n unit tests: {}\n integration tests: {}\n benchmarks: {}".format(
            len(sources), len(public), len(utests), len(itests), len(benchmarks)
        )
    )

    gitfiles = get_gitfiles()
    if gitfiles:
        # Everything found on disk must be either tracked by git or a
        # configured (build-generated) file.
        # NOTE(review): benchmarks are not included in this check — presumably
        # intentional; confirm.
        not_tracked = list(
            set(sources + public + utests + itests)
            - set(gitfiles + CONFIGURED_SOURCES + CONFIGURED_PUBLIC)
        )
        if not_tracked:
            print("The source tree contains files not tracked by git.")
            print("Please do one of the following with them:")
            print(" - track them using 'git add'")
            print(" - add them to IGNORE_PATTERNS in this script")
            print(" - add them to CONFIGURED_SOURCES in this script!")
            print("Files:")
            # Sorted for deterministic, reproducible output.
            for path in sorted(not_tracked):
                print(" {}".format(path))
            print("Verification FAILED")
        else:
            print("Verification SUCCEEDED")
    else:
        print("Git did not run successfully.")
        print("Verification SKIPPED")

    print("Generating CMake files")
    files = get_cmake_file_paths(root)

    print("Writing Files")
    _write_generated(files.sources, generate_lib_sources(sources, public))
    _write_generated(files.utests, generate_unit_tests(utests))
    _write_generated(files.itests, generate_integration_tests(itests))
    _write_generated(files.benchmarks, generate_benchmark_sources(benchmarks))

    print("done")
    return 0
243
244
# Script entry point: propagate main()'s return value as the process exit code.
if __name__ == "__main__":
    sys.exit(main())