Commit 0be2c63

Merge pull request #89 from sommersoft/val_contents_py_only
Don't Check non-.py Example Filenames For the Library Name
2 parents 179741a + 81738c4 commit 0be2c63
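
The substance of this change: when the validator scans a library's examples/ directory, only filenames ending in .py must now start with the library name, so non-Python files in examples/ no longer trip the check. A minimal sketch of the new filter follows; examples_list and lib_name are hypothetical stand-ins for the values the real validator builds from the GitHub contents API and the repository name.

# Sketch only: `examples_list` and `lib_name` are made-up stand-ins for values
# the validator derives from the GitHub API and the repository name.
examples_list = [
    {"name": "fancysensor_simpletest.py"},  # .py file: name is still checked
    {"name": "README.md"},                  # non-.py file: now skipped
]
lib_name = "fancysensor"

all_have_name = True
for example in examples_list:
    # Behavior from this PR: the library-name prefix check only applies to .py files.
    if (not example["name"].lower().startswith(lib_name)
            and example["name"].endswith(".py")):
        all_have_name = False

print(all_have_name)  # True: the README no longer causes a failure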

File tree

1 file changed (+90 -39 lines)

adabot/lib/circuitpython_library_validators.py (+90 -39)
@@ -181,13 +181,15 @@ def validate_repo_state(self, repo):
             errors.append(ERROR_MISSING_LICENSE)
         if not repo["permissions"]["push"]:
             errors.append(ERROR_MISSING_LIBRARIANS)
-        if not common_funcs.is_repo_in_bundle(full_repo["clone_url"], self.bundle_submodules) and \
-           not repo["name"] in BUNDLE_IGNORE_LIST: # Don't assume the bundle will
-                                                   # bundle itself and possibly
-                                                   # other repos.
-            errors.append(ERROR_NOT_IN_BUNDLE)
-        if "allow_squash_merge" not in full_repo or full_repo["allow_squash_merge"] or full_repo["allow_rebase_merge"]:
-            errors.append(ERROR_ONLY_ALLOW_MERGES)
+        if (not common_funcs.is_repo_in_bundle(full_repo["clone_url"], self.bundle_submodules)
+                and not repo["name"] in BUNDLE_IGNORE_LIST):
+            # Don't assume the bundle will bundle itself and possibly
+            # other repos.
+            errors.append(ERROR_NOT_IN_BUNDLE)
+        if ("allow_squash_merge" not in full_repo
+                or full_repo["allow_squash_merge"]
+                or full_repo["allow_rebase_merge"]):
+            errors.append(ERROR_ONLY_ALLOW_MERGES)
         return errors

     def validate_release_state(self, repo):
@@ -204,7 +206,9 @@ def validate_release_state(self, repo):
         if repo["name"] in BUNDLE_IGNORE_LIST:
             return []

-        repo_last_release = github.get("/repos/" + repo["full_name"] + "/releases/latest")
+        repo_last_release = github.get("/repos/"
+                                       + repo["full_name"]
+                                       + "/releases/latest")
         if not repo_last_release.ok:
             return [ERROR_GITHUB_NO_RELEASE]
         repo_release_json = repo_last_release.json()
@@ -215,14 +219,27 @@ def validate_release_state(self, repo):
                 return [ERROR_GITHUB_NO_RELEASE]
             else:
                 # replace 'output_handler' with ERROR_OUTPUT_HANDLER
-                self.output_file_data.append("Error: retrieving latest release information failed on '{0}'. Information Received: {1}".format(
-                    repo["name"], repo_release_json["message"]))
+                err_msg = [
+                    "Error: retrieving latest release information failed on ",
+                    "'{}'. ".format(repo["name"]),
+                    "Information Received: ",
+                    "{}".format(repo_release_json["message"])
+                ]
+                self.output_file_data.append("".join(err_msg))
                 return [ERROR_OUTPUT_HANDLER]

-        compare_tags = github.get("/repos/" + repo["full_name"] + "/compare/" + tag_name + "...master")
+        compare_tags = github.get("/repos/"
+                                  + repo["full_name"]
+                                  + "/compare/"
+                                  + tag_name
+                                  + "...master")
         if not compare_tags.ok:
             # replace 'output_handler' with ERROR_OUTPUT_HANDLER
-            self.output_file_data.append("Error: failed to compare {0} 'master' to tag '{1}'".format(repo["name"], tag_name))
+            err_msg = [
+                "Error: failed to compare {} 'master' ".format(repo["name"]),
+                "to tag '{}'".format(tag_name)
+            ]
+            self.output_file_data.append("".join(err_msg))
             return [ERROR_OUTPUT_HANDLER]
         compare_tags_json = compare_tags.json()
         if "status" in compare_tags_json:
@@ -239,22 +256,30 @@ def validate_release_state(self, repo):
                 #    compare_tags_json["status"], compare_tags_json["ahead_by"],
                 #    compare_tags_json["behind_by"], compare_tags_json["total_commits"], repo["full_name"]))
                 if date_diff.days > datetime.date.today().max.day:
-                    return [(ERROR_GITHUB_COMMITS_SINCE_LAST_RELEASE_GTM, date_diff.days)]
+                    return [(ERROR_GITHUB_COMMITS_SINCE_LAST_RELEASE_GTM,
+                             date_diff.days)]
                 elif date_diff.days <= datetime.date.today().max.day:
                     if date_diff.days > 7:
-                        return [(ERROR_GITHUB_COMMITS_SINCE_LAST_RELEASE_1M, date_diff.days)]
+                        return [(ERROR_GITHUB_COMMITS_SINCE_LAST_RELEASE_1M,
+                                 date_diff.days)]
                     else:
-                        return [(ERROR_GITHUB_COMMITS_SINCE_LAST_RELEASE_1W, date_diff.days)]
+                        return [(ERROR_GITHUB_COMMITS_SINCE_LAST_RELEASE_1W,
+                                 date_diff.days)]
         elif "errors" in compare_tags_json:
             # replace 'output_handler' with ERROR_OUTPUT_HANDLER
-            self.output_file_data.append("Error: comparing latest release to 'master' failed on '{0}'. Error Message: {1}".format(
-                repo["name"], compare_tags_json["message"]))
+            err_msg = [
+                "Error: comparing latest release to 'master' failed on ",
+                "'{}'. ".format(repo["name"]),
+                "Error Message: {}".format(compare_tags_json["message"])
+            ]
+            self.output_file_data.append("".join(err_msg))
             return [ERROR_OUTPUT_HANDLER]

         return []

     def _validate_readme(self, repo, download_url):
-        # We use requests because file contents are hosted by githubusercontent.com, not the API domain.
+        # We use requests because file contents are hosted by
+        # githubusercontent.com, not the API domain.
         contents = requests.get(download_url, timeout=30)
         if not contents.ok:
             return [ERROR_README_DOWNLOAD_FAILED]
@@ -295,31 +320,40 @@ def _validate_py_for_u_modules(self, repo, download_url):
            look for "import ___". If the "import u___" is
            used with NO "import ____" generate an error.
         """
-        # We use requests because file contents are hosted by githubusercontent.com, not the API domain.
+        # We use requests because file contents are hosted by
+        # githubusercontent.com, not the API domain.
         contents = requests.get(download_url, timeout=30)
         if not contents.ok:
             return [ERROR_PYFILE_DOWNLOAD_FAILED]

         errors = []

         lines = contents.text.split("\n")
-        ustruct_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*ustruct", l)]
-        struct_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*struct", l)]
+        ustruct_lines = ([l for l in lines
+                          if re.match(r"[\s]*import[\s][\s]*ustruct", l)])
+        struct_lines = ([l for l in lines
+                         if re.match(r"[\s]*import[\s][\s]*struct", l)])
         if ustruct_lines and not struct_lines:
             errors.append(ERROR_PYFILE_MISSING_STRUCT)

-        ure_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*ure", l)]
-        re_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*re", l)]
+        ure_lines = ([l for l in lines
+                      if re.match(r"[\s]*import[\s][\s]*ure", l)])
+        re_lines = ([l for l in lines
+                     if re.match(r"[\s]*import[\s][\s]*re", l)])
         if ure_lines and not re_lines:
             errors.append(ERROR_PYFILE_MISSING_RE)

-        ujson_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*ujson", l)]
-        json_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*json", l)]
+        ujson_lines = ([l for l in lines
+                        if re.match(r"[\s]*import[\s][\s]*ujson", l)])
+        json_lines = ([l for l in lines
+                       if re.match(r"[\s]*import[\s][\s]*json", l)])
         if ujson_lines and not json_lines:
             errors.append(ERROR_PYFILE_MISSING_JSON)

-        uerrno_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*uerrno", l)]
-        errno_lines = [l for l in lines if re.match(r"[\s]*import[\s][\s]*errno", l)]
+        uerrno_lines = ([l for l in lines
+                         if re.match(r"[\s]*import[\s][\s]*uerrno", l)])
+        errno_lines = ([l for l in lines
+                        if re.match(r"[\s]*import[\s][\s]*errno", l)])
         if uerrno_lines and not errno_lines:
             errors.append(ERROR_PYFILE_MISSING_ERRNO)

@@ -336,7 +370,10 @@ def _validate_travis_yml(self, repo, travis_yml_file_info):
         errors = []

         lines = contents.text.split("\n")
-        pypi_providers_lines = [l for l in lines if re.match(r"[\s]*-[\s]*provider:[\s]*pypi[\s]*", l)]
+        pypi_providers_lines = (
+            [l for l in lines
+             if re.match(r"[\s]*-[\s]*provider:[\s]*pypi[\s]*", l)]
+        )

         if not pypi_providers_lines:
             errors.append(ERROR_MISSING_PYPIPROVIDER)
@@ -381,7 +418,8 @@ def _validate_requirements_txt(self, repo, file_info):

         errors = []
         lines = contents.text.split("\n")
-        blinka_lines = [l for l in lines if re.match(r"[\s]*Adafruit-Blinka[\s]*", l)]
+        blinka_lines = ([l for l in lines
+                         if re.match(r"[\s]*Adafruit-Blinka[\s]*", l)])

         if not blinka_lines and repo["name"] not in LIBRARIES_DONT_NEED_BLINKA:
             errors.append(ERROR_MISSING_BLINKA)
@@ -409,7 +447,8 @@ def validate_contents(self, repo):
         files = [x["name"] for x in content_list]

         # ignore new/in-work repos, which should have less than 8 files:
-        # ___.py or folder, CoC, .travis.yml, .readthedocs.yml, docs/, examples/, README, LICENSE
+        # ___.py or folder, CoC, .travis.yml, .readthedocs.yml, docs/,
+        # examples/, README, LICENSE
         if len(files) < 8:
             BUNDLE_IGNORE_LIST.append(repo["name"])
             if not self.validate_contents_quiet:
@@ -435,7 +474,8 @@ def validate_contents(self, repo):
                 if f["name"] == "README.rst":
                     readme_info = f
                     break
-            errors.extend(self._validate_readme(repo, readme_info["download_url"]))
+            errors.extend(self._validate_readme(repo,
+                                                readme_info["download_url"]))

         if ".travis.yml" in files:
             file_info = content_list[files.index(".travis.yml")]
@@ -471,19 +511,23 @@ def validate_contents(self, repo):
         dirs = [x["name"] for x in content_list if x["type"] == "dir"]
         if "examples" in dirs:
             # check for at least on .py file
-            examples_list = github.get("/repos/" + repo["full_name"] + "/contents/examples")
+            examples_list = github.get("/repos/"
+                                       + repo["full_name"]
+                                       + "/contents/examples")
             if not examples_list.ok:
                 errors.append(ERROR_UNABLE_PULL_REPO_EXAMPLES)
             examples_list = examples_list.json()
             if len(examples_list) < 1:
                 errors.append(ERROR_MISSING_EXAMPLE_FILES)
             else:
-                lib_name = repo["name"][repo["name"].rfind("CircuitPython_") + 14:].lower()
+                lib_name = (repo["name"][repo["name"].rfind("CircuitPython_")
+                            + 14:].lower())
                 all_have_name = True
                 simpletest_exists = False
                 for example in examples_list:
-                    if not example["name"].lower().startswith(lib_name):
-                        all_have_name = False
+                    if (not example["name"].lower().startswith(lib_name)
+                            and example["name"].endswith(".py")):
+                        all_have_name = False
                     if "simpletest" in example["name"].lower():
                         simpletest_exists = True
                 if not all_have_name:
@@ -495,7 +539,8 @@ def validate_contents(self, repo):

         # first location .py files whose names begin with "adafruit_"
         re_str = re.compile('adafruit\_[\w]*\.py')
-        pyfiles = [x["download_url"] for x in content_list if re_str.fullmatch(x["name"])]
+        pyfiles = ([x["download_url"] for x in content_list
+                    if re_str.fullmatch(x["name"])])
         for pyfile in pyfiles:
             # adafruit_xxx.py file; check if for proper usage of u___ versions of modules
             errors.extend(self._validate_py_for_u_modules(repo, pyfile))
@@ -505,14 +550,20 @@ def validate_contents(self, repo):
         for adir in dirs:
             if re_str.fullmatch(adir):
                 # retrieve the files in that directory
-                dir_file_list = github.get("/repos/" + repo["full_name"] + "/contents/" + adir)
+                dir_file_list = github.get("/repos/"
+                                           + repo["full_name"]
+                                           + "/contents/"
+                                           + adir)
                 if not dir_file_list.ok:
                     errors.append(ERROR_UNABLE_PULL_REPO_DIR)
                 dir_file_list = dir_file_list.json()
                 # search for .py files in that directory
-                dir_files = [x["download_url"] for x in dir_file_list if x["type"] == "file" and x["name"].endswith(".py")]
+                dir_files = ([x["download_url"] for x in dir_file_list
+                              if x["type"] == "file"
+                              and x["name"].endswith(".py")])
                 for dir_file in dir_files:
-                    # .py files in subdirectory adafruit_xxx; check if for proper usage of u___ versions of modules
+                    # .py files in subdirectory adafruit_xxx
+                    # check if for proper usage of u___ versions of modules
                     errors.extend(self._validate_py_for_u_modules(repo, dir_file))

         return errors
