Commit fa22f3d1 authored by Christophe Benz

Add flake8 plugins, fix errors

parent 33eb540d
Pipeline #114430 passed with stages in 2 minutes and 43 seconds
@@ -138,7 +138,7 @@ def main():
     dashboard_generator = DashboardGenerator(args=args)
     dashboard_generator.fetch()
-    print(dashboard_generator.render())
+    print(dashboard_generator.render())  # noqa


 def load_bytes(source: str) -> bytes:
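The added `# noqa` comment addresses flake8-print, which reports every `print()` call; here the print is the script's intended output, so the check is silenced on that line instead of removing the call. A minimal sketch of the same pattern (the `render_dashboard` helper below is a made-up stand-in for `DashboardGenerator.render()`):

def render_dashboard() -> str:
    # Stand-in for DashboardGenerator.render(): returns the page to emit.
    return "<html>...</html>"


def main():
    # flake8-print would flag this call; the trailing `# noqa` comment
    # disables flake8 checks on this line, keeping the intended output.
    print(render_dashboard())  # noqa


if __name__ == "__main__":
    main()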
@@ -173,7 +173,7 @@ class Job:
         elif job_status == "canceled":
             return "fa-minus-circle text-dark"
-        logger.warning("Unsupported job status: {!r}".format(job_status))
+        logger.warning("Unsupported job status: %r", job_status)
         return ""

     def get_status(self):
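This and the similar changes below follow flake8-logging-format: pass values as arguments to the logger instead of pre-formatting the message, so the `%r` interpolation only happens if the record is actually emitted and the message template stays constant. A small self-contained comparison (the status value is invented):

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

job_status = "unknown"

# Eager: the message is built with str.format() before the logger decides
# whether to emit it; flake8-logging-format reports this pattern.
logger.warning("Unsupported job status: {!r}".format(job_status))

# Deferred: the logging module applies %r only when the record is emitted.
logger.warning("Unsupported job status: %r", job_status)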
@@ -211,7 +211,7 @@ class FetcherJob(Job):
         errors_list = (
             errors.keys()
             if isinstance(errors, dict)
-            else list(e["dataset_code"] for e in errors)
+            else [e["dataset_code"] for e in errors]
         )
         datasets_in_errors_list_str = (
             ": {}".format(", ".join(sorted(errors_list)))
@@ -249,7 +249,7 @@ class FetcherJob(Job):
             # under 100%, because in that case a progress bar should be rendered,
             # and it wouldn't use `fa-*` classes.
             assert self.completion_rate == 100, self.completion_rate
-            # 100% converted
+            # When data is 100% converted, add a check mark to the circle.
             return "fa-check-circle text-success"
         elif self.parsing_error:
             # error parsing errors.json file
@@ -334,7 +334,7 @@ class DashboardGenerator:
         self.solr_counts_by_slug = None
         if self.cache_file.is_file():
-            logger.debug("Load cache from {!r}".format(CACHE_FILENAME))
+            logger.debug("Load cache from %r", CACHE_FILENAME)
             with self.cache_file.open("rb") as f:
                 self.cache = pickle.load(f)
             self.project_by_slug = self.cache.get("project_by_slug")
@@ -408,9 +408,9 @@ class DashboardGenerator:
         return schedules_by_slug

     def fetch_project_by_slug(self):
-        logger.info("Fetch GitLab projects in {!r} group".format(FETCHERS_NAMESPACE))
+        logger.info("Fetch GitLab projects in %r group", FETCHERS_NAMESPACE)
         project_by_slug = {}
-        for provider_slug, fetcher_metadata in self.fetcher_metadata_by_slug.items():
+        for provider_slug in self.fetcher_metadata_by_slug:
             try:
                 project = self.gl.projects.get(
                     "{}/{}-fetcher".format(FETCHERS_NAMESPACE, provider_slug)
@@ -437,7 +437,7 @@ class DashboardGenerator:
     def fetch_importer_jobs(self):
         args = self.args
-        logger.info("Fetch GitLab CI jobs of {!r} project".format(DBNOMICS_IMPORTER))
+        logger.info("Fetch GitLab CI jobs of project %r", DBNOMICS_IMPORTER)
         importer_jobs = [
             (job, extract_provider_slug_from_trace(job, "Indexing provider ([^$. ]+)"))
             for job in take(
@@ -455,7 +455,7 @@
     def fetch_data_model_jobs(self):
         args = self.args
-        logger.info("Fetch GitLab CI jobs of {!r} project".format(DBNOMICS_DATA_MODEL))
+        logger.info("Fetch GitLab CI jobs of project %r", DBNOMICS_DATA_MODEL)
         data_model_jobs = [
             (
                 job,
@@ -496,7 +496,7 @@ class DashboardGenerator:
     def load_fetchers_yml(self):
         args = self.args
-        logger.info("Load providers from {}".format(args.fetchers_yml))
+        logger.info("Load providers from %r", args.fetchers_yml)
         yaml = YAML(typ="safe")
         fetchers_yml = load_bytes(args.fetchers_yml)
         fetchers_metadata = yaml.load(fetchers_yml)
@@ -563,13 +563,13 @@ class DashboardGenerator:
         )

     def write_cache(self):
-        logger.debug("Write cache to {!r}".format(CACHE_FILENAME))
+        logger.debug("Write cache to %r", CACHE_FILENAME)
         with self.cache_file.open("wb") as f:
             pickle.dump(self.cache, f)


 def extract_fetcher_job_variables(job: ProjectJob):
-    """Return job variables specific to *-fetcher projects.
+    """Return job variables specific to `<provider_slug>-fetcher` projects.

     Getting job variables is not supported by GitLab API v4.
     """
@@ -617,7 +617,7 @@ def extract_provider_slug_from_trace(job, regex):
 def get_fetcher_jobs_by_type(project, all_branches: bool):
-    logger.info("Fetch GitLab CI jobs of %r project", project.name)
+    logger.info("Fetch GitLab CI jobs of project %r", project.name)
     fetcher_jobs_by_type: Dict[str, List[FetcherJob]] = {}
     for gitlab_job in project.jobs.list():
         if not all_branches and gitlab_job.ref != MASTER:
The commit also updates the development dependencies, which now include the flake8 plugins:
black
flake8
flake8-black
flake8-blind-except
flake8-breakpoint
flake8-bugbear
flake8-builtins
flake8-comprehensions
flake8-eradicate
flake8-fixme
flake8-isort
flake8-logging-format
flake8-mutable
flake8-print
flake8-rst-docstrings
isort
mypy
pep8-naming