Skip to content

Commit

Permalink
chore: black linting into the game
Browse files Browse the repository at this point in the history
  • Loading branch information
peppelinux committed Feb 6, 2025
1 parent a7a0f0c commit f512e8b
Show file tree
Hide file tree
Showing 129 changed files with 3,221 additions and 3,154 deletions.
25 changes: 6 additions & 19 deletions linting.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,26 +7,13 @@ autopep8 -r --in-place $SRC
autoflake -r --in-place --remove-unused-variables --expand-star-imports --remove-all-unused-imports $SRC

flake8 $SRC --count --select=E9,F63,F7,F82 --show-source --statistics
flake8 $SRC --max-line-length 120 --count --statistics

isort --atomic pyeudiw
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 $SRC --count --exit-zero --statistics

bandit -r -x $SRC/test* $SRC/*
isort --atomic $SRC

black $SRC

echo -e '\nHTML linting:'
shopt -s globstar nullglob
for file in `find example -type f | grep html`
do
echo -e "\n$file:"
html_lint.py "$file" | awk -v path="file://$PWD/$file:" '$0=path$0' | sed -e 's/: /:\n\t/';
done

errors=0
for file in "${array[@]}"
do
errors=$((errors + $(html_lint.py "$file" | grep -c 'Error')))
done

echo -e "\nHTML errors: $errors"
if [ "$errors" -gt 0 ]; then exit 1; fi;
bandit -r -x $SRC/test* $SRC/*

23 changes: 12 additions & 11 deletions pyeudiw/federation/http_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,9 @@
from .exceptions import HttpError


async def fetch(session: aiohttp.ClientSession, url: str, httpc_params: dict) -> requests.Response:
async def fetch(
session: aiohttp.ClientSession, url: str, httpc_params: dict
) -> requests.Response:
"""
Fetches the content of a URL.
Expand All @@ -27,7 +29,9 @@ async def fetch(session: aiohttp.ClientSession, url: str, httpc_params: dict) ->
return await response


async def fetch_all(session: aiohttp.ClientSession, urls: list[str], httpc_params: dict) -> list[requests.Response]:
async def fetch_all(
session: aiohttp.ClientSession, urls: list[str], httpc_params: dict
) -> list[requests.Response]:
"""
Fetches the content of a list of URL.
Expand Down Expand Up @@ -78,14 +82,11 @@ def http_get_sync(urls, httpc_params: dict) -> list[requests.Response]:
:rtype: list[requests.Response]
"""
_conf = {
'verify': httpc_params['connection']['ssl'],
'timeout': httpc_params['session']['timeout']
"verify": httpc_params["connection"]["ssl"],
"timeout": httpc_params["session"]["timeout"],
}
try:
res = [
requests.get(url, **_conf) # nosec - B113
for url in urls
]
res = [requests.get(url, **_conf) for url in urls] # nosec - B113
except requests.exceptions.ConnectionError as e:
raise HttpError(f"Connection error: {e}")

Expand All @@ -112,9 +113,9 @@ async def http_get_async(urls, httpc_params: dict) -> list[requests.Response]:
:returns: the list of responses
:rtype: list[requests.Response]
"""
if not isinstance(httpc_params['session']['timeout'], aiohttp.ClientTimeout):
httpc_params['session']['timeout'] = aiohttp.ClientTimeout(
total=httpc_params['session']['timeout']
if not isinstance(httpc_params["session"]["timeout"], aiohttp.ClientTimeout):
httpc_params["session"]["timeout"] = aiohttp.ClientTimeout(
total=httpc_params["session"]["timeout"]
)

async with aiohttp.ClientSession(**httpc_params.get("session", {})) as session:
Expand Down
Loading

0 comments on commit f512e8b

Please sign in to comment.