Skip to content

net: sched: sch_qfq: Fix UAF in qfq_dequeue() Testing Python PR checker #79

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
134 changes: 134 additions & 0 deletions .github/workflows/process-git-request.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
#!/usr/bin/env python3

import sys
import subprocess
import os
import re
import git

def log_commits_between_branches(repo_path, from_branch, to_branch):
    """Report and return the commits on *from_branch* that are not on *to_branch*.

    Parameters
    ----------
    repo_path : str
        Path to the local git repository.
    from_branch, to_branch : str
        Revision specifiers accepted by GitPython (typically branch names).

    Returns
    -------
    list
        ``git.Commit`` objects in ``to_branch..from_branch``, newest first.
        (The original computed this list and silently discarded it.)
    """
    repo = git.Repo(repo_path)

    # merge_base returns the common-ancestor commit(s) of the two branches.
    common_ancestor = repo.merge_base(from_branch, to_branch)
    print(f"Common ancestor is {common_ancestor}")

    # Commits reachable from 'from_branch' but not from 'to_branch'.
    commits = list(repo.iter_commits(f"{to_branch}..{from_branch}"))

    return commits

def file_prepend(file, str):
    """Insert *str* at the beginning of *file*, preserving its current contents.

    NOTE(review): the second parameter shadows the builtin ``str``; the name
    is kept unchanged for interface compatibility with existing callers.
    """
    with open(file, 'r') as fd:
        original = fd.read()

    # Rewrite the whole file: new text first, then the previous contents.
    with open(file, 'w') as fd:
        fd.write(str + original)

def process_git_request(fname, target_branch, source_branch, prj_dir):
    """Validate every commit in ``target_branch..source_branch``.

    For each commit, the first 13 lines of ``git show`` output are scanned
    for:

    * a ``ciq.com`` author address (the commit is rejected otherwise),
    * ``jira`` references (recorded in the report file),
    * an ``upstream-diff`` marker (suppresses diffdiff mismatch failures),
    * a 40-hex upstream commit sha (checked with ``diffdiff.py``).

    A report is written to *fname*; the caller prints it when a non-zero
    status is returned.

    Parameters
    ----------
    fname : str
        Path of the report file to write.
    target_branch, source_branch : str
        Branch names delimiting the commit range to check.
    prj_dir : str
        Repository directory; the process chdirs into it.

    Returns
    -------
    int
        0 when every commit passes, 1 when any check fails.
    """
    retcode = 0  # presume success
    os.chdir(prj_dir)

    repo = git.Repo(".")
    commits = repo.iter_commits(f"{target_branch}..{source_branch}")
    # Everything we need should be within the first lines of the log.
    loglines_to_check = 13

    # The 'with' block guarantees the report is flushed and closed before
    # the caller reads it back.  (The original left the handle open on the
    # success path, so the subsequent read could see an unflushed file.)
    with open(fname, "w") as file:
        for commit in commits:
            print(f"{commit.hexsha} {commit.message.splitlines()[0]}")

            commit_sha = commit.hexsha

            # List-argument form avoids shell interpolation of the sha.
            gitlog = subprocess.run(["git", "show", commit_sha],
                                    capture_output=True, text=True)

            loglines = gitlog.stdout.splitlines()
            lines_counted = 0
            local_diffdiff_sha = commit_sha
            upstream_diffdiff_sha = ""
            upstream_diff = False

            for logline in loglines:
                lines_counted += 1
                if lines_counted == 1:
                    file.write("Merge Request sha: " + local_diffdiff_sha)
                    file.write("\n")
                if lines_counted == 2:  # author email line
                    if "ciq.com" not in logline.lower():
                        # Bad author: reject the request.  NOTE(review): the
                        # original returned 0 here, so the error written to
                        # the report was never surfaced by the caller.
                        s = f"error:\nBad {logline}\n"
                        print(s)
                        file.write(s)
                        return 1
                if lines_counted > 1:
                    if "jira" in logline.lower():
                        file.write("\t" + logline + "\n")

                    if "upstream-diff" in logline.lower():
                        upstream_diff = True

                    if "commit" in logline.lower():
                        sha_match = re.search(r'[0-9a-f]{40}', logline)
                        upstream_diffdiff_sha = sha_match.group(0) if sha_match else ""
                        if upstream_diffdiff_sha:
                            print("Upstream : " + upstream_diffdiff_sha)
                            file.write("\tUpstream sha: " + upstream_diffdiff_sha)
                            file.write("\n")

                # Everything we need should be in the first lines checked.
                if lines_counted > loglines_to_check:
                    break

            if local_diffdiff_sha and upstream_diffdiff_sha:
                diff_cmd = os.path.join(os.getcwd(), ".github/workflows/diffdiff.py") + " --colour --commit " + local_diffdiff_sha
                process = subprocess.run(diff_cmd, shell=True, capture_output=True, text=True)

                # A diffdiff mismatch is only an error when the commit did
                # not declare an intentional upstream-diff.
                if process.returncode != 0 and not upstream_diff:
                    print("diffdiff out: " + process.stdout)
                    print("diffdiff err: " + process.stderr)
                    retcode = 1
                    file.write("error:\nCommit: " + local_diffdiff_sha + " differs with no upstream tag in commit message\n")

    return retcode

# ---- command-line entry point -------------------------------------------
# Usage:
#   process-git-request.py fname target_branch source_branch prj_dir \
#       pull_request requestor
#
# Check argv length BEFORE unpacking: with zero arguments the starred
# unpacking itself would raise ValueError ahead of the friendly message.
if len(sys.argv) < 7:
    print("Not enough arguments: fname, target_branch, source_branch, prj_dir, pull_request, requestor")
    # Exit non-zero so CI treats a malformed invocation as a failure.
    # (The original called sys.exit() with no argument, i.e. status 0.)
    sys.exit(1)

first_arg, *argv_in = sys.argv[1:]  # skip the script name

# Report file written by process_git_request and read back below.
fname = "tmp-" + str(first_arg)
target_branch = str(argv_in[0])
source_branch = str(argv_in[1])
prj_dir = str(argv_in[2])
pullreq = str(argv_in[3])    # currently unused by the checker
requestor = str(argv_in[4])  # currently unused by the checker

retcode = process_git_request(fname, target_branch, source_branch, prj_dir)

if retcode != 0:
    # Surface the report so the failure reason appears in the CI log.
    with open(fname, 'r') as fd:
        print(fd.read())
    sys.exit(1)

print("Done")
sys.exit(0)

140 changes: 0 additions & 140 deletions .github/workflows/process-git-request.rb

This file was deleted.

48 changes: 48 additions & 0 deletions .github/workflows/process-pull-request.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Pull Request Checker

on:
  pull_request:
    branches:
      - '**'
      - '!mainline'

permissions:
  contents: read

jobs:
  test:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
      - name: Run tests
        run: |
          /usr/bin/pip3 install gitPython
          python -c "import sys; import git; print(sys.version)"
          rm -rf /home/runner/work/kernel-src-tree/kernel-src-tree
          cd /home/runner/work/kernel-src-tree
          git clone https://github.com/ctrliq/kernel-src-tree
          cd kernel-src-tree
          git fetch --all
          git checkout -b ${{ github.head_ref }} origin/${{ github.head_ref }}
          git checkout ${{ github.base_ref }}
          git remote add linux https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
          git fetch --shallow-since="2 years ago" linux
          echo "Will run process-git-request.py with:"
          echo "fname = ${{ github.run_id }}"
          echo "target_branch = ${{ github.base_ref }}"
          echo "source_branch = ${{ github.head_ref }}"
          echo "prj_dir = ${{ github.workspace }}"
          echo "pull_request = ${{ github.ref }}"
          echo "requestor = ${{ github.actor }}"
          cd ${{ github.workspace }}
          # Argument order must match the script's positional parameters:
          #   fname target_branch source_branch prj_dir pull_request requestor
          # (The original passed github.ref as source_branch and
          # github.head_ref as pull_request, contradicting the echoes above.)
          /usr/bin/python3 .github/workflows/process-git-request.py ${{ github.run_id }} ${{ github.base_ref }} \
            ${{ github.head_ref }} ${{ github.workspace }} ${{ github.ref }} ${{ github.actor }}
2 changes: 1 addition & 1 deletion net/sched/sch_plug.c
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ static struct Qdisc_ops plug_qdisc_ops __read_mostly = {
.priv_size = sizeof(struct plug_sched_data),
.enqueue = plug_enqueue,
.dequeue = plug_dequeue,
.peek = qdisc_peek_head,
.peek = qdisc_peek_dequeued,
.init = plug_init,
.change = plug_change,
.reset = qdisc_reset_queue,
Expand Down
22 changes: 17 additions & 5 deletions net/sched/sch_qfq.c
Original file line number Diff line number Diff line change
Expand Up @@ -973,10 +973,13 @@ static void qfq_update_eligible(struct qfq_sched *q)
}

/* Dequeue head packet of the head class in the DRR queue of the aggregate. */
static void agg_dequeue(struct qfq_aggregate *agg,
struct qfq_class *cl, unsigned int len)
static struct sk_buff *agg_dequeue(struct qfq_aggregate *agg,
struct qfq_class *cl, unsigned int len)
{
qdisc_dequeue_peeked(cl->qdisc);
struct sk_buff *skb = qdisc_dequeue_peeked(cl->qdisc);

if (!skb)
return NULL;

cl->deficit -= (int) len;

Expand All @@ -986,6 +989,8 @@ static void agg_dequeue(struct qfq_aggregate *agg,
cl->deficit += agg->lmax;
list_move_tail(&cl->alist, &agg->active);
}

return skb;
}

static inline struct sk_buff *qfq_peek_skb(struct qfq_aggregate *agg,
Expand Down Expand Up @@ -1131,11 +1136,18 @@ static struct sk_buff *qfq_dequeue(struct Qdisc *sch)
if (!skb)
return NULL;

qdisc_qstats_backlog_dec(sch, skb);
sch->q.qlen--;

skb = agg_dequeue(in_serv_agg, cl, len);

if (!skb) {
sch->q.qlen++;
return NULL;
}

qdisc_qstats_backlog_dec(sch, skb);
qdisc_bstats_update(sch, skb);

agg_dequeue(in_serv_agg, cl, len);
/* If lmax is lowered, through qfq_change_class, for a class
* owning pending packets with larger size than the new value
* of lmax, then the following condition may hold.
Expand Down
Loading