Commit 1574ab6

regex syntax fix (use raw-string regex patterns) and var-to-int comparison fix (replace identity checks with integer equality)
1 parent 3b5f50e commit 1574ab6

File tree: 2 files changed (+55 -73 lines)

steem/utils.py (+54 -72)
@@ -15,7 +15,7 @@
 from langdetect.lang_detect_exception import LangDetectException
 from toolz import update_in, assoc

-if sys.version >= '3.0':
+if sys.version >= "3.0":
     from urllib.parse import urlparse
 else:
     from urlparse import urlparse
@@ -24,9 +24,7 @@

 # https://github.com/matiasb/python-unidiff/blob/master/unidiff/constants.py#L37
 # @@ (source offset, length) (target offset, length) @@ (section header)
-RE_HUNK_HEADER = re.compile(
-    r"^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))?\ @@[ ]?(.*)$",
-    flags=re.MULTILINE)
+RE_HUNK_HEADER = re.compile(r"^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))?\ @@[ ]?(.*)$", flags=re.MULTILINE)

 # ensure deterministec language detection
 DetectorFactory.seed = 0
@@ -82,34 +80,29 @@ def chunkify(iterable, chunksize=10000):

 def ensure_decoded(thing):
     if not thing:
-        logger.debug('ensure_decoded thing is logically False')
+        logger.debug("ensure_decoded thing is logically False")
         return None
     if isinstance(thing, (list, dict)):
-        logger.debug('ensure_decoded thing is already decoded')
+        logger.debug("ensure_decoded thing is already decoded")
         return thing
     single_encoded_dict = double_encoded_dict = None
     try:
         single_encoded_dict = json.loads(thing)
         if isinstance(single_encoded_dict, dict):
-            logger.debug('ensure_decoded thing is single encoded dict')
+            logger.debug("ensure_decoded thing is single encoded dict")
             return single_encoded_dict
         elif isinstance(single_encoded_dict, str):
-            logger.debug('ensure_decoded thing is single encoded str')
+            logger.debug("ensure_decoded thing is single encoded str")
             if single_encoded_dict == "":
-                logger.debug(
-                    'ensure_decoded thing is single encoded str == ""')
+                logger.debug('ensure_decoded thing is single encoded str == ""')
                 return None
             else:
                 double_encoded_dict = json.loads(single_encoded_dict)
-                logger.debug('ensure_decoded thing is double encoded')
+                logger.debug("ensure_decoded thing is double encoded")
                 return double_encoded_dict
     except Exception as e:
-        extra = dict(
-            thing=thing,
-            single_encoded_dict=single_encoded_dict,
-            double_encoded_dict=double_encoded_dict,
-            error=e)
-        logger.error('ensure_decoded error', extra=extra)
+        extra = dict(thing=thing, single_encoded_dict=single_encoded_dict, double_encoded_dict=double_encoded_dict, error=e)
+        logger.error("ensure_decoded error", extra=extra)
         return None


@@ -137,31 +130,30 @@ def extract_keys_from_meta(meta, keys):
         elif isinstance(item, (list, tuple)):
             extracted.extend(item)
         else:
-            logger.warning('unusual item in meta: %s', item)
+            logger.warning("unusual item in meta: %s", item)
     return extracted


 def build_comment_url(parent_permlink=None, author=None, permlink=None):
-    return '/'.join([parent_permlink, author, permlink])
+    return "/".join([parent_permlink, author, permlink])


 def canonicalize_url(url, **kwargs):
     try:
         canonical_url = w3lib.url.canonicalize_url(url, **kwargs)
     except Exception as e:
-        logger.warning('url preparation error', extra=dict(url=url, error=e))
+        logger.warning("url preparation error", extra=dict(url=url, error=e))
         return None
     if canonical_url != url:
-        logger.debug('canonical_url changed %s to %s', url, canonical_url)
+        logger.debug("canonical_url changed %s to %s", url, canonical_url)
     try:
         parsed_url = urlparse(canonical_url)
         if not parsed_url.scheme and not parsed_url.netloc:
-            _log = dict(
-                url=url, canonical_url=canonical_url, parsed_url=parsed_url)
-            logger.warning('bad url encountered', extra=_log)
+            _log = dict(url=url, canonical_url=canonical_url, parsed_url=parsed_url)
+            logger.warning("bad url encountered", extra=_log)
             return None
     except Exception as e:
-        logger.warning('url parse error', extra=dict(url=url, error=e))
+        logger.warning("url parse error", extra=dict(url=url, error=e))
         return None
     return canonical_url

@@ -172,7 +164,7 @@ def findall_patch_hunks(body=None):

 def detect_language(text):
     if not text or len(text) < MIN_TEXT_LENGTH_FOR_DETECTION:
-        logger.debug('not enough text to perform langdetect')
+        logger.debug("not enough text to perform langdetect")
         return None
     try:
         return detect(text)
@@ -186,7 +178,7 @@ def is_comment(item):
     The item can be a Post object or just a raw comment object from the
     blockchain.
     """
-    return item['parent_author'] != ""
+    return item["parent_author"] != ""


 def time_elapsed(posting_time):
@@ -202,16 +194,15 @@ def parse_time(block_time):
     """Take a string representation of time from the blockchain, and parse
     it into datetime object.
     """
-    return datetime.strptime(block_time, '%Y-%m-%dT%H:%M:%S')
+    return datetime.strptime(block_time, "%Y-%m-%dT%H:%M:%S")


 def time_diff(time1, time2):
     return parse_time(time1) - parse_time(time2)


 def keep_in_dict(obj, allowed_keys=list()):
-    """ Prune a class or dictionary of all but allowed keys.
-    """
+    """Prune a class or dictionary of all but allowed keys."""
     if type(obj) == dict:
         items = obj.items()
     else:
@@ -221,8 +212,7 @@ def keep_in_dict(obj, allowed_keys=list()):


 def remove_from_dict(obj, remove_keys=list()):
-    """ Prune a class or dictionary of specified keys.
-    """
+    """Prune a class or dictionary of specified keys."""
     if type(obj) == dict:
         items = obj.items()
     else:
@@ -232,7 +222,7 @@ def remove_from_dict(obj, remove_keys=list()):


 def construct_identifier(*args):
-    """ Create a post identifier from comment/post object or arguments.
+    """Create a post identifier from comment/post object or arguments.

     Examples:

@@ -244,21 +234,20 @@ def construct_identifier(*args):

     if len(args) == 1:
         op = args[0]
-        author, permlink = op['author'], op['permlink']
+        author, permlink = op["author"], op["permlink"]
     elif len(args) == 2:
         author, permlink = args
     else:
-        raise ValueError(
-            'construct_identifier() received unparsable arguments')
+        raise ValueError("construct_identifier() received unparsable arguments")

     # remove the @ sign in case it was passed in by the user.
-    author = author.replace('@', '')
+    author = author.replace("@", "")
     fields = dict(author=author, permlink=permlink)
     return "{author}/{permlink}".format(**fields)


-def json_expand(json_op, key_name='json'):
-    """ Convert a string json object to Python dict in an op. """
+def json_expand(json_op, key_name="json"):
+    """Convert a string json object to Python dict in an op."""
     if type(json_op) == dict and key_name in json_op and json_op[key_name]:
         try:
             return update_in(json_op, [key_name], json.loads)
@@ -270,9 +259,9 @@ def json_expand(json_op, key_name='json'):

 def sanitize_permlink(permlink):
     permlink = permlink.strip()
-    permlink = re.sub("_|\s|\.", "-", permlink)
-    permlink = re.sub("[^\w-]", "", permlink)
-    permlink = re.sub("[^a-zA-Z0-9-]", "", permlink)
+    permlink = re.sub(r"_|\s|\.", "-", permlink)
+    permlink = re.sub(r"[^\w-]", "", permlink)
+    permlink = re.sub(r"[^a-zA-Z0-9-]", "", permlink)
     permlink = permlink.lower()
     return permlink

@@ -292,53 +281,49 @@ def derive_permlink(title, parent_permlink=None):
 def resolve_identifier(identifier):

     # in case the user supplied the @ sign.
-    identifier = identifier.replace('@', '')
+    identifier = identifier.replace("@", "")

-    match = re.match("([\w\-\.]*)/([\w\-]*)", identifier)
+    match = re.match(r"([\w\-\.]*)/([\w\-]*)", identifier)
     if not hasattr(match, "group"):
         raise ValueError("Invalid identifier")
     return match.group(1), match.group(2)


 def fmt_time(t):
-    """ Properly Format Time for permlinks
-    """
+    """Properly Format Time for permlinks"""
     return datetime.utcfromtimestamp(t).strftime("%Y%m%dt%H%M%S%Z")


 def fmt_time_string(t):
-    """ Properly Format Time for permlinks
-    """
-    return datetime.strptime(t, '%Y-%m-%dT%H:%M:%S')
+    """Properly Format Time for permlinks"""
+    return datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")


 def fmt_time_from_now(secs=0):
-    """ Properly Format Time that is `x` seconds in the future
+    """Properly Format Time that is `x` seconds in the future

-     :param int secs: Seconds to go in the future (`x>0`) or the
-         past (`x<0`)
-     :return: Properly formated time for Graphene (`%Y-%m-%dT%H:%M:%S`)
-     :rtype: str
+    :param int secs: Seconds to go in the future (`x>0`) or the
+        past (`x<0`)
+    :return: Properly formated time for Graphene (`%Y-%m-%dT%H:%M:%S`)
+    :rtype: str

     """
-    return datetime.utcfromtimestamp(time.time() + int(secs)).strftime(
-        '%Y-%m-%dT%H:%M:%S')
+    return datetime.utcfromtimestamp(time.time() + int(secs)).strftime("%Y-%m-%dT%H:%M:%S")


 def env_unlocked():
-    """ Check if wallet passphrase is provided as ENV variable. """
-    return os.getenv('UNLOCK', False)
+    """Check if wallet passphrase is provided as ENV variable."""
+    return os.getenv("UNLOCK", False)


 # todo remove these
 def strfage(time, fmt=None):
-    """ Format time/age
-    """
+    """Format time/age"""
     if not hasattr(time, "days"):  # dirty hack
         now = datetime.utcnow()
         if isinstance(time, str):
-            time = datetime.strptime(time, '%Y-%m-%dT%H:%M:%S')
-        time = (now - time)
+            time = datetime.strptime(time, "%Y-%m-%dT%H:%M:%S")
+        time = now - time

     d = {"days": time.days}
     d["hours"], rem = divmod(time.seconds, 3600)
@@ -355,8 +340,7 @@ def strfage(time, fmt=None):


 def strfdelta(tdelta, fmt):
-    """ Format time/age
-    """
+    """Format time/age"""
     if not tdelta or not hasattr(tdelta, "days"):  # dirty hack
         return None

@@ -367,7 +351,7 @@ def strfdelta(tdelta, fmt):


 def is_valid_account_name(name):
-    return re.match('^[a-z][a-z0-9\-.]{2,15}$', name)
+    return re.match(r"^[a-z][a-z0-9\-.]{2,15}$", name)


 def compat_compose_dictionary(dictionary, **kwargs):
@@ -394,20 +378,18 @@ def compat_json(data, ignore_dicts=False):
     """
     # if this is a unicode string, return its string representation
     if isinstance(data, unicode):
-        return data.encode('utf-8')
+        return data.encode("utf-8")
     # if this is a list of values, return list of byte-string values
     if isinstance(data, list):
        return [compat_json(item, ignore_dicts=True) for item in data]
     # if this is a dictionary, return dictionary of byte-string keys and values
     # but only if we haven't already byte-string it
     if isinstance(data, dict) and not ignore_dicts:
-        return {
-            compat_json(key, ignore_dicts=True): compat_json(value, ignore_dicts=True)
-            for key, value in data.iteritems()
-        }
+        return {compat_json(key, ignore_dicts=True): compat_json(value, ignore_dicts=True) for key, value in data.iteritems()}
     # if it's anything else, return it in its original form
     return data

+
 def compat_bytes(item, encoding=None):
     """
     This method is required because Python 2.7 `bytes` is simply an alias for `str`. Without this method,
@@ -442,7 +424,7 @@ def __bytes__(self):
     :param encoding: optional encoding parameter to handle the Python 3.6 two argument 'bytes' method.
     :return: a bytes object that functions the same across 3.6 and 2.7
     """
-    if hasattr(item, '__bytes__'):
+    if hasattr(item, "__bytes__"):
         return item.__bytes__()
     else:
         if encoding:
@@ -461,7 +443,7 @@ def compat_chr(item):
     :param item: a length 1 string who's `chr` method needs to be invoked
     :return: the unichr code point of the single character string, item
     """
-    if sys.version >= '3.0':
+    if sys.version >= "3.0":
         return chr(item)
     else:
         return unichr(item)
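
A note on the regex changes above: adding the r prefix does not change what the regex engine receives, it only stops Python from treating backslashes in the pattern as string escapes. Sequences such as \s, \w, and \. are not valid string escapes, so non-raw literals trigger a DeprecationWarning (SyntaxWarning on newer interpreters) and are slated to become errors. The same reasoning applies to sanitize_permlink, resolve_identifier, and is_valid_account_name. A minimal sketch of the equivalence, not taken from this repository and using made-up sample input:

import re

# The raw-string form (as adopted in this commit) and the explicitly escaped
# form describe exactly the same pattern; only the raw string keeps the
# source free of invalid-escape warnings.
raw_pattern = re.compile(r"_|\s|\.")
escaped_pattern = re.compile("_|\\s|\\.")

sample = "a_b c.d"
assert raw_pattern.sub("-", sample) == escaped_pattern.sub("-", sample) == "a-b-c-d"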

steembase/transactions.py (+1 -1)
@@ -326,7 +326,7 @@ def sign(self, wifkeys, chain=None):
             #
             lenR = sigder[3]
             lenS = sigder[5 + lenR]
-            if lenR is 32 and lenS is 32:
+            if int(lenR) == 32 and int(lenS) == 32:
                 # Derive the recovery parameter
                 #
                 i = self.recoverPubkeyParameter(
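
The one-line change above swaps identity comparison for value comparison. lenR and lenS are length bytes taken from the DER-encoded signature, and a check like x is 32 only appears to work because CPython caches small integer objects, an implementation detail that Python 3.8+ flags with a SyntaxWarning for is against a literal; wrapping the operands in int() makes the comparison an explicit value check. A short illustration of the difference, using made-up values rather than a real signature:

# Equality compares values; identity compares object identity and is unreliable here.
len_r = int("32")      # stands in for a length byte pulled from a DER signature
print(len_r == 32)     # True: value comparison, always correct
print(len_r is 32)     # often True in CPython only because small ints are cached
                       # (and this line itself draws a SyntaxWarning on 3.8+)

big = int("1000")
print(big == 1000)     # True
print(big is 1000)     # typically False: same value, distinct objects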
