https://github.com/python/cpython
Revision 9eb3d5463976068900e94b860ced7e035885835c authored by Steve Dower on 21 August 2019, 22:52:42 UTC, committed by GitHub on 21 August 2019, 22:52:42 UTC
bpo-37834: Normalise handling of reparse points on Windows (GH-15370)
* ntpath.realpath() and nt.stat() will traverse all supported reparse points (previously was mixed)
* nt.lstat() will let the OS traverse reparse points that are not name surrogates (previously would not traverse any reparse point)
* nt.[l]stat() will only set S_IFLNK for symlinks (previous behaviour)
* nt.readlink() will read destinations for symlinks and junction points only

bpo-1311: os.path.exists('nul') now returns True on Windows
* nt.stat('nul').st_mode is now S_IFCHR (previously was an error); see the sketch below
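
A minimal sketch of the observable behaviour (assuming a Windows build that includes this change; the symlink path below is hypothetical):

import os
import stat

# bpo-1311: the 'nul' device now stats as a character device.
print(os.path.exists('nul'))                 # True (previously False)
print(stat.S_ISCHR(os.stat('nul').st_mode))  # True: st_mode is S_IFCHR

# bpo-37834: only real symlinks report S_IFLNK; os.stat() traverses all
# supported reparse points, while os.lstat() lets the OS traverse only
# those that are not name surrogates.
link = 'some_symlink'  # hypothetical path to a symlink
if os.path.lexists(link):
    print(stat.S_ISLNK(os.lstat(link).st_mode))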
time_hashlib.py
# It's intended that this script be run by hand.  It runs speed tests on
# hashlib functions; it does not test for correctness.
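#
# Example invocations (the argument forms this script accepts):
#   python time_hashlib.py sha256                        # hashlib.sha256()
#   python time_hashlib.py whirlpool                     # hashlib.new('whirlpool')
#   python time_hashlib.py _hashlib sha256               # _hashlib.new('sha256')
#   python time_hashlib.py _hashlib openssl_sha256 fast  # _hashlib.openssl_sha256()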

import importlib
import sys
import time
import hashlib


def creatorFunc():
    raise RuntimeError("eek, creatorFunc not overridden")

def test_scaled_msg(scale, name):
    # Scale the iteration count inversely with the message size so each
    # test hashes roughly the same total amount of data.
    iterations = 106201//scale * 20
    longStr = b'Z'*scale

    localCF = creatorFunc
    start = time.perf_counter()
    for _ in range(iterations):
        x = localCF(longStr).digest()
    end = time.perf_counter()

    print('%2.2f' % (end-start), "seconds", iterations, "x", len(longStr), "bytes", name)

def test_create():
    # Time the cost of constructing hash objects alone.
    start = time.perf_counter()
    for _ in range(20000):
        d = creatorFunc()
    end = time.perf_counter()

    print('%2.2f' % (end-start), "seconds", '[20000 creations]')

def test_zero():
    # Time hashing the empty message (object creation plus digest).
    start = time.perf_counter()
    for _ in range(20000):
        x = creatorFunc().digest()
    end = time.perf_counter()

    print('%2.2f' % (end-start), "seconds", '[20000 "" digests]')



hName = sys.argv[1]

#
# setup our creatorFunc to test the requested hash
#
if hName in ('_md5', '_sha'):
    mod = importlib.import_module(hName)
    creatorFunc = mod.new
    print("testing speed of old", hName, "legacy interface")
elif hName == '_hashlib' and len(sys.argv) > 3:
    import _hashlib
    creatorFunc = getattr(_hashlib, sys.argv[2])
    print("testing speed of _hashlib.%s" % sys.argv[2], getattr(_hashlib, sys.argv[2]))
elif hName == '_hashlib' and len(sys.argv) == 3:
    import _hashlib
    creatorFunc = lambda name=sys.argv[2]: _hashlib.new(name)
    print("testing speed of _hashlib.new(%r)" % sys.argv[2])
elif hasattr(hashlib, hName) and callable(getattr(hashlib, hName)):
    creatorFunc = getattr(hashlib, hName)
    print("testing speed of hashlib."+hName, getattr(hashlib, hName))
else:
    creatorFunc = lambda name=hName: hashlib.new(name)
    print("testing speed of hashlib.new(%r)" % hName)

try:
    test_create()
except ValueError:
    print()
    print("pass argument(s) naming the hash to run a speed test on:")
    print(" '_md5' and '_sha' test the legacy builtin md5 and sha")
    print(" '_hashlib' 'openssl_hName' 'fast' tests the builtin _hashlib")
    print(" '_hashlib' 'hName' tests builtin _hashlib.new(shaFOO)")
    print(" 'hName' tests the hashlib.hName() implementation if it exists")
    print("         otherwise it uses hashlib.new(hName).")
    print()
    raise

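# Run the benchmarks: empty-message digests first, then scaled messages
# from huge down to tiny.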
test_zero()
test_scaled_msg(scale=106201, name='[huge data]')
test_scaled_msg(scale=10620, name='[large data]')
test_scaled_msg(scale=1062, name='[medium data]')
test_scaled_msg(scale=424, name='[4*small data]')
test_scaled_msg(scale=336, name='[3*small data]')
test_scaled_msg(scale=212, name='[2*small data]')
test_scaled_msg(scale=106, name='[small data]')
test_scaled_msg(scale=creatorFunc().digest_size, name='[digest_size data]')
test_scaled_msg(scale=10, name='[tiny data]')