Merge pull request #17 from andhus/github-workflows
Adds GitHub workflows, drops Python 2.7, limits `pathspec<0.10.0`, and cleans up
andhus authored Apr 8, 2024
2 parents 4452f8d + 1cd629b commit 82bb16e
Showing 6 changed files with 97 additions and 54 deletions.
48 changes: 48 additions & 0 deletions .github/workflows/test.yml
@@ -0,0 +1,48 @@
name: Run tests

on:
  push:
    branches:
      - "master"
  pull_request:
    branches:
      - "*"
  workflow_dispatch:
  release:
    types: [published, edited]

jobs:
  tests:

    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install tox tox-gh-actions
      - name: Cache tox environments
        id: cache-tox
        uses: actions/cache@v4
        with:
          path: .tox
          # setup.py and setup.cfg have versioning info that would impact the
          # tox environment. hashFiles only takes a single file path or pattern
          # at the moment.
          key: ${{ runner.os }}-${{ matrix.python-version }}-tox-${{ hashFiles('setup.py') }}-${{ hashFiles('setup.cfg') }}
      - name: Test with tox
        run: tox
      - uses: codecov/codecov-action@v4
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          verbose: true
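Side note on the cache key: the in-file comment reflects an older limitation. GitHub's hashFiles expression accepts multiple comma-separated patterns these days, so the two calls could likely be collapsed into a single hashFiles('setup.py', 'setup.cfg'); the two-call form above works just as well.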
23 changes: 0 additions & 23 deletions .travis.yml

This file was deleted.

2 changes: 1 addition & 1 deletion setup.py
@@ -26,7 +26,7 @@
author="Anders Huss",
author_email="andhus@kth.se",
license='MIT',
install_requires=['scantree>=0.0.1'],
install_requires=['scantree>=0.0.2', 'pathspec<0.10.0'],
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
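The new upper bound on pathspec presumably guards against breaking changes in the pathspec 0.10.0 release (an assumption: the diff itself only records the pin). A minimal runtime sanity check for the constraint, not part of this commit:

    # Hypothetical check: confirm the installed pathspec satisfies `pathspec<0.10.0`.
    import pathspec

    major, minor = (int(part) for part in pathspec.__version__.split(".")[:2])
    assert (major, minor) < (0, 10), pathspec.__version__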
4 changes: 2 additions & 2 deletions src/dirhash/cli.py
@@ -80,7 +80,7 @@ def get_kwargs(args):
        help=(
            'One or several patterns for paths to include. NOTE: patterns '
            'with an asterisk must be in quotes ("*") or the asterisk '
            'preceded by an escape character (\*).'
            'preceded by an escape character (\\*).'
        ),
        metavar=''
    )
@@ -91,7 +91,7 @@ def get_kwargs(args):
        help=(
            'One or several patterns for paths to exclude. NOTE: patterns '
            'with an asterisk must be in quotes ("*") or the asterisk '
            'preceded by an escape character (\*).'
            'preceded by an escape character (\\*).'
        ),
        metavar=''
    )
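The help-string change doubles the backslash, which matters in Python source: '\*' is an invalid escape sequence, a DeprecationWarning since Python 3.6 and a visible SyntaxWarning on Python 3.12, which this PR adds to the test matrix. A minimal illustration:

    # '(\*)' in a regular string literal triggers an invalid-escape warning;
    # doubling the backslash (or using a raw string) renders the same text.
    s_doubled = '(\\*)'
    s_raw = r'(\*)'
    assert s_doubled == s_raw
    print(s_doubled)  # prints: (\*)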
60 changes: 35 additions & 25 deletions tests/test_dirhash.py
@@ -42,9 +42,19 @@ def test_get_guaranteed(self):
    def test_get_available(self):
        for algorithm in algorithms_available:
            hasher_factory = _get_hasher_factory(algorithm)
            hasher = hasher_factory()
            assert hasattr(hasher, 'update')
            assert hasattr(hasher, 'hexdigest')
            try:
                hasher = hasher_factory()
            except ValueError as exc:
                # Some "available" algorithms are not necessarily available (fails for e.g.
                # 'ripemd160' in github actions for python 3.8). See:
                # https://stackoverflow.com/questions/72409563/unsupported-hash-type-ripemd160-with-hashlib-in-python # noqa
                print(f"Failed to create hasher for {algorithm}: {exc}")
                assert exc.args[0] == f"unsupported hash type {algorithm}"
                hasher = None

            if hasher is not None:
                assert hasattr(hasher, 'update')
                assert hasattr(hasher, 'hexdigest')

    def test_not_available(self):
        with pytest.raises(ValueError):
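For context, a standalone sketch of the failure mode this new guard handles (assuming stock hashlib on an OpenSSL 3 build): algorithms_available reflects what the linked OpenSSL advertises, but OpenSSL 3 ships some algorithms, e.g. ripemd160, in a legacy provider, so construction can still fail:

    import hashlib

    # Probe every advertised algorithm; on OpenSSL 3 builds some names appear
    # in algorithms_available yet raise ValueError when constructed.
    for name in sorted(hashlib.algorithms_available):
        try:
            hashlib.new(name)
        except ValueError as exc:
            print(f"{name}: advertised but not constructible ({exc})")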
@@ -132,10 +142,10 @@ def test_ignore_extensions(self):

class TempDirTest(object):

    def setup(self):
    def setup_method(self):
        self.dir = tempfile.mkdtemp()

    def tear_down(self):
    def teardown_method(self):
        if os.path.exists(self.dir):
            shutil.rmtree(self.dir)

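These renames matter beyond style: setup_method/teardown_method are pytest's xunit-style hooks (the nose-style setup alias is deprecated in recent pytest), while tear_down matched no hook at all, so the temporary directory was never cleaned up. A minimal sketch of the corrected pattern, using a hypothetical test class:

    import os
    import shutil
    import tempfile

    class TestTempDirSketch:
        # pytest calls these exact method names around every test method.
        def setup_method(self):
            self.dir = tempfile.mkdtemp()

        def teardown_method(self):
            # Runs even when the test fails; the old name `tear_down` was
            # never invoked by pytest, so temp dirs leaked.
            if os.path.exists(self.dir):
                shutil.rmtree(self.dir)

        def test_tempdir_exists(self):
            assert os.path.isdir(self.dir)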
@@ -693,19 +703,19 @@ def test_multiproc_speedup(self):
        for i in range(num_files):
            self.mkfile('root/file_{}'.format(i), '< one chunk content')

        expected_min_elapsed = SlowHasher.wait_time * num_files
        expected_min_elapsed_sequential = SlowHasher.wait_time * num_files

        start = time()
        dirhash(self.path_to('root'), algorithm=SlowHasher)
        end = time()
        elapsed_sequential = end - start
        assert elapsed_sequential > expected_min_elapsed
        assert elapsed_sequential > expected_min_elapsed_sequential

        start = time()
        dirhash(self.path_to('root'), algorithm=SlowHasher, jobs=num_files)
        end = time()
        elapsed_multiproc = end - start
        assert elapsed_multiproc < expected_min_elapsed
        assert elapsed_multiproc < 0.9 * expected_min_elapsed_sequential
        # just check "any speedup", the overhead varies (and is high on Travis)

    def test_cache_by_real_path_speedup(self, tmpdir):
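Back-of-envelope for why these bounds hold, with a hypothetical file count (num_files is defined above this hunk and not shown):

    # Illustrative arithmetic only; num_files = 10 is an assumed example value.
    wait_time = 0.25                          # SlowHasher.wait_time
    num_files = 10
    sequential_floor = wait_time * num_files  # 2.5 s when files are hashed one by one
    parallel_ideal = wait_time                # ~0.25 s with jobs=num_files
    assert parallel_ideal < 0.9 * sequential_floor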
@@ -719,13 +729,13 @@ def test_cache_by_real_path_speedup(self, tmpdir):
            file_i.write('< one chunk content', ensure=True)

        wait_time = SlowHasher.wait_time
        expected_min_elapsed = wait_time * num_links
        expected_min_elapsed_no_links = wait_time * num_links
        start = time()
        dirhash(root1, algorithm=SlowHasher)
        end = time()
        elapsed_sequential = end - start
        assert elapsed_sequential > expected_min_elapsed
        overhead = elapsed_sequential - expected_min_elapsed
        elapsed_no_links = end - start
        assert elapsed_no_links > expected_min_elapsed_no_links
        overhead = elapsed_no_links - expected_min_elapsed_no_links

        # all links to same file
        root2 = tmpdir.join('root2')
@@ -736,13 +746,13 @@ def test_cache_by_real_path_speedup(self, tmpdir):
            root2.join('link_{}'.format(i)).mksymlinkto(target_file)

        overhead_margin_factor = 1.5
        expected_max_elapsed = overhead * overhead_margin_factor + wait_time
        assert expected_max_elapsed < expected_min_elapsed
        expected_max_elapsed_with_links = overhead * overhead_margin_factor + wait_time
        assert expected_max_elapsed_with_links < expected_min_elapsed_no_links
        start = time()
        dirhash(root2, algorithm=SlowHasher)
        end = time()
        elapsed_cache = end - start
        assert elapsed_cache < expected_max_elapsed
        elapsed_with_links = end - start
        assert elapsed_with_links < expected_max_elapsed_with_links

    def test_cache_together_with_multiprocess_speedup(self, tmpdir):
        target_file_names = ['target_file_1', 'target_file_2']
@@ -758,13 +768,13 @@ def test_cache_together_with_multiprocess_speedup(self, tmpdir):

        jobs = 2
        wait_time = SlowHasher.wait_time
        expected_min_elapsed = wait_time * num_links / jobs
        expected_min_elapsed_no_links = wait_time * num_links / jobs
        start = time()
        dirhash(root1, algorithm=SlowHasher, jobs=jobs)
        end = time()
        elapsed_sequential = end - start
        assert elapsed_sequential > expected_min_elapsed
        overhead = elapsed_sequential - expected_min_elapsed
        elapsed_no_links = end - start
        assert elapsed_no_links > expected_min_elapsed_no_links
        overhead = elapsed_no_links - expected_min_elapsed_no_links

        root2 = tmpdir.join('root2')
        root2.ensure(dir=True)
@@ -775,13 +785,13 @@ def test_cache_together_with_multiprocess_speedup(self, tmpdir):
                root2.join('link_{}_{}'.format(i, j)).mksymlinkto(target_file)

        overhead_margin_factor = 1.5
        expected_max_elapsed = overhead * overhead_margin_factor + wait_time * 2
        assert expected_max_elapsed < expected_min_elapsed
        expected_max_elapsed_with_links = overhead * overhead_margin_factor + wait_time * 2
        assert expected_max_elapsed_with_links < expected_min_elapsed_no_links
        start = time()
        dirhash(root2, algorithm=SlowHasher, jobs=jobs)
        end = time()
        elapsed_mp_cache = end - start
        assert elapsed_mp_cache < expected_max_elapsed
        elapsed_mp_with_links = end - start
        assert elapsed_mp_with_links < expected_max_elapsed_with_links

    def test_hash_cyclic_link_to_root(self):
        self.mkdirs('root/d1')
@@ -821,7 +831,7 @@ def test_raise_on_wrong_type(self):


class SlowHasher(object):
    wait_time = 0.05
    wait_time = 0.25

    def __init__(self, *args, **kwargs):
        pass
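For orientation, the rest of the class is collapsed in this diff; a hypothetical completion of SlowHasher, a hashlib-compatible stub whose update() just sleeps so the timing tests above measure parallelism and caching rather than hashing speed:

    from time import sleep

    class SlowHasherSketch(object):
        # Hypothetical stand-in for the collapsed remainder of SlowHasher.
        wait_time = 0.25

        def __init__(self, *args, **kwargs):
            pass

        def update(self, data):
            sleep(self.wait_time)  # simulate hashing one chunk, slowly

        def hexdigest(self):
            return ''  # the tests time hashing; the digest value is irrelevant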
14 changes: 11 additions & 3 deletions tox.ini
@@ -1,9 +1,17 @@
[tox]
envlist = py27,py37
envlist = py{38,39,310,311,312}

[testenv]
deps =
    pytest
    pytest-cov
commands =
    py.test --cov-report=xml --cov-config=.coveragerc --cov=dirhash tests/
    coverage report
    pytest --cov=dirhash --cov-report=xml --cov-report=term-missing --cov-config=.coveragerc tests/

[gh-actions]
python =
    3.8: py38
    3.9: py39
    3.10: py310
    3.11: py311
    3.12: py312
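How the pieces connect: on each matrix runner, tox-gh-actions reads this [gh-actions] mapping and restricts the bare tox call in the workflow to the env matching the runner's Python (illustrative; exact selection depends on the installed tox-gh-actions version):

    # python-version: "3.8"   ->  tox -e py38
    # python-version: "3.12"  ->  tox -e py312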
