increase acc (#2039)
* increase acc

* try 0.45

* @pytest

* @pytest

* try .50

* duration

* pytest
Borda authored and justusschock committed Jun 29, 2020
1 parent 4b3b2a6 commit 3edffd0
Showing 9 changed files with 19 additions and 21 deletions.
4 changes: 2 additions & 2 deletions .circleci/config.yml
@@ -21,7 +21,7 @@ references:
 name: Testing
 command: |
 python --version ; pip --version ; pip list
-py.test pytorch_lightning tests -v --doctest-modules --junitxml=test-reports/pytest_junit.xml
+py.test pytorch_lightning tests -v --junitxml=test-reports/pytest_junit.xml
 no_output_timeout: 15m

 examples: &examples
@@ -30,7 +30,7 @@ references:
 command: |
 pip install -r ./pl_examples/requirements.txt --user
 python --version ; pip --version ; pip list
-py.test pl_examples -v --doctest-modules --junitxml=test-reports/pytest_junit.xml
+py.test pl_examples -v --junitxml=test-reports/pytest_junit.xml
 no_output_timeout: 20m

 install_pkg: &install_pkg
2 changes: 1 addition & 1 deletion .drone.yml
@@ -38,7 +38,7 @@ steps:
 #- pip install -r ./docs/requirements.txt --user -q
 - pip list
 - python -c "import torch ; print(' & '.join([torch.cuda.get_device_name(i) for i in range(torch.cuda.device_count())]) if torch.cuda.is_available() else 'only CPU')"
-- coverage run --source pytorch_lightning -m py.test pytorch_lightning tests benchmarks -v --doctest-modules # --flake8
+- coverage run --source pytorch_lightning -m py.test pytorch_lightning tests benchmarks -v # --flake8
 #- cd docs; make doctest; make coverage
 - coverage report
 - codecov --token $CODECOV_TOKEN # --pr $DRONE_PULL_REQUEST --build $DRONE_BUILD_NUMBER --branch $DRONE_BRANCH --commit $DRONE_COMMIT --tag $DRONE_TAG
2 changes: 1 addition & 1 deletion .github/workflows/ci-testing.yml
@@ -111,7 +111,7 @@ jobs:
 run: |
 # tox --sitepackages
 # flake8 .
-coverage run --source pytorch_lightning -m py.test pytorch_lightning tests -v --doctest-modules --junitxml=junit/test-results-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.requires }}.xml
+coverage run --source pytorch_lightning -m py.test pytorch_lightning tests -v --junitxml=junit/test-results-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.requires }}.xml
 coverage report
 - name: Upload pytest test results
2 changes: 1 addition & 1 deletion .run_local_tests.sh
@@ -12,7 +12,7 @@ rm -rf ./tests/cometruns*
 rm -rf ./tests/wandb*
 rm -rf ./tests/tests/*
 rm -rf ./lightning_logs
-python -m coverage run --source pytorch_lightning -m py.test pytorch_lightning tests pl_examples -v --doctest-modules --flake8 --durations=0
+python -m coverage run --source pytorch_lightning -m py.test pytorch_lightning tests pl_examples -v --flake8
 python -m coverage report -m

 # specific file
6 changes: 5 additions & 1 deletion setup.cfg
@@ -6,12 +6,16 @@ norecursedirs =
 python_files =
 test_*.py
 # doctest_plus = disabled
-addopts = --strict
+addopts =
+    --strict
+    --doctest-modules
+    --durations=0
 markers =
 slow
 remote_data
 filterwarnings
 gpus_param_tests
+junit_duration_report = call

 [coverage:report]
 exclude_lines =
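
Note: `--doctest-modules` tells pytest to also collect doctest examples from the docstrings of the modules it imports, and `--durations=0` prints the duration of every test. Moving both into `addopts` applies them to every pytest run, which is why the explicit flags disappear from the CI commands earlier in this diff; `junit_duration_report = call` limits the durations written to the JUnit XML to the test call phase. As a standalone sketch (not part of this commit) of what `--doctest-modules` picks up, any collected module containing a docstring example like the one below gains an extra test:

def double(x):
    """Return twice the input.

    >>> double(21)
    42
    """
    return 2 * x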
2 changes: 1 addition & 1 deletion tests/README.md
@@ -40,7 +40,7 @@ Make sure to run coverage on a GPU machine with at least 2 GPUs and NVIDIA apex
 cd pytorch-lightning

 # generate coverage (coverage is also installed as part of dev dependencies under tests/requirements-devel.txt)
-coverage run --source pytorch_lightning -m py.test pytorch_lightning tests examples -v --doctest-modules
+coverage run --source pytorch_lightning -m py.test pytorch_lightning tests examples -v

 # print coverage stats
 coverage report -m
4 changes: 2 additions & 2 deletions tests/base/utils.py
@@ -25,7 +25,7 @@ def assert_speed_parity(pl_times, pt_times, num_epochs):
 f"lightning was slower than PT (threshold {max_diff_per_epoch})"


-def run_model_test_without_loggers(trainer_options, model, min_acc=0.30):
+def run_model_test_without_loggers(trainer_options, model, min_acc=0.50):
 reset_seed()

 # fit model
@@ -155,7 +155,7 @@ def load_model_from_checkpoint(root_weights_dir, module_class=EvalModelTemplate)
 return trained_model


-def run_prediction(dataloader, trained_model, dp=False, min_acc=0.3):
+def run_prediction(dataloader, trained_model, dp=False, min_acc=0.50):
 # run prediction on 1 batch
 for batch in dataloader:
 break
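
Note: the two helpers above only have their default accuracy thresholds raised (0.30 and 0.3 → 0.50); their bodies are not part of this diff. As a rough, hypothetical sketch of the kind of check such a threshold gates (the names, batch layout, and model output shape here are assumptions, not the actual `run_prediction` implementation):

import torch

def check_min_accuracy(model, batch, min_acc=0.50):
    # hypothetical stand-in for the accuracy assertion inside the test helpers
    x, y = batch  # assumes an (inputs, labels) batch
    with torch.no_grad():
        logits = model(x)  # assumes the model returns class logits
    acc = (logits.argmax(dim=1) == y).float().mean().item()
    assert acc >= min_acc, f"accuracy {acc:.2f} is below the required minimum {min_acc}"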
15 changes: 5 additions & 10 deletions tests/trainer/test_dataloaders.py
@@ -248,9 +248,8 @@ def test_mixing_of_dataloader_options(tmpdir):
 f'`test_dataloaders` not initiated properly, got {trainer.test_dataloaders}'


+@pytest.mark.skip('TODO: speed up this test')
 def test_train_inf_dataloader_error(tmpdir):
-pytest.skip('TODO: fix speed of this test')
-
 """Test inf train data loader (e.g. IterableDataset)"""
 model = EvalModelTemplate()
 model.train_dataloader = model.train_dataloader__infinite
@@ -261,9 +260,8 @@ def test_train_inf_dataloader_error(tmpdir):
 trainer.fit(model)


+@pytest.mark.skip('TODO: speed up this test')
 def test_val_inf_dataloader_error(tmpdir):
-pytest.skip('TODO: fix speed of this test')
-
 """Test inf train data loader (e.g. IterableDataset)"""
 model = EvalModelTemplate()
 model.val_dataloader = model.val_dataloader__infinite
@@ -274,9 +272,8 @@ def test_val_inf_dataloader_error(tmpdir):
 trainer.fit(model)


+@pytest.mark.skip('TODO: speed up this test')
 def test_test_inf_dataloader_error(tmpdir):
-pytest.skip('TODO: fix speed of this test')
-
 """Test inf train data loader (e.g. IterableDataset)"""
 model = EvalModelTemplate()
 model.test_dataloader = model.test_dataloader__infinite
@@ -288,9 +285,8 @@ def test_test_inf_dataloader_error(tmpdir):


 @pytest.mark.parametrize('check_interval', [50, 1.0])
+@pytest.mark.skip('TODO: speed up this test')
 def test_inf_train_dataloader(tmpdir, check_interval):
-pytest.skip('TODO: fix speed of this test')
-
 """Test inf train data loader (e.g. IterableDataset)"""

 model = EvalModelTemplate()
@@ -307,9 +303,8 @@ def test_inf_train_dataloader(tmpdir, check_interval):


 @pytest.mark.parametrize('check_interval', [1.0])
+@pytest.mark.skip('TODO: speed up this test')
 def test_inf_val_dataloader(tmpdir, check_interval):
-pytest.skip('TODO: fix speed of this test')
-
 """Test inf val data loader (e.g. IterableDataset)"""

 model = EvalModelTemplate()
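
Note: these tests switch from a `pytest.skip(...)` call inside the test body to the `@pytest.mark.skip(...)` decorator. With the mark, pytest skips the test during setup, before fixtures such as `tmpdir` are created and before any of the body runs; an in-body `pytest.skip()` only fires once setup and any preceding statements have executed (and, placed above the string literal, it also kept that string from being the docstring). A minimal, standalone illustration of the two behaviours (not code from the repository):

import pytest


@pytest.mark.skip('skipped via the mark')
def test_marked_skip(tmpdir):
    """Skipped at setup time: the tmpdir fixture is never instantiated."""
    assert False  # never reached


def test_runtime_skip(tmpdir):
    pytest.skip('skipped at runtime')  # tmpdir was already created before this line runs
    assert False  # not reached either, but the setup cost has been paid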
3 changes: 1 addition & 2 deletions tests/trainer/test_lr_finder.py
@@ -134,9 +134,8 @@ def test_call_to_trainer_method(tmpdir):
 'Learning rate was not altered after running learning rate finder'


+@pytest.mark.skip('TODO: speed up this test')
 def test_accumulation_and_early_stopping(tmpdir):
-pytest.skip('TODO: speed up this test')
-
 """ Test that early stopping of learning rate finder works, and that
 accumulation also works for this feature """

