
Commit

Add unit test plus '--high-memory' option, *off by default*.
jeffknupp committed Jul 23, 2017
1 parent 2ab4971 commit e3cb9c1
Showing 2 changed files with 19 additions and 1 deletion.
7 changes: 6 additions & 1 deletion pandas/conftest.py
@@ -9,7 +9,9 @@ def pytest_addoption(parser):
     parser.addoption("--skip-slow", action="store_true",
                      help="skip slow tests")
     parser.addoption("--skip-network", action="store_true",
-                     help="run network tests")
+                     help="skip network tests")
+    parser.addoption("--run-highmemory", action="store_true",
+                     help="run high memory tests")
     parser.addoption("--only-slow", action="store_true",
                      help="run only slow tests")

@@ -24,6 +26,9 @@ def pytest_runtest_setup(item):
     if 'network' in item.keywords and item.config.getoption("--skip-network"):
         pytest.skip("skipping due to --skip-network")
 
+    if 'high_memory' in item.keywords and not item.config.getoption("--run-highmemory"):
+        pytest.skip("skipping high memory test since --run-highmemory was not set")
+
 
 # Configurations for all tests and all test modules

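The hook above makes the marker opt-in: any test tagged with pytest.mark.high_memory is skipped unless --run-highmemory is passed, so plain `pytest` runs stay unaffected. As a side note, newer pytest versions warn about unregistered marks; a minimal sketch of how the marker could also be declared in pandas/conftest.py (this registration is an assumption for illustration, not part of this commit):

    def pytest_configure(config):
        # Hypothetical addition: declare the custom 'high_memory' marker so
        # pytest does not warn about an unknown mark when the test module
        # applies @pytest.mark.high_memory.
        config.addinivalue_line(
            "markers",
            "high_memory: test allocates large amounts of memory; "
            "runs only when --run-highmemory is given")

Either way, the opt-in flow is: `pytest` alone skips the test, `pytest --run-highmemory` runs it.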
13 changes: 13 additions & 0 deletions pandas/tests/io/parser/test_parsers.py
@@ -1,6 +1,9 @@
 # -*- coding: utf-8 -*-
 
 import os
+from io import StringIO
 
+import pytest
+
 import pandas.util.testing as tm
@@ -24,6 +27,16 @@
 from .python_parser_only import PythonParserTests
 from .dtypes import DtypeTests
 
+
+@pytest.mark.high_memory
+def test_bytes_exceed_2gb():
+    """Read from a "CSV" that has a column larger than 2GB.
+    GH 16798
+    """
+    csv = StringIO('strings\n' + '\n'.join(['x' * (1 << 20) for _ in range(2100)]))
+    df = read_csv(csv, low_memory=False)
+    assert not df.empty
+
 
 class BaseParser(CommentTests, CompressionTests,
                  ConverterTests, DialectTests,
