Skip to content

Commit 2df3570

Browse files
committed
Skip Python 2/3 only sections on each of these to achieve actual 100% coverage when running on each
1 parent 4eb1b05 commit 2df3570

File tree

4 files changed

+25
-4
lines changed

4 files changed

+25
-4
lines changed

.coveragerc

+1
Original file line numberDiff line numberDiff line change
@@ -7,3 +7,4 @@ precision = 1
77
exclude_lines =
88
pragma: no cover
99
abc.abstractmethod
10+
\# PY2

.coveragerc-py2

+10
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
[run]
2+
branch=True
3+
source=unasync
4+
5+
[report]
6+
precision = 1
7+
exclude_lines =
8+
pragma: no cover
9+
abc.abstractmethod
10+
\# PY3

ci/travis.sh

+11-1
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,16 @@ if [ "$USE_PYPY_RELEASE_VERSION" != "" ]; then
5151
source testenv/bin/activate
5252
fi
5353

54+
case "${MACPYTHON:-${TRAVIS_PYTHON_VERSION:-}}" in
55+
2*)
56+
COVERAGE_FILE=.coveragerc-py2
57+
;;
58+
59+
*)
60+
COVERAGE_FILE=.coveragerc
61+
;;
62+
esac
63+
5464
pip install -U pip setuptools wheel
5565

5666
if [ "$CHECK_FORMATTING" = "1" ]; then
@@ -91,7 +101,7 @@ else
91101
mkdir empty
92102
cd empty
93103

94-
pytest -ra -v --cov=unasync --cov-config=../.coveragerc --verbose ../tests
104+
pytest -ra -v --cov=unasync --cov-config="../${COVERAGE_FILE}" --verbose ../tests
95105

96106
bash <(curl -s https://codecov.io/bash)
97107
fi

src/unasync/__init__.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ def _match(self, filepath):
6767
def _unasync_file(self, filepath):
6868
with open(filepath, "rb") as f:
6969
write_kwargs = {}
70-
if sys.version_info[0] >= 3:
70+
if sys.version_info[0] >= 3: # PY3 # pragma: no branch
7171
encoding, _ = std_tokenize.detect_encoding(f.readline)
7272
write_kwargs["encoding"] = encoding
7373
f.seek(0)
@@ -128,11 +128,11 @@ def unasync_files(fpath_list, rules):
128128

129129

130130
def _get_tokens(f):
131-
if sys.version_info[0] == 2:
131+
if sys.version_info[0] == 2: # PY2
132132
for tok in std_tokenize.generate_tokens(f.readline):
133133
type_, string, start, end, line = tok
134134
yield Token(type_, string, start, end, line)
135-
else:
135+
else: # PY3
136136
for tok in std_tokenize.tokenize(f.readline):
137137
if tok.type == std_tokenize.ENCODING:
138138
continue

0 commit comments

Comments
 (0)