Port the tinycss test suite to use the unittest module

Kovid Goyal 2014-05-21 13:05:05 +05:30
parent 097d9e3099
commit f38af40146
7 changed files with 1029 additions and 0 deletions
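
The new runner for the ported suite is src/tinycss/tests/main.py, included below. As a rough sketch (assuming src/ is the import root, which this diff does not show), the same tests should also be reachable through the stock unittest loader:

    python -m unittest discover -t src -s src/tinycss/tests -p '*.py'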

27 src/tinycss/tests/__init__.py Normal file
@@ -0,0 +1,27 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
import unittest
try:
unicode
except NameError:
unicode = str
class BaseTest(unittest.TestCase):
longMessage = True
maxDiff = None
ae = unittest.TestCase.assertEqual
def assert_errors(self, errors, expected_errors):
"""Check that each error message contains the expected substring, rather than comparing full messages."""
self.ae(len(errors), len(expected_errors))
for error, expected in zip(errors, expected_errors):
self.assertIn(expected, unicode(error))
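
As an illustration of how this helper gets used (a hypothetical module; TestExample and the literals below are made up, not part of the commit), the test files that follow all take roughly this shape:

    from tinycss.tests import BaseTest

    class TestExample(BaseTest):

        def test_example(self):
            self.ae(2 + 2, 4)  # ae is shorthand for assertEqual
            # assert_errors matches each error by substring, not full equality
            self.assert_errors(
                ['unknown at-rule in stylesheet context: @lipsum'],
                ['unknown at-rule'])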

198 src/tinycss/tests/color3.py Normal file
@@ -0,0 +1,198 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
from tinycss.color3 import parse_color_string, hsl_to_rgb
from tinycss.tests import BaseTest
class TestColor3(BaseTest):
def test_color_parsing(self):
for css_source, expected_result in [
('', None),
(' /* hey */\n', None),
('4', None),
('top', None),
('/**/transparent', (0, 0, 0, 0)),
('transparent', (0, 0, 0, 0)),
(' transparent\n', (0, 0, 0, 0)),
('TransParent', (0, 0, 0, 0)),
('currentColor', 'currentColor'),
('CURRENTcolor', 'currentColor'),
('current_Color', None),
('black', (0, 0, 0, 1)),
('white', (1, 1, 1, 1)),
('fuchsia', (1, 0, 1, 1)),
('cyan', (0, 1, 1, 1)),
('CyAn', (0, 1, 1, 1)),
('darkkhaki', (189 / 255., 183 / 255., 107 / 255., 1)),
('#', None),
('#f', None),
('#ff', None),
('#fff', (1, 1, 1, 1)),
('#ffg', None),
('#ffff', None),
('#fffff', None),
('#ffffff', (1, 1, 1, 1)),
('#fffffg', None),
('#fffffff', None),
('#ffffffff', None),
('#fffffffff', None),
('#cba987', (203 / 255., 169 / 255., 135 / 255., 1)),
('#CbA987', (203 / 255., 169 / 255., 135 / 255., 1)),
('#1122aA', (17 / 255., 34 / 255., 170 / 255., 1)),
('#12a', (17 / 255., 34 / 255., 170 / 255., 1)),
('rgb(203, 169, 135)', (203 / 255., 169 / 255., 135 / 255., 1)),
('RGB(255, 255, 255)', (1, 1, 1, 1)),
('rgB(0, 0, 0)', (0, 0, 0, 1)),
('rgB(0, 51, 255)', (0, .2, 1, 1)),
('rgb(0,51,255)', (0, .2, 1, 1)),
('rgb(0\t, 51 ,255)', (0, .2, 1, 1)),
('rgb(/* R */0, /* G */51, /* B */255)', (0, .2, 1, 1)),
('rgb(-51, 306, 0)', (-.2, 1.2, 0, 1)), # out of 0..1 is allowed
('rgb(42%, 3%, 50%)', (.42, .03, .5, 1)),
('RGB(100%, 100%, 100%)', (1, 1, 1, 1)),
('rgB(0%, 0%, 0%)', (0, 0, 0, 1)),
('rgB(10%, 20%, 30%)', (.1, .2, .3, 1)),
('rgb(10%,20%,30%)', (.1, .2, .3, 1)),
('rgb(10%\t, 20% ,30%)', (.1, .2, .3, 1)),
('rgb(/* R */10%, /* G */20%, /* B */30%)', (.1, .2, .3, 1)),
('rgb(-12%, 110%, 1400%)', (-.12, 1.1, 14, 1)), # out of 0..1 is allowed
('rgb(10%, 50%, 0)', None),
('rgb(255, 50%, 0%)', None),
('rgb(0, 0 0)', None),
('rgb(0, 0, 0deg)', None),
('rgb(0, 0, light)', None),
('rgb()', None),
('rgb(0)', None),
('rgb(0, 0)', None),
('rgb(0, 0, 0, 0)', None),
('rgb(0%)', None),
('rgb(0%, 0%)', None),
('rgb(0%, 0%, 0%, 0%)', None),
('rgb(0%, 0%, 0%, 0)', None),
('rgba(0, 0, 0, 0)', (0, 0, 0, 0)),
('rgba(203, 169, 135, 0.3)', (203 / 255., 169 / 255., 135 / 255., 0.3)),
('RGBA(255, 255, 255, 0)', (1, 1, 1, 0)),
('rgBA(0, 51, 255, 1)', (0, 0.2, 1, 1)),
('rgba(0, 51, 255, 1.1)', (0, 0.2, 1, 1)),
('rgba(0, 51, 255, 37)', (0, 0.2, 1, 1)),
('rgba(0, 51, 255, 0.42)', (0, 0.2, 1, 0.42)),
('rgba(0, 51, 255, 0)', (0, 0.2, 1, 0)),
('rgba(0, 51, 255, -0.1)', (0, 0.2, 1, 0)),
('rgba(0, 51, 255, -139)', (0, 0.2, 1, 0)),
('rgba(42%, 3%, 50%, 0.3)', (.42, .03, .5, 0.3)),
('RGBA(100%, 100%, 100%, 0)', (1, 1, 1, 0)),
('rgBA(0%, 20%, 100%, 1)', (0, 0.2, 1, 1)),
('rgba(0%, 20%, 100%, 1.1)', (0, 0.2, 1, 1)),
('rgba(0%, 20%, 100%, 37)', (0, 0.2, 1, 1)),
('rgba(0%, 20%, 100%, 0.42)', (0, 0.2, 1, 0.42)),
('rgba(0%, 20%, 100%, 0)', (0, 0.2, 1, 0)),
('rgba(0%, 20%, 100%, -0.1)', (0, 0.2, 1, 0)),
('rgba(0%, 20%, 100%, -139)', (0, 0.2, 1, 0)),
('rgba(255, 255, 255, 0%)', None),
('rgba(10%, 50%, 0, 1)', None),
('rgba(255, 50%, 0%, 1)', None),
('rgba(0, 0, 0 0)', None),
('rgba(0, 0, 0, 0deg)', None),
('rgba(0, 0, 0, light)', None),
('rgba()', None),
('rgba(0)', None),
('rgba(0, 0, 0)', None),
('rgba(0, 0, 0, 0, 0)', None),
('rgba(0%)', None),
('rgba(0%, 0%)', None),
('rgba(0%, 0%, 0%)', None),
('rgba(0%, 0%, 0%, 0%)', None),
('rgba(0%, 0%, 0%, 0%, 0%)', None),
('HSL(0, 0%, 0%)', (0, 0, 0, 1)),
('hsL(0, 100%, 50%)', (1, 0, 0, 1)),
('hsl(60, 100%, 37.5%)', (0.75, 0.75, 0, 1)),
('hsl(780, 100%, 37.5%)', (0.75, 0.75, 0, 1)),
('hsl(-300, 100%, 37.5%)', (0.75, 0.75, 0, 1)),
('hsl(300, 50%, 50%)', (0.75, 0.25, 0.75, 1)),
('hsl(10, 50%, 0)', None),
('hsl(50%, 50%, 0%)', None),
('hsl(0, 0% 0%)', None),
('hsl(30deg, 100%, 100%)', None),
('hsl(0, 0%, light)', None),
('hsl()', None),
('hsl(0)', None),
('hsl(0, 0%)', None),
('hsl(0, 0%, 0%, 0%)', None),
('HSLA(-300, 100%, 37.5%, 1)', (0.75, 0.75, 0, 1)),
('hsLA(-300, 100%, 37.5%, 12)', (0.75, 0.75, 0, 1)),
('hsla(-300, 100%, 37.5%, 0.2)', (0.75, 0.75, 0, .2)),
('hsla(-300, 100%, 37.5%, 0)', (0.75, 0.75, 0, 0)),
('hsla(-300, 100%, 37.5%, -3)', (0.75, 0.75, 0, 0)),
('hsla(10, 50%, 0, 1)', None),
('hsla(50%, 50%, 0%, 1)', None),
('hsla(0, 0% 0%, 1)', None),
('hsla(30deg, 100%, 100%, 1)', None),
('hsla(0, 0%, light, 1)', None),
('hsla()', None),
('hsla(0)', None),
('hsla(0, 0%)', None),
('hsla(0, 0%, 0%, 50%)', None),
('hsla(0, 0%, 0%, 1, 0%)', None),
('cmyk(0, 0, 0, 0)', None),
]:
result = parse_color_string(css_source)
if isinstance(result, tuple):
for got, expected in zip(result, expected_result):
# Compensate for floating point errors:
self.assertLess(abs(got - expected), 1e-10)
for i, attr in enumerate(['red', 'green', 'blue', 'alpha']):
self.ae(getattr(result, attr), result[i])
else:
self.ae(result, expected_result)
def test_hsl(self):
for hsl, expected_rgb in [
# http://en.wikipedia.org/wiki/HSL_and_HSV#Examples
((0, 0, 100), (1, 1, 1)),
((127, 0, 100), (1, 1, 1)),
((0, 0, 50), (0.5, 0.5, 0.5)),
((127, 0, 50), (0.5, 0.5, 0.5)),
((0, 0, 0), (0, 0, 0)),
((127, 0, 0), (0, 0, 0)),
((0, 100, 50), (1, 0, 0)),
((60, 100, 37.5), (0.75, 0.75, 0)),
((780, 100, 37.5), (0.75, 0.75, 0)),
((-300, 100, 37.5), (0.75, 0.75, 0)),
((120, 100, 25), (0, 0.5, 0)),
((180, 100, 75), (0.5, 1, 1)),
((240, 100, 75), (0.5, 0.5, 1)),
((300, 50, 50), (0.75, 0.25, 0.75)),
((61.8, 63.8, 39.3), (0.628, 0.643, 0.142)),
((251.1, 83.2, 51.1), (0.255, 0.104, 0.918)),
((134.9, 70.7, 39.6), (0.116, 0.675, 0.255)),
((49.5, 89.3, 49.7), (0.941, 0.785, 0.053)),
((283.7, 77.5, 54.2), (0.704, 0.187, 0.897)),
((14.3, 81.7, 62.4), (0.931, 0.463, 0.316)),
((56.9, 99.1, 76.5), (0.998, 0.974, 0.532)),
((162.4, 77.9, 44.7), (0.099, 0.795, 0.591)),
((248.3, 60.1, 37.3), (0.211, 0.149, 0.597)),
((240.5, 29, 60.7), (0.495, 0.493, 0.721)),
]:
for got, expected in zip(hsl_to_rgb(*hsl), expected_rgb):
# Compensate for floating point errors and Wikipedia's rounding:
self.assertLess(abs(got - expected), 0.001)

337 src/tinycss/tests/css21.py Normal file
@@ -0,0 +1,337 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
import io
import os
import tempfile
from tinycss.css21 import CSS21Parser
from tinycss.tests.tokenizer import jsonify
from tinycss.tests import BaseTest
class CoreParser(CSS21Parser):
"""A parser that always accepts unparsed at-rules."""
def parse_at_rule(self, rule, stylesheet_rules, errors, context):
return rule
def parse_bytes(css_bytes, kwargs):
return CSS21Parser().parse_stylesheet_bytes(css_bytes, **kwargs)
def parse_bytesio_file(css_bytes, kwargs):
css_file = io.BytesIO(css_bytes)
return CSS21Parser().parse_stylesheet_file(css_file, **kwargs)
def parse_filename(css_bytes, kwargs):
css_file = tempfile.NamedTemporaryFile(delete=False)
try:
css_file.write(css_bytes)
# Windows cannot open the file by its name a second time while
# it is still open for writing.
css_file.close()
return CSS21Parser().parse_stylesheet_file(css_file.name, **kwargs)
finally:
os.remove(css_file.name)
class TestCSS21(BaseTest):
def test_bytes(self):
for (css_bytes, kwargs, expected_result, parse) in [
params + (parse,)
for parse in [parse_bytes, parse_bytesio_file, parse_filename]
for params in [
('@import "é";'.encode('utf8'), {}, 'é'),
('@import "é";'.encode('utf16'), {}, 'é'), # with a BOM
('@import "é";'.encode('latin1'), {}, 'é'),
('@import "£";'.encode('Shift-JIS'), {}, '\x81\x92'), # latin1 mojibake
('@charset "Shift-JIS";@import "£";'.encode('Shift-JIS'), {}, '£'),
(' @charset "Shift-JIS";@import "£";'.encode('Shift-JIS'), {},
'\x81\x92'),
('@import "£";'.encode('Shift-JIS'),
{'document_encoding': 'Shift-JIS'}, '£'),
('@import "£";'.encode('Shift-JIS'),
{'document_encoding': 'utf8'}, '\x81\x92'),
('@charset "utf8"; @import "£";'.encode('utf8'),
{'document_encoding': 'latin1'}, '£'),
# Mojibake yay!
(' @charset "utf8"; @import "é";'.encode('utf8'),
{'document_encoding': 'latin1'}, 'é'),
('@import "é";'.encode('utf8'), {'document_encoding': 'latin1'}, 'é'),
]
]:
stylesheet = parse(css_bytes, kwargs)
self.ae(stylesheet.rules[0].at_keyword, '@import')
self.ae(stylesheet.rules[0].uri, expected_result)
def test_at_rules(self):
for (css_source, expected_rules, expected_errors) in [
(' /* hey */\n', 0, []),
('foo {}', 1, []),
('foo{} @lipsum{} bar{}', 2,
['unknown at-rule in stylesheet context: @lipsum']),
('@charset "ascii"; foo {}', 1, []),
(' @charset "ascii"; foo {}', 1, ['mis-placed or malformed @charset rule']),
('@charset ascii; foo {}', 1, ['mis-placed or malformed @charset rule']),
('foo {} @charset "ascii";', 1, ['mis-placed or malformed @charset rule']),
]:
# Pass 'encoding' to allow @charset
stylesheet = CSS21Parser().parse_stylesheet(css_source, encoding='utf8')
self.assert_errors(stylesheet.errors, expected_errors)
self.ae(len(stylesheet.rules), expected_rules)
def test_core_parser(self):
for (css_source, expected_rules, expected_errors) in [
(' /* hey */\n', [], []),
('foo{} /* hey */\n@bar;@baz{}',
[('foo', []), ('@bar', [], None), ('@baz', [], [])], []),
('@import "foo.css"/**/;', [
('@import', [('STRING', 'foo.css')], None)], []),
('@import "foo.css"/**/', [
('@import', [('STRING', 'foo.css')], None)], []),
('@import "foo.css', [
('@import', [('STRING', 'foo.css')], None)], []),
('{}', [], ['empty selector']),
('a{b:4}', [('a', [('b', [('INTEGER', 4)])])], []),
('@page {\t b: 4; @margin}', [('@page', [], [
('S', '\t '), ('IDENT', 'b'), (':', ':'), ('S', ' '), ('INTEGER', 4),
(';', ';'), ('S', ' '), ('ATKEYWORD', '@margin'),
])], []),
('foo', [], ['no declaration block found']),
('foo @page {} bar {}', [('bar', [])],
['unexpected ATKEYWORD token in selector']),
('foo { content: "unclosed string;\n color:red; ; margin/**/\n: 2cm; }',
[('foo', [('margin', [('DIMENSION', 2)])])],
['unexpected BAD_STRING token in property value']),
('foo { 4px; bar: 12% }',
[('foo', [('bar', [('PERCENTAGE', 12)])])],
['expected a property name, got DIMENSION']),
('foo { bar! 3cm auto ; baz: 7px }',
[('foo', [('baz', [('DIMENSION', 7)])])],
["expected ':', got DELIM"]),
('foo { bar ; baz: {("}"/* comment */) {0@fizz}} }',
[('foo', [('baz', [('{', [
('(', [('STRING', '}')]), ('S', ' '),
('{', [('INTEGER', 0), ('ATKEYWORD', '@fizz')])
])])])],
["expected ':'"]),
('foo { bar: ; baz: not(z) }',
[('foo', [('baz', [('FUNCTION', 'not', [('IDENT', 'z')])])])],
['expected a property value']),
('foo { bar: (]) ; baz: U+20 }',
[('foo', [('baz', [('UNICODE-RANGE', 'U+20')])])],
['unmatched ] token in (']),
]:
stylesheet = CoreParser().parse_stylesheet(css_source)
self.assert_errors(stylesheet.errors, expected_errors)
result = [
(rule.at_keyword, list(jsonify(rule.head)),
list(jsonify(rule.body))
if rule.body is not None else None)
if rule.at_keyword else
(rule.selector.as_css(), [
(decl.name, list(jsonify(decl.value)))
for decl in rule.declarations])
for rule in stylesheet.rules
]
self.ae(result, expected_rules)
def test_parse_style_attr(self):
for (css_source, expected_declarations, expected_errors) in [
(' /* hey */\n', [], []),
('b:4', [('b', [('INTEGER', 4)])], []),
('{b:4}', [], ['expected a property name, got {']),
('b:4} c:3', [], ['unmatched } token in property value']),
(' 4px; bar: 12% ',
[('bar', [('PERCENTAGE', 12)])],
['expected a property name, got DIMENSION']),
('bar! 3cm auto ; baz: 7px',
[('baz', [('DIMENSION', 7)])],
["expected ':', got DELIM"]),
('foo; bar ; baz: {("}"/* comment */) {0@fizz}}',
[('baz', [('{', [
('(', [('STRING', '}')]), ('S', ' '),
('{', [('INTEGER', 0), ('ATKEYWORD', '@fizz')])
])])],
["expected ':'", "expected ':'"]),
('bar: ; baz: not(z)',
[('baz', [('FUNCTION', 'not', [('IDENT', 'z')])])],
['expected a property value']),
('bar: (]) ; baz: U+20',
[('baz', [('UNICODE-RANGE', 'U+20')])],
['unmatched ] token in (']),
]:
declarations, errors = CSS21Parser().parse_style_attr(css_source)
self.assert_errors(errors, expected_errors)
result = [(decl.name, list(jsonify(decl.value)))
for decl in declarations]
self.ae(result, expected_declarations)
def test_important(self):
for (css_source, expected_declarations, expected_errors) in [
(' /* hey */\n', [], []),
('a:1; b:2',
[('a', [('INTEGER', 1)], None), ('b', [('INTEGER', 2)], None)], []),
('a:1 important; b: important',
[('a', [('INTEGER', 1), ('S', ' '), ('IDENT', 'important')], None),
('b', [('IDENT', 'important')], None)],
[]),
('a:1 !important; b:2',
[('a', [('INTEGER', 1)], 'important'), ('b', [('INTEGER', 2)], None)],
[]),
('a:1!\t Im\\50 O\\RTant; b:2',
[('a', [('INTEGER', 1)], 'important'), ('b', [('INTEGER', 2)], None)],
[]),
('a: !important; b:2',
[('b', [('INTEGER', 2)], None)],
['expected a value before !important']),
]:
declarations, errors = CSS21Parser().parse_style_attr(css_source)
self.assert_errors(errors, expected_errors)
result = [(decl.name, list(jsonify(decl.value)), decl.priority)
for decl in declarations]
self.ae(result, expected_declarations)
def test_at_import(self):
for (css_source, expected_rules, expected_errors) in [
(' /* hey */\n', [], []),
('@import "foo.css";', [('foo.css', ['all'])], []),
('@import url(foo.css);', [('foo.css', ['all'])], []),
('@import "foo.css" screen, print;',
[('foo.css', ['screen', 'print'])], []),
('@charset "ascii"; @import "foo.css"; @import "bar.css";',
[('foo.css', ['all']), ('bar.css', ['all'])], []),
('foo {} @import "foo.css";',
[], ['@import rule not allowed after a ruleset']),
('@page {} @import "foo.css";',
[], ['@import rule not allowed after an @page rule']),
('@import ;',
[], ['expected URI or STRING for @import rule']),
('@import foo.css;',
[], ['expected URI or STRING for @import rule, got IDENT']),
('@import "foo.css" {}',
[], ["expected ';', got a block"]),
]:
# Pass 'encoding' to allow @charset
stylesheet = CSS21Parser().parse_stylesheet(css_source, encoding='utf8')
self.assert_errors(stylesheet.errors, expected_errors)
result = [
(rule.uri, rule.media)
for rule in stylesheet.rules
if rule.at_keyword == '@import'
]
self.ae(result, expected_rules)
def test_at_page(self):
for (css, expected_result, expected_errors) in [
('@page {}', (None, (0, 0), []), []),
('@page:first {}', ('first', (1, 0), []), []),
('@page :left{}', ('left', (0, 1), []), []),
('@page\t\n:right {}', ('right', (0, 1), []), []),
('@page :last {}', None, ['invalid @page selector']),
('@page : right {}', None, ['invalid @page selector']),
('@page table:left {}', None, ['invalid @page selector']),
('@page;', None, ['invalid @page rule: missing block']),
('@page { a:1; ; b: 2 }',
(None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
[]),
('@page { a:1; c: ; b: 2 }',
(None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
['expected a property value']),
('@page { a:1; @top-left {} b: 2 }',
(None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
['unknown at-rule in @page context: @top-left']),
('@page { a:1; @top-left {}; b: 2 }',
(None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
['unknown at-rule in @page context: @top-left']),
]:
stylesheet = CSS21Parser().parse_stylesheet(css)
self.assert_errors(stylesheet.errors, expected_errors)
if expected_result is None:
self.assertFalse(stylesheet.rules)
else:
self.ae(len(stylesheet.rules), 1)
rule = stylesheet.rules[0]
self.ae(rule.at_keyword, '@page')
self.ae(rule.at_rules, []) # in CSS 2.1
result = (
rule.selector,
rule.specificity,
[(decl.name, list(jsonify(decl.value)))
for decl in rule.declarations],
)
self.ae(result, expected_result)
def test_at_media(self):
for (css_source, expected_rules, expected_errors) in [
(' /* hey */\n', [], []),
('@media all {}', [(['all'], [])], []),
('@media screen, print {}', [(['screen', 'print'], [])], []),
('@media all;', [], ['invalid @media rule: missing block']),
('@media {}', [], ['expected media types for @media']),
('@media 4 {}', [], ['expected a media type, got INTEGER']),
('@media , screen {}', [], ['expected a media type']),
('@media screen, {}', [], ['expected a media type']),
('@media screen print {}', [],
['expected a media type, got IDENT, IDENT']),
('@media all { @page { a: 1 } @media; @import; foo { a: 1 } }',
[(['all'], [('foo', [('a', [('INTEGER', 1)])])])],
['@page rule not allowed in @media',
'@media rule not allowed in @media',
'@import rule not allowed in @media']),
]:
stylesheet = CSS21Parser().parse_stylesheet(css_source)
self.assert_errors(stylesheet.errors, expected_errors)
for rule in stylesheet.rules:
self.ae(rule.at_keyword, '@media')
result = [
(rule.media, [
(sub_rule.selector.as_css(), [
(decl.name, list(jsonify(decl.value)))
for decl in sub_rule.declarations])
for sub_rule in rule.rules
])
for rule in stylesheet.rules
]
self.ae(result, expected_rules)
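
A note on the CoreParser subclass defined at the top of this file: its parse_at_rule() override returns every at-rule unchanged instead of rejecting it, which is why inputs such as '@bar;' and '@baz{}' in test_core_parser come back as rules ('@bar', [], None) and ('@baz', [], []) rather than producing "unknown at-rule" errors.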

72 src/tinycss/tests/decoding.py Normal file
@@ -0,0 +1,72 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
from tinycss.decoding import decode
from tinycss.tests import BaseTest
def params(css, encoding, use_bom=False, expect_error=False, **kwargs):
"""Nicer syntax to make a tuple."""
return css, encoding, use_bom, expect_error, kwargs
class TestDecoding(BaseTest):
def test_decoding(self):
for (css, encoding, use_bom, expect_error, kwargs) in [
params('', 'utf8'), # default to utf8
params('𐂃', 'utf8'),
params('é', 'latin1'), # utf8 fails, fall back on ShiftJIS
params('£', 'ShiftJIS', expect_error=True),
params('£', 'ShiftJIS', protocol_encoding='Shift-JIS'),
params('£', 'ShiftJIS', linking_encoding='Shift-JIS'),
params('£', 'ShiftJIS', document_encoding='Shift-JIS'),
params('£', 'ShiftJIS', protocol_encoding='utf8',
document_encoding='ShiftJIS'),
params('@charset "utf8"; £', 'ShiftJIS', expect_error=True),
params('@charset "utf£8"; £', 'ShiftJIS', expect_error=True),
params('@charset "unknown-encoding"; £', 'ShiftJIS', expect_error=True),
params('@charset "utf8"; £', 'ShiftJIS', document_encoding='ShiftJIS'),
params('£', 'ShiftJIS', linking_encoding='utf8',
document_encoding='ShiftJIS'),
params('@charset "utf-32"; 𐂃', 'utf-32-be'),
params('@charset "Shift-JIS"; £', 'ShiftJIS'),
params('@charset "ISO-8859-8"; £', 'ShiftJIS', expect_error=True),
params('𐂃', 'utf-16-le', expect_error=True), # no BOM
params('𐂃', 'utf-16-le', use_bom=True),
params('𐂃', 'utf-32-be', expect_error=True),
params('𐂃', 'utf-32-be', use_bom=True),
params('𐂃', 'utf-32-be', document_encoding='utf-32-be'),
params('𐂃', 'utf-32-be', linking_encoding='utf-32-be'),
params('@charset "utf-32-le"; 𐂃', 'utf-32-be',
use_bom=True, expect_error=True),
# protocol_encoding takes precedence over @charset
params('@charset "ISO-8859-8"; £', 'ShiftJIS',
protocol_encoding='Shift-JIS'),
params('@charset "unknown-encoding"; £', 'ShiftJIS',
protocol_encoding='Shift-JIS'),
params('@charset "Shift-JIS"; £', 'ShiftJIS',
protocol_encoding='utf8'),
# @charset takes precedence over document_encoding
params('@charset "Shift-JIS"; £', 'ShiftJIS',
document_encoding='ISO-8859-8'),
# @charset takes precedence over linking_encoding
params('@charset "Shift-JIS"; £', 'ShiftJIS',
linking_encoding='ISO-8859-8'),
# linking_encoding takes precedence over document_encoding
params('£', 'ShiftJIS',
linking_encoding='Shift-JIS', document_encoding='ISO-8859-8'),
]:
if use_bom:
source = '\ufeff' + css
else:
source = css
css_bytes = source.encode(encoding)
result, result_encoding = decode(css_bytes, **kwargs)
if expect_error:
self.assertNotEqual(result, css)
else:
self.ae(result, css)

48 src/tinycss/tests/main.py Normal file
@@ -0,0 +1,48 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
import unittest, os, argparse
def find_tests():
return unittest.defaultTestLoader.discover(os.path.dirname(os.path.abspath(__file__)), pattern='*.py')
def run_tests(find_tests=find_tests):
parser = argparse.ArgumentParser()
parser.add_argument('name', nargs='?', default=None,
help='The name of the test to run')
args = parser.parse_args()
if args.name and args.name.startswith('.'):
tests = find_tests()
q = args.name[1:]
if not q.startswith('test_'):
q = 'test_' + q
ans = None
try:
for suite in tests:
for test in suite._tests:
if test.__class__.__name__ == 'ModuleImportFailure':
raise Exception('Failed to import a test module: %s' % test)
for s in test:
if s._testMethodName == q:
ans = s
raise StopIteration()
except StopIteration:
pass
if ans is None:
print ('No test named %s found' % args.name)
raise SystemExit(1)
tests = ans
else:
tests = unittest.defaultTestLoader.loadTestsFromName(args.name) if args.name else find_tests()
r = unittest.TextTestRunner
r(verbosity=4).run(tests)
if __name__ == '__main__':
run_tests()
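
Usage sketch for the runner above (invocations assumed, not shown in the diff): with no argument it discovers and runs everything; a leading dot looks up a single test method across all suites; anything else is passed to loadTestsFromName as a regular dotted name.

    python src/tinycss/tests/main.py
    python src/tinycss/tests/main.py .color_parsing   # resolves to test_color_parsing
    python src/tinycss/tests/main.py tinycss.tests.color3.TestColor3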

92 src/tinycss/tests/page3.py Normal file
@@ -0,0 +1,92 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
from tinycss.page3 import CSSPage3Parser
from tinycss.tests import BaseTest
from tinycss.tests.tokenizer import jsonify
class TestPage3(BaseTest):
def test_selectors(self):
for css, expected_selector, expected_specificity, expected_errors in [
('@page {}', (None, None), (0, 0, 0), []),
('@page :first {}', (None, 'first'), (0, 1, 0), []),
('@page:left{}', (None, 'left'), (0, 0, 1), []),
('@page :right {}', (None, 'right'), (0, 0, 1), []),
('@page :blank{}', (None, 'blank'), (0, 1, 0), []),
('@page :last {}', None, None, ['invalid @page selector']),
('@page : first {}', None, None, ['invalid @page selector']),
('@page foo:first {}', ('foo', 'first'), (1, 1, 0), []),
('@page bar :left {}', ('bar', 'left'), (1, 0, 1), []),
(r'@page \26:right {}', ('&', 'right'), (1, 0, 1), []),
('@page foo {}', ('foo', None), (1, 0, 0), []),
(r'@page \26 {}', ('&', None), (1, 0, 0), []),
('@page foo fist {}', None, None, ['invalid @page selector']),
('@page foo, bar {}', None, None, ['invalid @page selector']),
('@page foo&first {}', None, None, ['invalid @page selector']),
]:
stylesheet = CSSPage3Parser().parse_stylesheet(css)
self.assert_errors(stylesheet.errors, expected_errors)
if stylesheet.rules:
self.ae(len(stylesheet.rules), 1)
rule = stylesheet.rules[0]
self.ae(rule.at_keyword, '@page')
selector = rule.selector
self.ae(rule.specificity, expected_specificity)
else:
selector = None
self.ae(selector, expected_selector)
def test_content(self):
for css, expected_declarations, expected_rules, expected_errors in [
('@page {}', [], [], []),
('@page { foo: 4; bar: z }',
[('foo', [('INTEGER', 4)]), ('bar', [('IDENT', 'z')])], [], []),
('''@page { foo: 4;
@top-center { content: "Awesome Title" }
@bottom-left { content: counter(page) }
bar: z
}''',
[('foo', [('INTEGER', 4)]), ('bar', [('IDENT', 'z')])],
[('@top-center', [('content', [('STRING', 'Awesome Title')])]),
('@bottom-left', [('content', [
('FUNCTION', 'counter', [('IDENT', 'page')])])])],
[]),
('''@page { foo: 4;
@bottom-top { content: counter(page) }
bar: z
}''',
[('foo', [('INTEGER', 4)]), ('bar', [('IDENT', 'z')])],
[],
['unknown at-rule in @page context: @bottom-top']),
('@page{} @top-right{}', [], [], [
'@top-right rule not allowed in stylesheet']),
('@page{ @top-right 4 {} }', [], [], [
'unexpected INTEGER token in @top-right rule header']),
# Not many error recovery tests here; that is covered by the css21 tests
]:
stylesheet = CSSPage3Parser().parse_stylesheet(css)
self.assert_errors(stylesheet.errors, expected_errors)
def declarations(rule):
return [(decl.name, list(jsonify(decl.value)))
for decl in rule.declarations]
self.ae(len(stylesheet.rules), 1)
rule = stylesheet.rules[0]
self.ae(rule.at_keyword, '@page')
self.ae(declarations(rule), expected_declarations)
rules = [(margin_rule.at_keyword, declarations(margin_rule))
for margin_rule in rule.at_rules]
self.ae(rules, expected_rules)

255 src/tinycss/tests/tokenizer.py Normal file
@@ -0,0 +1,255 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
from tinycss.tests import BaseTest
from tinycss.tokenizer import tokenize_flat as tokenize, regroup
def jsonify(tokens):
"""Turn tokens into "JSON-compatible" data structures."""
for token in tokens:
if token.type == 'FUNCTION':
yield (token.type, token.function_name,
list(jsonify(token.content)))
elif token.is_container:
yield token.type, list(jsonify(token.content))
else:
yield token.type, token.value
class TestTokenizer(BaseTest):
def test_token_api(self):
for css_source in [
'(8, foo, [z])', '[8, foo, (z)]', '{8, foo, [z]}', 'func(8, foo, [z])'
]:
tokens = list(regroup(tokenize(css_source)))
self.ae(len(tokens), 1)
self.ae(len(tokens[0].content), 7)
def test_token_serialize_css(self):
for css_source in [
r'''p[example="\
foo(int x) {\
this.x = x;\
}\
"]''',
'"Lorem\\26Ipsum\ndolor" sit',
'/* Lorem\nipsum */\fa {\n color: red;\tcontent: "dolor\\\fsit" }',
'not([[lorem]]{ipsum (42)})',
'a[b{d]e}',
'a[b{"d',
]:
for _regroup in (regroup, lambda x: x):
tokens = _regroup(tokenize(css_source, ignore_comments=False))
result = ''.join(token.as_css() for token in tokens)
self.ae(result, css_source)
def test_comments(self):
for ignore_comments, expected_tokens in [
(False, [
('COMMENT', '/* lorem */'),
('S', ' '),
('IDENT', 'ipsum'),
('[', [
('IDENT', 'dolor'),
('COMMENT', '/* sit */'),
]),
('BAD_COMMENT', '/* amet')
]),
(True, [
('S', ' '),
('IDENT', 'ipsum'),
('[', [
('IDENT', 'dolor'),
]),
]),
]:
css_source = '/* lorem */ ipsum[dolor/* sit */]/* amet'
tokens = regroup(tokenize(css_source, ignore_comments))
result = list(jsonify(tokens))
self.ae(result, expected_tokens)
def test_token_grouping(self):
for css_source, expected_tokens in [
('', []),
(r'Lorem\26 "i\psum"4px', [
('IDENT', 'Lorem&'), ('STRING', 'ipsum'), ('DIMENSION', 4)]),
('not([[lorem]]{ipsum (42)})', [
('FUNCTION', 'not', [
('[', [
('[', [
('IDENT', 'lorem'),
]),
]),
('{', [
('IDENT', 'ipsum'),
('S', ' '),
('(', [
('INTEGER', 42),
])
])
])]),
# Close everything at EOF, no error
('a[b{"d', [
('IDENT', 'a'),
('[', [
('IDENT', 'b'),
('{', [
('STRING', 'd'),
]),
]),
]),
# Any remaining ), ] or } token is a nesting error
('a[b{d]e}', [
('IDENT', 'a'),
('[', [
('IDENT', 'b'),
('{', [
('IDENT', 'd'),
(']', ']'), # The error is visible here
('IDENT', 'e'),
]),
]),
]),
# Reference: the same source with the brackets correctly nested
('a[b{d}e]', [
('IDENT', 'a'),
('[', [
('IDENT', 'b'),
('{', [
('IDENT', 'd'),
]),
('IDENT', 'e'),
]),
]),
]:
tokens = regroup(tokenize(css_source, ignore_comments=False))
result = list(jsonify(tokens))
self.ae(result, expected_tokens)
def test_positions(self):
"""Test the reported line/column position of each token."""
css = '/* Lorem\nipsum */\fa {\n color: red;\tcontent: "dolor\\\fsit" }'
tokens = tokenize(css, ignore_comments=False)
result = [(token.type, token.line, token.column) for token in tokens]
self.ae(result, [
('COMMENT', 1, 1), ('S', 2, 9),
('IDENT', 3, 1), ('S', 3, 2), ('{', 3, 3),
('S', 3, 4), ('IDENT', 4, 5), (':', 4, 10),
('S', 4, 11), ('IDENT', 4, 12), (';', 4, 15), ('S', 4, 16),
('IDENT', 4, 17), (':', 4, 24), ('S', 4, 25), ('STRING', 4, 26),
('S', 5, 5), ('}', 5, 6)])
def test_tokens(self):
for css_source, expected_tokens in [
('', []),
('red -->',
[('IDENT', 'red'), ('S', ' '), ('CDC', '-->')]),
# Longest match rule: no CDC
('red-->',
[('IDENT', 'red--'), ('DELIM', '>')]),
(r'''p[example="\
foo(int x) {\
this.x = x;\
}\
"]''', [
('IDENT', 'p'),
('[', '['),
('IDENT', 'example'),
('DELIM', '='),
('STRING', 'foo(int x) { this.x = x;}'),
(']', ']')]),
# Numbers are parsed
('42 .5 -4pX 1.25em 30%',
[('INTEGER', 42), ('S', ' '),
('NUMBER', .5), ('S', ' '),
# units are normalized to lower-case:
('DIMENSION', -4, 'px'), ('S', ' '),
('DIMENSION', 1.25, 'em'), ('S', ' '),
('PERCENTAGE', 30, '%')]),
# URLs are extracted
('url(foo.png)', [('URI', 'foo.png')]),
('url("foo.png")', [('URI', 'foo.png')]),
# Escaping
(r'/* Comment with a \ backslash */',
[('COMMENT', '/* Comment with a \ backslash */')]), # Unchanged
# backslash followed by a newline in a string: ignored
('"Lorem\\\nIpsum"', [('STRING', 'LoremIpsum')]),
# backslash followed by a newline outside a string: stands for itself
('Lorem\\\nIpsum', [
('IDENT', 'Lorem'), ('DELIM', '\\'),
('S', '\n'), ('IDENT', 'Ipsum')]),
# Cancel the meaning of special characters
(r'"Lore\m Ipsum"', [('STRING', 'Lorem Ipsum')]), # or not special
(r'"Lorem \49psum"', [('STRING', 'Lorem Ipsum')]),
(r'"Lorem \49 psum"', [('STRING', 'Lorem Ipsum')]),
(r'"Lorem\"Ipsum"', [('STRING', 'Lorem"Ipsum')]),
(r'"Lorem\\Ipsum"', [('STRING', r'Lorem\Ipsum')]),
(r'"Lorem\5c Ipsum"', [('STRING', r'Lorem\Ipsum')]),
(r'Lorem\+Ipsum', [('IDENT', 'Lorem+Ipsum')]),
(r'Lorem+Ipsum', [('IDENT', 'Lorem'), ('DELIM', '+'), ('IDENT', 'Ipsum')]),
(r'url(foo\).png)', [('URI', 'foo).png')]),
# Unicode and backslash escaping
('\\26 B', [('IDENT', '&B')]),
('\\&B', [('IDENT', '&B')]),
('@\\26\tB', [('ATKEYWORD', '@&B')]),
('@\\&B', [('ATKEYWORD', '@&B')]),
('#\\26\nB', [('HASH', '#&B')]),
('#\\&B', [('HASH', '#&B')]),
('\\26\r\nB(', [('FUNCTION', '&B(')]),
('\\&B(', [('FUNCTION', '&B(')]),
(r'12.5\000026B', [('DIMENSION', 12.5, '&b')]),
(r'12.5\0000263B', [('DIMENSION', 12.5, '&3b')]), # max 6 digits
(r'12.5\&B', [('DIMENSION', 12.5, '&b')]),
(r'"\26 B"', [('STRING', '&B')]),
(r"'\000026B'", [('STRING', '&B')]),
(r'"\&B"', [('STRING', '&B')]),
(r'url("\26 B")', [('URI', '&B')]),
(r'url(\26 B)', [('URI', '&B')]),
(r'url("\&B")', [('URI', '&B')]),
(r'url(\&B)', [('URI', '&B')]),
(r'Lorem\110000Ipsum', [('IDENT', 'Lorem\uFFFDIpsum')]),
# Bad strings
# String ends at EOF without closing: no error, parsed
('"Lorem\\26Ipsum', [('STRING', 'Lorem&Ipsum')]),
# Unescaped newline: ends the string, error, unparsed
('"Lorem\\26Ipsum\n', [
('BAD_STRING', r'"Lorem\26Ipsum'), ('S', '\n')]),
# Tokenization restarts after the newline, so the second " starts
# a new string (which ends at EOF without errors, as above).
('"Lorem\\26Ipsum\ndolor" sit', [
('BAD_STRING', r'"Lorem\26Ipsum'), ('S', '\n'),
('IDENT', 'dolor'), ('STRING', ' sit')]),
]:
sources = [css_source]
for css_source in sources:
tokens = tokenize(css_source, ignore_comments=False)
result = [
(token.type, token.value) + (
() if token.unit is None else (token.unit,))
for token in tokens
]
self.ae(result, expected_tokens)
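
For reference, the jsonify() helper at the top of this last file produces the nested shapes used as expectations throughout: as a rough illustration, list(jsonify(regroup(tokenize('not(42)')))) should come out as roughly [('FUNCTION', 'not', [('INTEGER', 42)])].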