Dataset columns, with dtype and observed minimum/maximum (for string columns the range is string length):

| Column | Dtype | Min | Max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string (length) | 7 | 55 |
| file_path | string (length) | 9 | 332 |
| class_name | string (length) | 3 | 290 |
| human_written_code | string (length) | 12 | 4.36M |
| class_skeleton | string (length) | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |

Each record below gives `id`, `repository_name`, `file_path`, `class_name`, `human_written_code`, and `class_skeleton`, followed by the remaining columns (`total_program_units` through `SumCyclomatic`) as a single pipe-separated row of numbers.
145,348 |
LemmoTresto/Webcord
|
LemmoTresto_Webcord/webcord/webhook.py
|
webcord.webhook.Webhook
|
class Webhook:
webhook_url = None
avatar_url = None
def __init__(self, webhook_url, avatar_url=None):
self.webhook_url = webhook_url
self.avatar_url = avatar_url
def change_url(self, webhook_url):
self.webhook_url = webhook_url
def change_avatar_url(self, avatar_url):
self.avatar_url = avatar_url
def send_message(self, message, username, avatar_url=None):
# Check for input of avatar url in function itself
if avatar_url == None:
#Check if user wants default avatar url
if self.avatar_url == None:
#Doesn't have default avatar_url defined.
payload = {
"content":message,
"username":username
}
else:
#Want's to use default avatar from webhook instance.
payload = {
"content":message,
"username":username,
"avatar_url":self.avatar_url
}
#Doesn't want to use default avatar url nor any input avatar url.
elif avatar_url == '' or avatar_url == ' ':
payload = {
"content":message,
"username":username
}
else:
#Avatar_url was inputted in this function.
payload = {
"content":message,
"username":username,
"avatar_url":avatar_url
}
sess = requests.session()
if str(type(self.webhook_url)).replace("<class '", "").replace(">'", "").replace("'>", "") == 'array' or str(type(self.webhook_url)).replace("<class '", "").replace(">'", "").replace("'>", "") == 'list':
# do request for every webhook.
for url in self.webhook_url:
resp = sess.post(url, data=payload)
print(resp.status_code)
elif str(type(self.webhook_url)).replace("<class '", "").replace(">'", "").replace("'>", "") == 'str':
resp = sess.post(self.webhook_url, data=payload)
print(resp.status_code)
else:
print("Eror in webhook url(s)! Webhook url should be a string, list or array.")
return None
def send_embed(self, embed, username, avatar_url=None):
embedDict = embed.to_dict()
del embedDict['type']
# Check for input of avatar url in function itself
if avatar_url == None:
#Check if user wants default avatar url
if self.avatar_url == None:
#Doesn't have default avatar_url defined.
payload = {
"username":username,
"embeds": [embedDict]
}
else:
#Want's to use default avatar from webhook instance.
payload = {
"username":username,
"avatar_url":self.avatar_url,
"embed":[embedDict]
}
#Doesn't want to use default avatar url nor any input avatar url.
elif avatar_url == '' or avatar_url == ' ':
payload = {
"username":username,
"embed":[embedDict]
}
else:
#Avatar_url was inputted in this function.
payload = {
"username":username,
"avatar_url":avatar_url,
"embed":[embedDict]
}
sess = requests.session()
payload = json.dumps(payload)
headers = {'content-type': 'application/json'}
if str(type(self.webhook_url)).replace("<class '", "").replace(">'", "").replace("'>", "") == 'array' or str(type(self.webhook_url)).replace("<class '", "").replace(">'", "").replace("'>", "") == 'list':
# do request for every webhook.
for url in self.webhook_url:
resp = sess.post(url, data=payload, headers=headers)
print(resp.status_code)
elif str(type(self.webhook_url)).replace("<class '", "").replace(">'", "").replace("'>", "") == 'str':
resp = sess.post(self.webhook_url, data=payload, headers=headers)
print(resp.status_code)
else:
print("Eror in webhook url(s)! Webhook url should be a string, list or array.")
return None
|
class Webhook:
def __init__(self, webhook_url, avatar_url=None):
pass
def change_url(self, webhook_url):
pass
def change_avatar_url(self, avatar_url):
pass
def send_message(self, message, username, avatar_url=None):
pass
def send_embed(self, embed, username, avatar_url=None):
pass
| 6 | 0 | 19 | 0 | 16 | 3 | 3 | 0.17 | 0 | 2 | 0 | 0 | 5 | 0 | 5 | 5 | 103 | 5 | 84 | 18 | 78 | 14 | 46 | 18 | 40 | 7 | 0 | 2 | 17 |
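For orientation, a minimal usage sketch of the `Webhook` class in the record above (illustrative only, not part of the dataset; assumes the `requests` package is installed, and the webhook URLs are placeholders rather than real endpoints):

```python
# Illustrative only -- not part of the dataset record above.
# Assumes `requests` is installed and webcord is importable; the URLs
# below are placeholders, not real Discord webhooks.
from webcord.webhook import Webhook

hook = Webhook("https://discord.com/api/webhooks/<id>/<token>",
               avatar_url="https://example.com/avatar.png")
hook.send_message("Deployment finished", username="ci-bot")
# posts the payload and prints the HTTP status code

# A list of URLs is also accepted; send_message posts to each one in turn.
multi = Webhook([
    "https://discord.com/api/webhooks/<id1>/<token1>",
    "https://discord.com/api/webhooks/<id2>/<token2>",
])
multi.send_message("Broadcast to both channels", username="ci-bot")
```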
145,349 |
LemmoTresto/Webcord
|
LemmoTresto_Webcord/webcord/errors.py
|
webcord.errors.InvalidArgument
|
class InvalidArgument(ClientException):
"""Exception that's thrown when an argument to a function
is invalid some way (e.g. wrong value or wrong type).
This could be considered the analogous of ``ValueError`` and
``TypeError`` except derived from :exc:`ClientException` and thus
:exc:`DiscordException`.
"""
pass
|
class InvalidArgument(ClientException):
'''Exception that's thrown when an argument to a function
is invalid some way (e.g. wrong value or wrong type).
This could be considered the analogous of ``ValueError`` and
``TypeError`` except derived from :exc:`ClientException` and thus
:exc:`DiscordException`.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 9 | 1 | 2 | 1 | 1 | 6 | 2 | 1 | 1 | 0 | 5 | 0 | 0 |
145,350 |
LemmoTresto/Webcord
|
LemmoTresto_Webcord/webcord/errors.py
|
webcord.errors.NotFound
|
class NotFound(HTTPException):
"""Exception that's thrown for when status code 404 occurs.
Subclass of :exc:`HTTPException`
"""
pass
|
class NotFound(HTTPException):
'''Exception that's thrown for when status code 404 occurs.
Subclass of :exc:`HTTPException`
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 6 | 1 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 5 | 0 | 0 |
145,351 |
LemmoTresto/Webcord
|
LemmoTresto_Webcord/webcord/utils.py
|
webcord.utils.cached_property
|
class cached_property:
def __init__(self, function):
self.function = function
self.__doc__ = getattr(function, '__doc__')
def __get__(self, instance, owner):
if instance is None:
return self
value = self.function(instance)
setattr(instance, self.function.__name__, value)
return value
|
class cached_property:
def __init__(self, function):
pass
def __get__(self, instance, owner):
pass
| 3 | 0 | 6 | 1 | 5 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2 | 2 | 2 | 2 | 13 | 3 | 10 | 6 | 7 | 0 | 10 | 6 | 7 | 2 | 0 | 1 | 3 |
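A short sketch of how the `cached_property` descriptor above behaves: the wrapped function runs once per instance, after which the computed value shadows the (non-data) descriptor. `Report` and `expensive_total` are invented names for illustration:

```python
# Illustrative only, not part of the dataset record. Assumes webcord is
# importable; Report / expensive_total are invented names.
from webcord.utils import cached_property

class Report:
    def __init__(self, numbers):
        self.numbers = numbers

    @cached_property
    def expensive_total(self):
        print("computing...")        # runs only on the first access
        return sum(self.numbers)

r = Report([1, 2, 3])
print(r.expensive_total)   # "computing..." then 6
print(r.expensive_total)   # 6 again; the value stored by setattr() now
                           # shadows the descriptor on the instance
```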
145,352 |
LemmoTresto/Webcord
|
LemmoTresto_Webcord/webcord/utils.py
|
webcord.utils.CachedSlotProperty
|
class CachedSlotProperty:
def __init__(self, name, function):
self.name = name
self.function = function
self.__doc__ = getattr(function, '__doc__')
def __get__(self, instance, owner):
if instance is None:
return self
try:
return getattr(instance, self.name)
except AttributeError:
value = self.function(instance)
setattr(instance, self.name, value)
return value
|
class CachedSlotProperty:
def __init__(self, name, function):
pass
def __get__(self, instance, owner):
pass
| 3 | 0 | 7 | 1 | 7 | 0 | 2 | 0 | 0 | 1 | 0 | 0 | 2 | 3 | 2 | 2 | 16 | 2 | 14 | 7 | 11 | 0 | 14 | 7 | 11 | 3 | 0 | 1 | 4 |
145,353 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/namealizer.py
|
namealizer.namealizer.DictionaryNotFoundError
|
class DictionaryNotFoundError(Exception):
"""
Exception to be raised when the script fails at importing a dictionary
"""
pass
|
class DictionaryNotFoundError(Exception):
'''
Exception to be raised when the script fails at importing a dictionary
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
145,354 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/test_namealizer.py
|
namealizer.test_namealizer.TestWordGenerator
|
class TestWordGenerator(unittest.TestCase):
"""Test the WordGenerator class for expected operation"""
def test_default_initialization(self):
wg = namealizer.WordGenerator("dictionaries/all_en_US.dict")
self.assertEqual(wg.wordstyle, "lowercase")
self.assertEqual(wg.separator, " ")
self.assertIsInstance(wg.seed, int)
def test_valid_wordstyles(self):
wg = namealizer.WordGenerator("dictionaries/all_en_US.dict")
# test that these calls work, actual formatting is tested elsewhere
wg.wordstyle = "lowercase"
wg[3]
wg.wordstyle = "uppercase"
wg[3]
wg.wordstyle = "mixedcase"
wg[3]
wg.wordstyle = "capitalize"
wg[3]
def test_invalid_wordstyle(self):
wg = namealizer.WordGenerator("dictionaries/all_en_US.dict")
wg.wordstyle = "cookies"
with self.assertRaises(namealizer.InvalidWordStyleError):
wg[3]
def test_valid_separators(self):
wg = namealizer.WordGenerator("dictionaries/all_en_US.dict")
wg.separator = "-"
returned = wg[3]
self.assertEqual(len(returned.split(wg.separator)), 3)
def test_string_access_method(self):
wg = namealizer.WordGenerator("dictionaries/all_en_US.dict")
returned = wg["abc"].split()
self.assertEqual(returned[0][0], "a")
self.assertEqual(returned[1][0], "b")
self.assertEqual(returned[2][0], "c")
def test_count_access_method(self):
wg = namealizer.WordGenerator("dictionaries/all_en_US.dict")
returned = wg[3].split()
self.assertEqual(len(returned), 3)
def test_invalid_access_method(self):
wg = namealizer.WordGenerator("dictionaries/all_en_US.dict")
with self.assertRaises(TypeError):
wg[None]
|
class TestWordGenerator(unittest.TestCase):
'''Test the WordGenerator class for expected operation'''
def test_default_initialization(self):
pass
def test_valid_wordstyles(self):
pass
def test_invalid_wordstyle(self):
pass
def test_valid_separators(self):
pass
def test_string_access_method(self):
pass
def test_count_access_method(self):
pass
def test_invalid_access_method(self):
pass
| 8 | 1 | 6 | 0 | 6 | 0 | 1 | 0.05 | 1 | 4 | 2 | 0 | 7 | 0 | 7 | 79 | 51 | 9 | 40 | 18 | 32 | 2 | 40 | 18 | 32 | 1 | 2 | 1 | 7 |
145,355 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/test_namealizer.py
|
namealizer.test_namealizer.TestDictionaryImport
|
class TestDictionaryImport(unittest.TestCase):
"""
Test the ability of the tool to import dictionaries. This tests
also serves as the specification for how dictionaries should be
formatted. This specification is also documented here.
Dictionary format (on-disk):
Dictionaries have a fairly simple format, each word is on it's own
line. That's it.
Dictionary format (in memory):
Within the script dictionaries should be stored as a Python
dictionary where each key is mapped to a unique first character of
the word and the value of each of these keys is a Python list of all
the words that have that first character.
"""
well_formatted_all = "well-formatted-all.dict"
words_all = ["able", "boson", "cannon", "dog",
"exxon", "foggy", "grand", "housing",
"interpreted", "joking", "king",
"lemon", "michael", "nixon", "opening",
"pricing", "queen", "respected",
"stuffing", "travis", "unopened", "very",
"washington", "xylo", "yocto", "zebra"]
well_formatted_sparse = "well-formatted-sparse.dict"
words_sparse = ["able", "exxon", "washington", "xylophone"]
def setUp(self):
# create and import the well formatted full dictionary
write_dictionary(self.well_formatted_all, self.words_all)
with open(self.well_formatted_all, "r") as dictionary_file:
imported = namealizer.import_dictionary(dictionary_file)
self.well_formatted_all = imported
# create and import the well formatted sparse dictionary
write_dictionary(self.well_formatted_sparse, self.words_sparse)
with open(self.well_formatted_sparse, "r") as dictionary_file:
imported = namealizer.import_dictionary(dictionary_file)
self.well_formatted_sparse = imported
def test_import_well_formatted_all_letters(self):
# first just make sure it is a dictionary
self.assertIsInstance(self.well_formatted_all, dict)
# make sure all of the keys are lists
for value in self.well_formatted_all.values():
self.assertIsInstance(value, list)
# verify that this dictionary has all 26 letters specified
self.assertEqual(len(self.words_all), len(self.well_formatted_all))
# check that the first word in a `letter group` got imported in whole
# this is a test for issue #17
self.assertEqual(self.words_all[0], self.well_formatted_all["a"][0])
def test_import_well_formatted_sparse(self):
# first just make sure it is a dictionary
self.assertIsInstance(self.well_formatted_all, dict)
# make sure all of the keys are lists
for value in self.well_formatted_all.values():
self.assertIsInstance(value, list)
# verify that this dictionary has all the letters specified
len_all = len(self.words_sparse)
len_sparse = len(self.well_formatted_sparse)
self.assertEqual(len_all, len_sparse)
# check that the first word in a `letter group` got imported in whole
# this is a test for issue #17
self.assertEqual(self.words_all[0], self.well_formatted_all["a"][0])
def test_sparse_dict_access_unavailable_letter(self):
"""Tests condition of dict not containing the desired letter"""
with self.assertRaises(namealizer.NoWordForLetter):
func = namealizer.get_random_word
dictionary = self.well_formatted_sparse
func(dictionary, starting_letter='c')
def tearDown(self):
# remove the dictionaries
for dict_file in glob.glob("*.dict"):
os.remove(dict_file)
|
class TestDictionaryImport(unittest.TestCase):
'''
Test the ability of the tool to import dictionaries. This tests
also serves as the specification for how dictionaries should be
formatted. This specification is also documented here.
Dictionary format (on-disk):
Dictionaries have a fairly simple format, each word is on it's own
line. That's it.
Dictionary format (in memory):
Within the script dictionaries should be stored as a Python
dictionary where each key is mapped to a unique first character of
the word and the value of each of these keys is a Python list of all
the words that have that first character.
'''
def setUp(self):
pass
def test_import_well_formatted_all_letters(self):
pass
def test_import_well_formatted_sparse(self):
pass
def test_sparse_dict_access_unavailable_letter(self):
'''Tests condition of dict not containing the desired letter'''
pass
def tearDown(self):
pass
| 6 | 2 | 10 | 1 | 6 | 3 | 2 | 0.64 | 1 | 3 | 1 | 0 | 5 | 0 | 5 | 77 | 84 | 15 | 42 | 19 | 36 | 27 | 36 | 18 | 30 | 2 | 2 | 1 | 8 |
145,356 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/test_namealizer.py
|
namealizer.test_namealizer.TestCommandLineParameters
|
class TestCommandLineParameters(unittest.TestCase):
"""Verifies command line parameters are handled correctly
"""
def test_create_parser(self):
import argparse
return_value = namealizer.create_parser()
self.assertIsInstance(return_value, argparse.Namespace)
num_args = 6
self.assertEqual(len(return_value.__dict__), num_args)
|
class TestCommandLineParameters(unittest.TestCase):
'''Verifies command line parameters are handled correctly
'''
def test_create_parser(self):
pass
| 2 | 1 | 6 | 0 | 6 | 0 | 1 | 0.29 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 73 | 10 | 1 | 7 | 5 | 4 | 2 | 7 | 5 | 4 | 1 | 2 | 0 | 1 |
145,357 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/test_namealizer.py
|
namealizer.test_namealizer.TestStringFormatter
|
class TestStringFormatter(unittest.TestCase):
"""Verifies string formatting functionality
This function is the final thing that processes strings before they
are printed so it takes strings of the format "hello this is a
string" and turns them into things like "HelloThisIsAString"
"""
# test all the base wordstyles
test_string = "all the world"
separators = ["", "_", "-", "*", "$", "@#$", "monkey"]
expected_lowercase = ["alltheworld", "all_the_world",
"all-the-world", "all*the*world", "all$the$world",
"all@#$the@#$world", "allmonkeythemonkeyworld"]
expected_uppercase = ["ALLTHEWORLD", "ALL_THE_WORLD", "ALL-THE-WORLD",
"ALL*THE*WORLD", "ALL$THE$WORLD",
"ALL@#$THE@#$WORLD", "ALLmonkeyTHEmonkeyWORLD"]
expected_capitalize = ["AllTheWorld", "All_The_World",
"All-The-World", "All*The*World", "All$The$World",
"All@#$The@#$World", "AllmonkeyThemonkeyWorld"]
expected_mixedcase = ["allTheWorld", "all_The_World",
"all-The-World", "all*The*World", "all$The$World",
"all@#$The@#$World", "allmonkeyThemonkeyWorld"]
def test_lowercase(self):
standard = self.test_string.lower()
test = namealizer.format_string(self.test_string, "lowercase")
self.assertEqual(standard, test)
def test_uppercase(self):
standard = self.test_string.upper()
test = namealizer.format_string(self.test_string, "uppercase")
self.assertEqual(standard, test)
def test_capitalize(self):
standard = "All The World"
test = namealizer.format_string(self.test_string, "capitalize")
self.assertEqual(standard, test)
def test_mixedcase(self):
standard = "all The World"
test = namealizer.format_string(self.test_string, "mixedcase")
self.assertEqual(standard, test)
# test some separators
def test_separators_lowercase(self):
for index, separator in enumerate(self.separators):
standard = self.expected_lowercase[index]
test = namealizer.format_string(self.test_string,
"lowercase",
separator)
self.assertEqual(standard, test)
def test_separators_uppercase(self):
for index, separator in enumerate(self.separators):
standard = self.expected_uppercase[index]
test = namealizer.format_string(self.test_string,
"uppercase",
separator)
self.assertEqual(standard, test)
def test_separators_capitalize(self):
for index, separator in enumerate(self.separators):
standard = self.expected_capitalize[index]
test = namealizer.format_string(self.test_string,
"capitalize",
separator)
self.assertEqual(standard, test)
def test_separators_mixedcase(self):
for index, separator in enumerate(self.separators):
standard = self.expected_mixedcase[index]
test = namealizer.format_string(self.test_string,
"mixedcase",
separator)
self.assertEqual(standard, test)
def test_invalid_wordstyle(self):
with self.assertRaises(namealizer.InvalidWordStyleError):
namealizer.format_string("My big pizza", "copy")
|
class TestStringFormatter(unittest.TestCase):
'''Verifies string formatting functionality
This function is the final thing that processes strings before they
are printed so it takes strings of the format "hello this is a
string" and turns them into things like "HelloThisIsAString"
'''
def test_lowercase(self):
pass
def test_uppercase(self):
pass
def test_capitalize(self):
pass
def test_mixedcase(self):
pass
def test_separators_lowercase(self):
pass
def test_separators_uppercase(self):
pass
def test_separators_capitalize(self):
pass
def test_separators_mixedcase(self):
pass
def test_invalid_wordstyle(self):
pass
| 10 | 1 | 5 | 0 | 5 | 0 | 1 | 0.19 | 1 | 2 | 1 | 0 | 9 | 0 | 9 | 81 | 80 | 11 | 62 | 36 | 52 | 12 | 46 | 36 | 36 | 2 | 2 | 1 | 13 |
145,358 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/namealizer.py
|
namealizer.namealizer.WordGenerator
|
class WordGenerator(object):
"""Main word generation class"""
def __init__(self,
dictionary="dictionaries/all_en_US.dict",
wordstyle="lowercase", separator=" ",
seed=None):
"""Initializer for WordGenerator
:param dictionary Any valid .dict formatted dictionary
:param wordstyle Any allowed `wordstyle` format specification
:param separator What character (or word) to separate words with
:param seed Seed to use for the PRNG
:raises DictionaryNotFoundError if the `dictionary` parameter can't
be found on disk
:raises InvalidWordStyleError if user attempts to retrieve a word
when `self.wordstyle` is set to an invalid value
:raises NoWordForLetter when the user attempts to reteive a word
where the starting letter given does not exist in the
dictionary
"""
if dictionary == "dictionaries/all_en_US.dict":
dictionary = resource_filename('namealizer', dictionary)
self.dictionary = import_dictionary(dictionary)
self.wordstyle = wordstyle
self.separator = separator
self.seed = generate_seed(seed)
def __getitem__(self, key):
if isinstance(key, str):
return format_string(string_for_initials(self.dictionary, key),
wordstyle=self.wordstyle,
separator=self.separator)
elif isinstance(key, int):
return format_string(string_for_count(self.dictionary, key),
wordstyle=self.wordstyle,
separator=self.separator)
else:
raise TypeError
|
class WordGenerator(object):
'''Main word generation class'''
def __init__(self,
dictionary="dictionaries/all_en_US.dict",
wordstyle="lowercase", separator=" ",
seed=None):
'''Initializer for WordGenerator
:param dictionary Any valid .dict formatted dictionary
:param wordstyle Any allowed `wordstyle` format specification
:param separator What character (or word) to separate words with
:param seed Seed to use for the PRNG
:raises DictionaryNotFoundError if the `dictionary` parameter can't
be found on disk
:raises InvalidWordStyleError if user attempts to retrieve a word
when `self.wordstyle` is set to an invalid value
:raises NoWordForLetter when the user attempts to reteive a word
where the starting letter given does not exist in the
dictionary
'''
pass
def __getitem__(self, key):
pass
| 3 | 2 | 19 | 2 | 11 | 7 | 3 | 0.64 | 1 | 3 | 0 | 0 | 2 | 4 | 2 | 2 | 40 | 4 | 22 | 10 | 16 | 14 | 13 | 7 | 10 | 3 | 1 | 1 | 5 |
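A usage sketch of `WordGenerator`, mirroring the `TestWordGenerator` cases earlier in this dump (illustrative only; assumes the `namealizer` package with its bundled `all_en_US.dict` is installed, and that `WordGenerator` is reachable the same way the test module references it; the generated words are random, so the comments show shape only):

```python
# Illustrative only, not part of the dataset. The import path is assumed
# to match how the tests above refer to namealizer.WordGenerator.
import namealizer

wg = namealizer.WordGenerator()      # defaults: lowercase words, space separator
print(wg[3])                         # three random words, e.g. "able boson cannon"

wg.wordstyle = "capitalize"
wg.separator = "-"
print(wg[3])                         # e.g. "Able-Boson-Cannon"

print(wg["abc"])                     # one word per initial: a..., b..., c...
```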
145,359 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/namealizer.py
|
namealizer.namealizer.NoWordForLetter
|
class NoWordForLetter(Exception):
"""
Raised when dictionary has no word beginning with requested letter
"""
pass
|
class NoWordForLetter(Exception):
'''
Raised when dictionary has no word beginning with requested letter
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
145,360 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/namealizer.py
|
namealizer.namealizer.InvalidWordStyleError
|
class InvalidWordStyleError(Exception):
"""
Exception to raise when the user passes in an invalid wordstyle
"""
pass
|
class InvalidWordStyleError(Exception):
'''
Exception to raise when the user passes in an invalid wordstyle
'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
145,361 |
LeonardMH/namealizer
|
LeonardMH_namealizer/namealizer/test_namealizer.py
|
namealizer.test_namealizer.TestActualUsage
|
class TestActualUsage(unittest.TestCase):
"""Test expected program usage
"""
def test_no_arguments(self):
# this test should return a two letter lowercase set
self.assertEqual(2, len(namealizer.main().split(" ")))
def test_with_various_count_arguments(self):
# verify that we can return up to a certain number of words
for test in range(6):
result = namealizer.main(count=test)
if test == 0:
# this test has to be special cased because splitting
# on spaces means that even an empty string will have a
# length of 1.
self.assertEqual("", result)
else:
self.assertEqual(test, len(result.split(" ")))
def test_with_various_initials(self):
# check the case where initials is passed in as an empty string
self.assertEqual("", namealizer.main(initials=""))
max_number_of_initials = 24
for test in range(6):
num_initials = random.randint(1, max_number_of_initials)
initials = ""
# pull this many random letters from the alphabet
for _ in range(num_initials):
initials += random.choice(string.ascii_letters)
result = namealizer.main(initials=initials).split(" ")
self.assertEqual(num_initials, len(result))
def test_seed_option(self):
# perform a couple of tests and ensure that given everything
# else being constant, the same seed produces # the same results
# test for 0 seed
self.assertTrue(are_two_seed_runs_equal(0))
# test for sys.maxsize seed
self.assertTrue(are_two_seed_runs_equal(sys.maxsize))
# test for 10 random seeds
for _ in range(10):
seed = random.randint(1, sys.maxsize)
self.assertTrue(are_two_seed_runs_equal(seed))
def test_count_and_initials_both_defined(self):
"""If count and initials are passed to main, initials are used"""
initials_to_use = "MHL"
count_to_use = 4
result = namealizer.main(count=count_to_use, initials=initials_to_use)
self.assertTrue(len(result), len(initials_to_use))
def test_dictionary_not_found(self):
with self.assertRaises(namealizer.DictionaryNotFoundError):
namealizer.main(dictionary="your_mom.dict")
|
class TestActualUsage(unittest.TestCase):
'''Test expected program usage
'''
def test_no_arguments(self):
pass
def test_with_various_count_arguments(self):
pass
def test_with_various_initials(self):
pass
def test_seed_option(self):
pass
def test_count_and_initials_both_defined(self):
'''If count and initials are passed to main, initials are used'''
pass
def test_dictionary_not_found(self):
pass
| 7 | 2 | 9 | 1 | 6 | 2 | 2 | 0.44 | 1 | 2 | 1 | 0 | 6 | 0 | 6 | 78 | 60 | 11 | 34 | 20 | 27 | 15 | 33 | 20 | 26 | 3 | 2 | 2 | 11 |
145,362 |
LesPatamechanix/patalib
|
LesPatamechanix_patalib/src/patalib/anomaly.py
|
patalib.anomaly.Anomaly
|
class Anomaly(PataLib):
""" Anomaly sub class """
def generate_anomaly(self, input_word, list_of_dict_words, num):
""" Generate an anomaly. This is done
via a Psuedo-random number generator.
"""
results = []
for i in range(0,num):
index = randint(0,len(list_of_dict_words)-1)
name = list_of_dict_words[index]
if name != input_word and name not in results:
results.append(PataLib().strip_underscore(name))
else:
i = i +1
results = {'input' : input_word, 'results' : results, 'category' : 'anomaly'}
return results
|
class Anomaly(PataLib):
''' Anomaly sub class '''
def generate_anomaly(self, input_word, list_of_dict_words, num):
''' Generate an anomaly. This is done
via a Psuedo-random number generator.
'''
pass
| 2 | 2 | 15 | 1 | 11 | 3 | 3 | 0.33 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 3 | 18 | 2 | 12 | 6 | 10 | 4 | 11 | 6 | 9 | 3 | 1 | 2 | 3 |
145,363 |
LesPatamechanix/patalib
|
LesPatamechanix_patalib/src/patalib/antonym.py
|
patalib.antonym.Antonym
|
class Antonym(PataLib):
""" Antonym subclass """
def generate_antonym(self, input_word):
""" Generate an antonym using a Synset
and its lemmas.
"""
results = []
synset = wordnet.synsets(input_word)
for i in synset:
if i.pos in ['n','v']:
for j in i.lemmas:
if j.antonyms():
name = j.antonyms()[0].name()
results.append(PataLib().strip_underscore(name))
results = {'input' : input_word, 'results' : results, 'category' : 'antonym'}
return results
|
class Antonym(PataLib):
''' Antonym subclass '''
def generate_antonym(self, input_word):
''' Generate an antonym using a Synset
and its lemmas.
'''
pass
| 2 | 2 | 14 | 0 | 11 | 3 | 5 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 3 | 18 | 2 | 12 | 7 | 10 | 4 | 12 | 7 | 10 | 5 | 1 | 4 | 5 |
145,364 |
LesPatamechanix/patalib
|
LesPatamechanix_patalib/src/patalib/clinamen.py
|
patalib.clinamen.Clinamen
|
class Clinamen(PataLib):
""" Generate clinamen subclass """
def generate_clinamen(self, input_word, list_of_dict_words, swerve):
""" Generate a clinamen. Here we
looks for words via the damerau levenshtein distance
with a distance of 2.
"""
results = []
selected_list = []
for i in list_of_dict_words: #produce a subset for efficency
if len(i) < len(input_word)+1 and len(i) > len(input_word)/2:
if '_' not in i:
selected_list.append(i)
for i in selected_list:
match = self.damerau_levenshtein_distance(input_word,i)
if match == swerve:
results.append(i)
results = {'input' : input_word, 'results' : results, 'category' : 'clinamen'}
return results
def damerau_levenshtein_distance(self, s1, s2):
""" Dervied algorithm from the following website:
https://www.guyrutenberg.com/2008/12/15/damerau-levenshtein-distance-in-python/
Gives us the distance between two words.
"""
d = {}
lenstr1 = len(s1)
lenstr2 = len(s2)
for i in range(-1,lenstr1+1):
d[(i,-1)] = i+1
for j in range(-1,lenstr2+1):
d[(-1,j)] = j+1
for i in range(lenstr1):
for j in range(lenstr2):
if s1[i] == s2[j]:
cost = 0
else:
cost = 1
d[(i,j)] = min(
d[(i-1,j)] + 1, # deletion
d[(i,j-1)] + 1, # insertion
d[(i-1,j-1)] + cost, # substitution
)
if i and j and s1[i]==s2[j-1] and s1[i-1] == s2[j]:
d[(i,j)] = min (d[(i,j)], d[i-2,j-2] + cost) # transposition
return d[lenstr1-1,lenstr2-1]
|
class Clinamen(PataLib):
''' Generate clinamen subclass '''
def generate_clinamen(self, input_word, list_of_dict_words, swerve):
''' Generate a clinamen. Here we
looks for words via the damerau levenshtein distance
with a distance of 2.
'''
pass
def damerau_levenshtein_distance(self, s1, s2):
''' Dervied algorithm from the following website:
https://www.guyrutenberg.com/2008/12/15/damerau-levenshtein-distance-in-python/
Gives us the distance between two words.
'''
pass
| 3 | 3 | 23 | 2 | 18 | 7 | 7 | 0.39 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 4 | 51 | 6 | 36 | 13 | 33 | 14 | 31 | 13 | 28 | 7 | 1 | 3 | 13 |
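A sketch of the `Clinamen` record above (illustrative only; assumes the `patalib` package and its NLTK/WordNet dependencies are importable). The `damerau_levenshtein_distance` method is self-contained and takes two strings:

```python
# Illustrative only, not part of the dataset. Assumes patalib and its
# NLTK/WordNet dependencies are importable.
from patalib.clinamen import Clinamen

c = Clinamen()
print(c.damerau_levenshtein_distance("banana", "banan"))   # 1 (one deletion)
print(c.damerau_levenshtein_distance("abcd", "abdc"))      # 1 (one transposition)

# generate_clinamen keeps candidate words whose distance from the input
# equals `swerve` (after a length filter that also excludes underscores):
candidates = ["banana", "bananas", "bandana", "cabana"]
print(c.generate_clinamen("banana", candidates, swerve=2))
# -> results should contain "cabana" (distance 2 from "banana")
```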
145,365 |
LesPatamechanix/patalib
|
LesPatamechanix_patalib/src/patalib/patalib.py
|
patalib.patalib.PataLib
|
class PataLib():
""" Class containing functions for
generating patadata results
"""
def strip_underscore(self, input_word):
""" Remove underscore from word """
if '_' in input_word:
return input_word.replace('_',' ')
else:
return input_word
def palindrome(self, input_word):
""" Check if string is a plaindrome """
return str(input_word) == str(input_word)[::-1]
|
class PataLib():
''' Class containing functions for
generating patadata results
'''
def strip_underscore(self, input_word):
''' Remove underscore from word '''
pass
def palindrome(self, input_word):
''' Check if string is a plaindrome '''
pass
| 3 | 3 | 5 | 0 | 4 | 1 | 2 | 0.63 | 0 | 1 | 0 | 5 | 2 | 0 | 2 | 2 | 16 | 3 | 8 | 3 | 5 | 5 | 7 | 3 | 4 | 2 | 0 | 1 | 3 |
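A quick sketch of the two `PataLib` helpers above (illustrative only; assumes `patalib` is importable):

```python
# Illustrative only, not part of the dataset. Assumes patalib is importable.
from patalib.patalib import PataLib

p = PataLib()
print(p.strip_underscore("rock_band"))   # "rock band"
print(p.palindrome("racecar"))           # True
print(p.palindrome("guitar"))            # False
```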
145,366 |
LesPatamechanix/patalib
|
LesPatamechanix_patalib/src/patalib/synonym.py
|
patalib.synonym.Synonym
|
class Synonym(PataLib):
""" Sub class of PataLib
"""
def generate_synonym(self, input_word):
""" Generate Synonym using a WordNet
synset.
"""
results = []
results.append(input_word)
synset = wordnet.synsets(input_word)
for i in synset:
index = 0
syn = i.name().split('.')
if syn[index]!= input_word:
name = syn[0]
results.append(PataLib().strip_underscore(name))
else:
index = index + 1
results = {'input' : input_word, 'results' : results, 'category' : 'synonym'}
return results
|
class Synonym(PataLib):
''' Sub class of PataLib
'''
def generate_synonym(self, input_word):
''' Generate Synonym using a WordNet
synset.
'''
pass
| 2 | 2 | 17 | 0 | 14 | 3 | 3 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 3 | 21 | 1 | 15 | 8 | 13 | 5 | 14 | 8 | 12 | 3 | 1 | 2 | 3 |
145,367 |
LesPatamechanix/patalib
|
LesPatamechanix_patalib/src/patalib/syzygy.py
|
patalib.syzygy.Syzygy
|
class Syzygy(PataLib):
""" Syzygy subclass """
def generate_syzygy(self, input_word):
""" Generate a syzygy. Here we
generate a list of hypernyms associated
with an input word. Using the hypernym we
then generate another synonym. For example:
The input word 'weed' will result in a syzgy
of 'band' as in rock band.
"""
results = []
synset = wordnet.synsets(input_word)
for i in synset:
if i.hypernyms():
hyp = i.hypernyms()[0].name().split('.')
if '_' in hyp[0]:
hyp[0] = PataLib().strip_underscore(hyp[0])
syns = wordnet.synsets(hyp[0])
if len(syns) > 0:
name = syns[0].name().split('.')[0]
results.append(PataLib().strip_underscore(name))
results = {'input' : input_word, 'results' : results, 'category' : 'syzygy'}
return results
|
class Syzygy(PataLib):
''' Syzygy subclass '''
def generate_syzygy(self, input_word):
''' Generate a syzygy. Here we
generate a list of hypernyms associated
with an input word. Using the hypernym we
then generate another synonym. For example:
The input word 'weed' will result in a syzgy
of 'band' as in rock band.
'''
pass
| 2 | 2 | 21 | 0 | 14 | 7 | 5 | 0.53 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 3 | 24 | 1 | 15 | 8 | 13 | 8 | 15 | 8 | 13 | 5 | 1 | 3 | 5 |
145,368 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/objects/bender.py
|
mutatorMath.objects.bender.Bender
|
class Bender(object):
# object with a dictionary of warpmaps
# call instance with a location to bend it
def __init__(self, axes):
# axes dict:
# { <axisname>: {'map':[], 'minimum':0, 'maximum':1000, 'default':0, 'tag':'aaaa', 'name':"longname"}}
warpDict = {}
self.maps = {} # not needed?
self.warps = {}
for axisName, axisAttributes in axes.items():
mapData = axisAttributes.get('map', [])
if type(mapData)==list:
if mapData==0:
# this axis has no bender
self.warps[axisName] = None
else:
self._makeWarpFromList(axisName, mapData, axisAttributes['minimum'], axisAttributes['maximum'])
elif hasattr(mapData, '__call__'):
self.warps[axisName] = mapData
def __repr__(self):
return "<Bender %s>"%(str(self.warps.items()))
def getMap(self, axisName):
return self.maps.get(axisName, [])
def _makeWarpFromList(self, axisName, warpMap, minimum, maximum):
if not warpMap:
warpMap = [(minimum,minimum), (maximum,maximum)]
self.warps[axisName] = warpMap
# check for the extremes, add if necessary
if not sum([a==minimum for a, b in warpMap]):
warpMap = [(minimum,minimum)] + warpMap
if not sum([a==maximum for a, b in warpMap]):
warpMap.append((maximum,maximum))
items = []
for x, y in warpMap:
items.append((Location(w=x), y))
m = WarpMutator()
items.sort()
bias = biasFromLocations([loc for loc, obj in items], True)
m.setBias(bias)
n = None
ofx = []
onx = []
for loc, obj in items:
if (loc-bias).isOrigin():
m.setNeutral(obj)
break
if m.getNeutral() is None:
raise MutatorError("Did not find a neutral for this system", m)
for loc, obj in items:
lb = loc-bias
if lb.isOrigin(): continue
if lb.isOnAxis():
onx.append((lb, obj-m.getNeutral()))
else:
ofx.append((lb, obj-m.getNeutral()))
for loc, obj in onx:
m.addDelta(loc, obj, punch=False, axisOnly=True)
for loc, obj in ofx:
m.addDelta(loc, obj, punch=True, axisOnly=True)
self.warps[axisName] = m
def __call__(self, loc):
# bend a location according to the defined warps
new = loc.copy()
for dim, warp in self.warps.items():
if warp is None:
new[dim] = loc[dim]
continue
if not dim in loc: continue
try:
new[dim] = warp(loc.get(dim))
except:
ex_type, ex, tb = sys.exc_info()
raise MutatorError("A warpfunction \"%s\" (for axis \"%s\") raised \"%s\" at location %s"%(str(warp), dim, ex, loc.asString()), loc)
return new
|
class Bender(object):
def __init__(self, axes):
pass
def __repr__(self):
pass
def getMap(self, axisName):
pass
def _makeWarpFromList(self, axisName, warpMap, minimum, maximum):
pass
def __call__(self, loc):
pass
| 6 | 0 | 14 | 0 | 13 | 1 | 5 | 0.12 | 1 | 6 | 3 | 0 | 5 | 2 | 5 | 5 | 78 | 4 | 67 | 23 | 61 | 8 | 66 | 22 | 60 | 13 | 1 | 3 | 25 |
145,369 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/objects/bender.py
|
mutatorMath.objects.bender.WarpMutator
|
class WarpMutator(mutatorMath.objects.mutator.Mutator):
def __call__(self, value):
if isinstance(value, tuple):
# handle split location
return self.makeInstance(Location(w=value[0])), self.makeInstance(Location(w=value[1]))
return self.makeInstance(Location(w=value))
|
class WarpMutator(mutatorMath.objects.mutator.Mutator):
def __call__(self, value):
pass
| 2 | 0 | 5 | 0 | 4 | 1 | 2 | 0.2 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 47 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 3 | 1 | 2 |
145,370 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/objects/error.py
|
mutatorMath.objects.error.MutatorError
|
class MutatorError(Exception):
def __init__(self, msg, obj=None):
self.msg = msg
self.obj = obj
def __str__(self):
return repr(self.msg) + repr(self.obj)
|
class MutatorError(Exception):
def __init__(self, msg, obj=None):
pass
def __str__(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 12 | 6 | 0 | 6 | 5 | 3 | 0 | 6 | 5 | 3 | 1 | 3 | 0 | 2 |
145,371 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/objects/mutator.py
|
mutatorMath.objects.mutator.Mutator
|
class Mutator(dict):
"""
Calculator for multi dimensional interpolations.
::
# The mutator needs one neutral object.
m = Mutator(myNeutralMathObject)
# The mutator needs one or more deltas.
m.addDelta(Location(pop=1), myMasterMathObject-myNeutralMathObject)
# The mutator calculates instances at other locations. Remember to inflate.
m.getInstance(Location(pop=0.5)) + myNeutralMathObject
"""
def __init__(self, neutral=None):
self._axes = {}
self._tags = {}
self._bender = noBend
self._neutral = neutral
self._bias = Location()
def setBender(self, bender):
self._bender = bender
def setBias(self, bias):
self._bias = bias
def getBias(self):
return self._bias
def setNeutral(self, aMathObject, deltaName="origin"):
"""Set the neutral object."""
self._neutral = aMathObject
self.addDelta(Location(), aMathObject-aMathObject, deltaName, punch=False, axisOnly=True)
def getNeutral(self):
"""Get the neutral object."""
return self._neutral
def addDelta(self, location, aMathObject, deltaName = None, punch=False, axisOnly=True):
""" Add a delta at this location.
* location: a Location object
* mathObject: a math-sensitive object
* deltaName: optional string/token
* punch:
* True: add the difference with the instance value at that location and the delta
* False: just add the delta.
"""
if punch:
r = self.getInstance(location, axisOnly=axisOnly)
if r is not None:
self[location.asTuple()] = aMathObject-r, deltaName
else:
raise MutatorError("Could not get instance.")
else:
self[location.asTuple()] = aMathObject, deltaName
#
# info
#
def getAxisNames(self):
"""
Collect a set of axis names from all deltas.
"""
s = {}
for l, x in self.items():
s.update(dict.fromkeys([k for k, v in l], None))
return set(s.keys())
def _collectAxisPoints(self):
"""
Return a dictionary with all on-axis locations.
"""
for l, (value, deltaName) in self.items():
location = Location(l)
name = location.isOnAxis()
if name is not None and name is not False:
if name not in self._axes:
self._axes[name] = []
if l not in self._axes[name]:
self._axes[name].append(l)
return self._axes
def _collectOffAxisPoints(self):
"""
Return a dictionary with all off-axis locations.
"""
offAxis = {}
for l, (value, deltaName) in self.items():
location = Location(l)
name = location.isOnAxis()
if name is None or name is False:
offAxis[l] = 1
return list(offAxis.keys())
def collectLocations(self):
"""
Return a dictionary with all objects.
"""
pts = []
for l, (value, deltaName) in self.items():
pts.append(Location(l))
return pts
def _allLocations(self):
"""
Return a list of all locations of all objects.
"""
l = []
for locationTuple in self.keys():
l.append(Location(locationTuple))
return l
#
# get instances
#
def getInstance(self, aLocation, axisOnly=False, getFactors=False):
""" Calculate the delta at aLocation.
* aLocation: a Location object, expected to be in bent space
* axisOnly:
* True: calculate an instance only with the on-axis masters.
* False: calculate an instance with on-axis and off-axis masters.
* getFactors:
* True: return a list of the calculated factors.
"""
self._collectAxisPoints()
factors = self.getFactors(aLocation, axisOnly)
total = None
for f, item, name in factors:
if total is None:
total = f * item
continue
total += f * item
if total is None:
total = 0 * self._neutral
if getFactors:
return total, factors
return total
def makeLocation(self, aLocation):
if isinstance(aLocation, Location):
return aLocation
return Location(aLocation)
def makeInstance(self, aLocation, bend=False):
"""
Calculate an instance with the right bias and add the neutral.
aLocation: expected to be in input space
"""
aLocation = self.makeLocation(aLocation)
if bend:
aLocation = self._bender(aLocation)
if not aLocation.isAmbivalent():
instanceObject = self.getInstance(aLocation-self._bias)
else:
locX, locY = aLocation.split()
instanceObject = self.getInstance(locX-self._bias)*(1,0)+self.getInstance(locY-self._bias)*(0,1)
return instanceObject+self._neutral
def getFactors(self, aLocation, axisOnly=False, allFactors=False):
"""
Return a list of all factors and math items at aLocation.
factor, mathItem, deltaName
all = True: include factors that are zero or near-zero
"""
deltas = []
aLocation.expand(self.getAxisNames())
limits = getLimits(self._allLocations(), aLocation)
for deltaLocationTuple, (mathItem, deltaName) in sorted(self.items()):
deltaLocation = Location(deltaLocationTuple)
deltaLocation.expand( self.getAxisNames())
factor = self._accumulateFactors(aLocation, deltaLocation, limits, axisOnly)
if not (factor-_EPSILON < 0 < factor+_EPSILON) or allFactors:
# only add non-zero deltas.
deltas.append((factor, mathItem, deltaName))
deltas = sorted(deltas, key=itemgetter(0), reverse=True)
return deltas
#
# calculate
#
def _accumulateFactors(self, aLocation, deltaLocation, limits, axisOnly):
"""
Calculate the factors of deltaLocation towards aLocation,
"""
relative = []
deltaAxis = deltaLocation.isOnAxis()
if deltaAxis is None:
relative.append(1)
elif deltaAxis:
deltasOnSameAxis = self._axes.get(deltaAxis, [])
d = ((deltaAxis, 0),)
if d not in deltasOnSameAxis:
deltasOnSameAxis.append(d)
if len(deltasOnSameAxis) == 1:
relative.append(aLocation[deltaAxis] * deltaLocation[deltaAxis])
else:
factor = self._calcOnAxisFactor(aLocation, deltaAxis, deltasOnSameAxis, deltaLocation)
relative.append(factor)
elif not axisOnly:
factor = self._calcOffAxisFactor(aLocation, deltaLocation, limits)
relative.append(factor)
if not relative:
return 0
f = None
for v in relative:
if f is None: f = v
else:
f *= v
return f
def _calcOnAxisFactor(self, aLocation, deltaAxis, deltasOnSameAxis, deltaLocation):
"""
Calculate the on-axis factors.
"""
if deltaAxis == "origin":
f = 0
v = 0
else:
f = aLocation[deltaAxis]
v = deltaLocation[deltaAxis]
i = []
iv = {}
for value in deltasOnSameAxis:
iv[Location(value)[deltaAxis]]=1
i = sorted(iv.keys())
r = 0
B, M, A = [], [], []
mA, mB, mM = None, None, None
for value in i:
if value < f: B.append(value)
elif value > f: A.append(value)
else: M.append(value)
if len(B) > 0:
mB = max(B)
B.sort()
if len(A) > 0:
mA = min(A)
A.sort()
if len(M) > 0:
mM = min(M)
M.sort()
if mM is not None:
if ((f-_EPSILON < v) and (f+_EPSILON > v)) or f==v: r = 1
else: r = 0
elif mB is not None and mA is not None:
if v < mB or v > mA: r = 0
else:
if v == mA:
r = float(f-mB)/(mA-mB)
else:
r = float(f-mA)/(mB-mA)
elif mB is None and mA is not None:
if v==A[1]:
r = float(f-A[0])/(A[1]-A[0])
elif v == A[0]:
r = float(f-A[1])/(A[0]-A[1])
else:
r = 0
elif mB is not None and mA is None:
if v == B[-2]:
r = float(f-B[-1])/(B[-2]-B[-1])
elif v == mB:
r = float(f-B[-2])/(B[-1]-B[-2])
else:
r = 0
return r
def _calcOffAxisFactor(self, aLocation, deltaLocation, limits):
"""
Calculate the off-axis factors.
"""
relative = []
for dim in limits.keys():
f = aLocation[dim]
v = deltaLocation[dim]
mB, M, mA = limits[dim]
r = 0
if mA is not None and v > mA:
relative.append(0)
continue
elif mB is not None and v < mB:
relative.append(0)
continue
if f < v-_EPSILON:
if mB is None:
if M is not None and mA is not None:
if v == M:
r = (float(max(f,mA)-min(f, mA))/float(max(M,mA)-min(M, mA)))
else:
r = -(float(max(f,mA)-min(f, mA))/float(max(M,mA)-min(M, mA)) -1)
else: r = 0
elif mA is None: r = 0
else: r = float(f-mB)/(mA-mB)
elif f > v+_EPSILON:
if mB is None: r = 0
elif mA is None:
if M is not None and mB is not None:
if v == M:
r = (float(max(f,mB)-min(f, mB))/(max(mB, M)-min(mB, M)))
else:
r = -(float(max(f,mB)-min(f, mB))/(max(mB, M)-min(mB, M)) - 1)
else: r = 0
else: r = float(mA-f)/(mA-mB)
else: r = 1
relative.append(r)
f = 1
for i in relative:
f *= i
return f
|
class Mutator(dict):
'''
Calculator for multi dimensional interpolations.
::
# The mutator needs one neutral object.
m = Mutator(myNeutralMathObject)
# The mutator needs one or more deltas.
m.addDelta(Location(pop=1), myMasterMathObject-myNeutralMathObject)
# The mutator calculates instances at other locations. Remember to inflate.
m.getInstance(Location(pop=0.5)) + myNeutralMathObject
'''
def __init__(self, neutral=None):
pass
def setBender(self, bender):
pass
def setBias(self, bias):
pass
def getBias(self):
pass
def setNeutral(self, aMathObject, deltaName="origin"):
'''Set the neutral object.'''
pass
def getNeutral(self):
'''Get the neutral object.'''
pass
def addDelta(self, location, aMathObject, deltaName = None, punch=False, axisOnly=True):
''' Add a delta at this location.
* location: a Location object
* mathObject: a math-sensitive object
* deltaName: optional string/token
* punch:
* True: add the difference with the instance value at that location and the delta
* False: just add the delta.
'''
pass
def getAxisNames(self):
'''
Collect a set of axis names from all deltas.
'''
pass
def _collectAxisPoints(self):
'''
Return a dictionary with all on-axis locations.
'''
pass
def _collectOffAxisPoints(self):
'''
Return a dictionary with all off-axis locations.
'''
pass
def collectLocations(self):
'''
Return a dictionary with all objects.
'''
pass
def _allLocations(self):
'''
Return a list of all locations of all objects.
'''
pass
def getInstance(self, aLocation, axisOnly=False, getFactors=False):
''' Calculate the delta at aLocation.
* aLocation: a Location object, expected to be in bent space
* axisOnly:
* True: calculate an instance only with the on-axis masters.
* False: calculate an instance with on-axis and off-axis masters.
* getFactors:
* True: return a list of the calculated factors.
'''
pass
def makeLocation(self, aLocation):
pass
def makeInstance(self, aLocation, bend=False):
'''
Calculate an instance with the right bias and add the neutral.
aLocation: expected to be in input space
'''
pass
def getFactors(self, aLocation, axisOnly=False, allFactors=False):
'''
Return a list of all factors and math items at aLocation.
factor, mathItem, deltaName
all = True: include factors that are zero or near-zero
'''
pass
def _accumulateFactors(self, aLocation, deltaLocation, limits, axisOnly):
'''
Calculate the factors of deltaLocation towards aLocation,
'''
pass
def _calcOnAxisFactor(self, aLocation, deltaAxis, deltasOnSameAxis, deltaLocation):
'''
Calculate the on-axis factors.
'''
pass
def _calcOffAxisFactor(self, aLocation, deltaLocation, limits):
'''
Calculate the off-axis factors.
'''
pass
| 20 | 15 | 14 | 0 | 11 | 3 | 4 | 0.33 | 1 | 6 | 2 | 1 | 19 | 5 | 19 | 46 | 319 | 30 | 218 | 72 | 198 | 71 | 201 | 71 | 181 | 20 | 2 | 5 | 80 |
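The `Mutator` docstring above sketches the API in fragments; spelled out with plain floats as the "math-sensitive" objects (floats support the `+`, `-`, `*` the class needs), a minimal sketch might look like this (illustrative only, not part of the dataset):

```python
# Illustrative only, not part of the dataset record. Uses plain floats
# as the math objects; import paths follow the file paths shown above.
from mutatorMath.objects.mutator import Mutator
from mutatorMath.objects.location import Location

neutral = 10.0
master = 20.0

m = Mutator()
m.setNeutral(neutral)
m.addDelta(Location(pop=1), master - neutral)

# getInstance returns the interpolated *delta*; add the neutral back
# yourself, or let makeInstance do it for you.
print(m.getInstance(Location(pop=0.5)) + neutral)   # 15.0
print(m.makeInstance(Location(pop=0.5)))            # 15.0
```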
145,372 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/objects/location.py
|
mutatorMath.objects.location.Location
|
class Location(dict):
"""
A object subclassed from dict to store n-dimensional locations.
- key is dimension or axis name
- value is the coordinate.
- Location objects behave like numbers.
- If a specific dimension is missing, assume it is zero.
- Convert to and from dict, tuple.
::
>>> l = Location(pop=1, snap=-100)
>>> print(l)
<Location pop:1, snap:-100 >
Location objects can be used as math objects:
::
>>> l = Location(pop=1)
>>> l * 2
<Location pop:2 >
>>> 2 * l
<Location pop:2 >
>>> l / 2
<Location pop:0.500 >
>>> l = Location(pop=1)
>>> m = Location(pop=10)
>>> l + m
<Location pop:11 >
>>> l = Location(pop=1)
>>> m = Location(pop=10)
>>> l - m
<Location pop:-9 >
"""
def __repr__(self):
t = ["<%s"%self.__class__.__name__]
t.append(self.asString())
t.append(">")
return " ".join(t)
def __lt__(self, other):
if len(self) < len(other):
return True
elif len(self) > len(other):
return False
self_keys = sorted(self.keys())
other_keys = sorted(other.keys())
for i, key in enumerate(self_keys):
if key < other_keys[i]:
return True
elif key > other_keys[i]:
return False
if self[key] < other[key]:
return True
return False
def expand(self, axisNames):
"""
Expand the location with zero values for all axes in axisNames that aren't filled in the current location.
::
>>> l = Location(pop=1)
>>> l.expand(['snap', 'crackle'])
>>> print(l)
<Location crackle:0, pop:1, snap:0 >
"""
for k in axisNames:
if k not in self:
self[k] = 0
def copy(self):
"""
Return a copy of this location.
::
>>> l = Location(pop=1, snap=0)
>>> l.copy()
<Location pop:1, snap:0 >
"""
new = self.__class__()
new.update(self)
return new
def fromTuple(self, locationTuple):
"""
Read the coordinates from a tuple.
::
>>> t = (('pop', 1), ('snap', -100))
>>> l = Location()
>>> l.fromTuple(t)
>>> print(l)
<Location pop:1, snap:-100 >
"""
for key, value in locationTuple:
try:
self[key] = float(value)
except TypeError:
self[key] = tuple([float(v) for v in value])
def asTuple(self):
"""Return the location as a tuple.
Sort the dimension names alphabetically.
::
>>> l = Location(pop=1, snap=-100)
>>> l.asTuple()
(('pop', 1), ('snap', -100))
"""
t = []
k = sorted(self.keys())
for key in k:
t.append((key, self[key]))
return tuple(t)
def getType(self, short=False):
"""Return a string describing the type of the location, i.e. origin, on axis, off axis etc.
::
>>> l = Location()
>>> l.getType()
'origin'
>>> l = Location(pop=1)
>>> l.getType()
'on-axis, pop'
>>> l = Location(pop=1, snap=1)
>>> l.getType()
'off-axis, pop snap'
>>> l = Location(pop=(1,2))
>>> l.getType()
'on-axis, pop, split'
"""
if self.isOrigin():
return "origin"
t = []
onAxis = self.isOnAxis()
if onAxis is False:
if short:
t.append("off-axis")
else:
t.append("off-axis, "+ " ".join(self.getActiveAxes()))
else:
if short:
t.append("on-axis")
else:
t.append("on-axis, %s"%onAxis)
if self.isAmbivalent():
t.append("split")
return ', '.join(t)
def getActiveAxes(self):
"""
Return a list of names of axes which are not zero
::
>>> l = Location(pop=1, snap=0, crackle=1)
>>> l.getActiveAxes()
['crackle', 'pop']
"""
names = sorted(k for k in self.keys() if self[k]!=0)
return names
def asString(self, strict=False):
"""
Return the location as a string.
::
>>> l = Location(pop=1, snap=(-100.0, -200))
>>> l.asString()
'pop:1, snap:(-100.000,-200.000)'
"""
if len(self.keys())==0:
return "origin"
v = []
n = []
try:
for name, value in self.asTuple():
s = ''
if value is None:
s = "None"
elif type(value) == tuple or type(value) == list:
s = "(%.3f,%.3f)"%(value[0], value[1])
elif int(value) == value:
s = "%d"%(int(value))
else:
s = "%.3f"%(value)
if s != '':
n.append("%s:%s"%(name, s))
return ", ".join(n)
except TypeError:
import traceback
print("Location value error:", name, value)
for key, value in self.items():
print("\t\tkey:", key)
print("\t\tvalue:", value)
traceback.print_exc()
return "error"
def asDict(self):
"""
Return the location as a plain python dict.
::
>>> l = Location(pop=1, snap=-100)
>>> l.asDict()['snap']
-100
>>> l.asDict()['pop']
1
"""
new = {}
new.update(self)
return new
def asSortedStringDict(self, roundValue=False):
""" Return the data in a dict with sorted names and column titles.
::
>>> l = Location(pop=1, snap=(1,10))
>>> l.asSortedStringDict()[0]['value']
'1'
>>> l.asSortedStringDict()[0]['axis']
'pop'
>>> l.asSortedStringDict()[1]['axis']
'snap'
>>> l.asSortedStringDict()[1]['value']
'(1,10)'
"""
data = []
names = sorted(self.keys())
for n in names:
data.append({'axis':n, 'value':numberToString(self[n])})
return data
def strip(self):
""" Remove coordinates that are zero, the opposite of expand().
::
>>> l = Location(pop=1, snap=0)
>>> l.strip()
<Location pop:1 >
"""
result = []
for k, v in self.items():
if isinstance(v, tuple):
if v > (_EPSILON, ) * len(v) or v < (-_EPSILON, ) * len(v):
result.append((k, v))
elif v > _EPSILON or v < -_EPSILON:
result.append((k, v))
return self.__class__(result)
def common(self, other):
"""
Return two objects with the same dimensions if they lie in the same orthogonal plane.
::
>>> l = Location(pop=1, snap=2)
>>> m = Location(crackle=1, snap=3)
>>> l.common(m)
(<Location snap:2 >, <Location snap:3 >)
"""
selfDim = set(self.keys())
otherDim = set(other.keys())
dims = selfDim | otherDim
newSelf = None
newOther = None
for dim in dims:
sd = self.get(dim, None)
od = other.get(dim, None)
if sd is None or od is None:
# axis is missing in one or the other
continue
if -_EPSILON < sd < _EPSILON and -_EPSILON < od < _EPSILON:
# values are both zero
continue
if newSelf is None:
newSelf = self.__class__()
if newOther is None:
newOther = self.__class__()
newSelf[dim] = self[dim]
newOther[dim] = other[dim]
return newSelf, newOther
#
#
# tests
#
#
def isOrigin(self):
"""
Return True if the location is at the origin.
::
>>> l = Location(pop=1)
>>> l.isOrigin()
False
>>> l = Location()
>>> l.isOrigin()
True
"""
for name, value in self.items():
if isinstance(value, tuple):
if (value < (-_EPSILON,) * len(value)
or value > (_EPSILON,) * len(value)):
return False
if value < -_EPSILON or value > _EPSILON:
return False
return True
def isOnAxis(self):
"""
Returns statements about this location:
* False if the location is not on-axis
* The name of the axis if it is on-axis
* None if the Location is at the origin
Note: this is only valid for an unbiased location.
::
>>> l = Location(pop=1)
>>> l.isOnAxis()
'pop'
>>> l = Location(pop=1, snap=1)
>>> l.isOnAxis()
False
>>> l = Location()
>>> l.isOnAxis() is None
True
"""
new = self.__class__()
new.update(self)
s = new.strip()
dims = list(s.keys())
if len(dims)> 1:
return False
elif len(dims)==1:
return dims[0]
return None
def isAmbivalent(self, dim=None):
"""
Return True if any of the factors are in fact tuples.
If a dimension name is given only that dimension is tested.
::
>>> l = Location(pop=1)
>>> l.isAmbivalent()
False
>>> l = Location(pop=1, snap=(100, -100))
>>> l.isAmbivalent()
True
"""
if dim is not None:
try:
return isinstance(self[dim], tuple)
except KeyError:
# dimension is not present, it should be 0, so not ambivalent
return False
for dim, val in self.items():
if isinstance(val, tuple):
return True
return False
def split(self):
"""
Split an ambivalent location into 2. One for the x, the other for the y.
::
>>> l = Location(pop=(-5,5))
>>> l.split()
(<Location pop:-5 >, <Location pop:5 >)
"""
x = self.__class__()
y = self.__class__()
for dim, val in self.items():
if isinstance(val, tuple):
x[dim] = val[0]
y[dim] = val[1]
else:
x[dim] = val
y[dim] = val
return x, y
def spliceX(self):
"""
Return a copy with the x values preferred for ambivalent locations.
::
>>> l = Location(pop=(-5,5))
>>> l.spliceX()
<Location pop:-5 >
"""
new = self.__class__()
for dim, val in self.items():
if isinstance(val, tuple):
new[dim] = val[0]
else:
new[dim] = val
return new
def spliceY(self):
"""
Return a copy with the y values preferred for ambivalent locations.
::
>>> l = Location(pop=(-5,5))
>>> l.spliceY()
<Location pop:5 >
"""
new = self.__class__()
for dim, val in self.items():
if isinstance(val, tuple):
new[dim] = val[1]
else:
new[dim] = val
return new
def distance(self, other=None):
"""Return the geometric distance to the other location.
If no object is provided, this will calculate the distance to the origin.
::
>>> l = Location(pop=100)
>>> m = Location(pop=200)
>>> l.distance(m)
100.0
>>> l = Location()
>>> m = Location(pop=200)
>>> l.distance(m)
200.0
>>> l = Location(pop=3, snap=5)
>>> m = Location(pop=7, snap=8)
>>> l.distance(m)
5.0
"""
t = 0
if other is None:
other = self.__class__()
for axisName in set(self.keys()) | set(other.keys()):
t += (other.get(axisName,0)-self.get(axisName,0))**2
return math.sqrt(t)
def sameAs(self, other):
"""
Check if this is the same location.
::
>>> l = Location(pop=5, snap=100)
>>> m = Location(pop=5.0, snap=100.0)
>>> l.sameAs(m)
0
>>> l = Location(pop=5, snap=100)
>>> m = Location(pop=5.0, snap=100.0001)
>>> l.sameAs(m)
-1
"""
if not hasattr(other, "get"):
return -1
d = self.distance(other)
if d < _EPSILON:
return 0
return -1
# math operators
def __add__(self, other):
new = self.__class__()
new.update(self)
new.update(other)
selfDim = set(self.keys())
otherDim = set(other.keys())
for key in selfDim & otherDim:
ts = type(self[key])!=tuple
to = type(other[key])!=tuple
if ts:
sx = sy = self[key]
else:
sx = self[key][0]
sy = self[key][1]
if to:
ox = oy = other[key]
else:
ox = other[key][0]
oy = other[key][1]
x = sx+ox
y = sy+oy
if x==y:
new[key] = x
else:
new[key] = x,y
return new
def __sub__(self, other):
new = self.__class__()
new.update(self)
for key, value in other.items():
try:
new[key] = -value
except TypeError:
new[key] = (-value[0], -value[1])
selfDim = set(self.keys())
otherDim = set(other.keys())
for key in selfDim & otherDim:
ts = type(self[key])!=tuple
to = type(other[key])!=tuple
if ts:
sx = sy = self[key]
else:
sx = self[key][0]
sy = self[key][1]
if to:
ox = oy = other[key]
else:
ox = other[key][0]
oy = other[key][1]
x = sx-ox
y = sy-oy
if x==y:
new[key] = x
else:
new[key] = x,y
return new
def __mul__(self, factor):
new = self.__class__()
if isinstance(factor, tuple):
for key, value in self.items():
if type(value) == tuple:
new[key] = factor[0] * value[0], factor[1] * value[1]
else:
new[key] = factor[0] * value, factor[1] * value
else:
for key, value in self.items():
if type(value) == tuple:
new[key] = factor * value[0], factor * value[1]
else:
new[key] = factor * value
return new
__rmul__ = __mul__
def __truediv__(self, factor):
if factor == 0:
raise ZeroDivisionError
if isinstance(factor, tuple):
if factor[0] == 0 or factor[1] == 0:
raise ZeroDivisionError
return self * (1.0/factor[0]) + self * (1.0/factor[1])
return self * (1.0/factor)
__div__ = __truediv__
def transform(self, transformDict):
if transformDict is None:
return self
new = self.__class__()
for dim, (offset, scale) in transformDict.items():
new[dim] = (self.get(dim,0)+offset)*scale
return new
|
class Location(dict):
'''
An object subclassed from dict to store n-dimensional locations.
- key is dimension or axis name
- value is the coordinate.
- Location objects behave like numbers.
- If a specific dimension is missing, assume it is zero.
- Convert to and from dict, tuple.
::
>>> l = Location(pop=1, snap=-100)
>>> print(l)
<Location pop:1, snap:-100 >
Location objects can be used as math objects:
::
>>> l = Location(pop=1)
>>> l * 2
<Location pop:2 >
>>> 2 * l
<Location pop:2 >
>>> l / 2
<Location pop:0.500 >
>>> l = Location(pop=1)
>>> m = Location(pop=10)
>>> l + m
<Location pop:11 >
>>> l = Location(pop=1)
>>> m = Location(pop=10)
>>> l - m
<Location pop:-9 >
'''
def __repr__(self):
pass
def __lt__(self, other):
pass
def expand(self, axisNames):
'''
Expand the location with zero values for all axes in axisNames that aren't filled in the current location.
::
>>> l = Location(pop=1)
>>> l.expand(['snap', 'crackle'])
>>> print(l)
<Location crackle:0, pop:1, snap:0 >
'''
pass
def copy(self):
'''
Return a copy of this location.
::
>>> l = Location(pop=1, snap=0)
>>> l.copy()
<Location pop:1, snap:0 >
'''
pass
def fromTuple(self, locationTuple):
'''
Read the coordinates from a tuple.
::
>>> t = (('pop', 1), ('snap', -100))
>>> l = Location()
>>> l.fromTuple(t)
>>> print(l)
<Location pop:1, snap:-100 >
'''
pass
def asTuple(self):
'''Return the location as a tuple.
Sort the dimension names alphabetically.
::
>>> l = Location(pop=1, snap=-100)
>>> l.asTuple()
(('pop', 1), ('snap', -100))
'''
pass
def getType(self, short=False):
'''Return a string describing the type of the location, i.e. origin, on axis, off axis etc.
::
>>> l = Location()
>>> l.getType()
'origin'
>>> l = Location(pop=1)
>>> l.getType()
'on-axis, pop'
>>> l = Location(pop=1, snap=1)
>>> l.getType()
'off-axis, pop snap'
>>> l = Location(pop=(1,2))
>>> l.getType()
'on-axis, pop, split'
'''
pass
def getActiveAxes(self):
'''
Return a list of names of axes which are not zero
::
>>> l = Location(pop=1, snap=0, crackle=1)
>>> l.getActiveAxes()
['crackle', 'pop']
'''
pass
def asString(self, strict=False):
'''
Return the location as a string.
::
>>> l = Location(pop=1, snap=(-100.0, -200))
>>> l.asString()
'pop:1, snap:(-100.000,-200.000)'
'''
pass
def asDict(self):
'''
Return the location as a plain python dict.
::
>>> l = Location(pop=1, snap=-100)
>>> l.asDict()['snap']
-100
>>> l.asDict()['pop']
1
'''
pass
def asSortedStringDict(self, roundValue=False):
''' Return the data in a dict with sorted names and column titles.
::
>>> l = Location(pop=1, snap=(1,10))
>>> l.asSortedStringDict()[0]['value']
'1'
>>> l.asSortedStringDict()[0]['axis']
'pop'
>>> l.asSortedStringDict()[1]['axis']
'snap'
>>> l.asSortedStringDict()[1]['value']
'(1,10)'
'''
pass
def strip(self):
''' Remove coordinates that are zero, the opposite of expand().
::
>>> l = Location(pop=1, snap=0)
>>> l.strip()
<Location pop:1 >
'''
pass
def common(self, other):
'''
Return two objects with the same dimensions if they lie in the same orthogonal plane.
::
>>> l = Location(pop=1, snap=2)
>>> m = Location(crackle=1, snap=3)
>>> l.common(m)
(<Location snap:2 >, <Location snap:3 >)
'''
pass
def isOrigin(self):
'''
Return True if the location is at the origin.
::
>>> l = Location(pop=1)
>>> l.isOrigin()
False
>>> l = Location()
>>> l.isOrigin()
True
'''
pass
def isOnAxis(self):
'''
Returns statements about this location:
* False if the location is not on-axis
* The name of the axis if it is on-axis
* None if the Location is at the origin
Note: this is only valid for an unbiased location.
::
>>> l = Location(pop=1)
>>> l.isOnAxis()
'pop'
>>> l = Location(pop=1, snap=1)
>>> l.isOnAxis()
False
>>> l = Location()
>>> l.isOnAxis() is None
True
'''
pass
def isAmbivalent(self, dim=None):
'''
Return True if any of the factors are in fact tuples.
If a dimension name is given only that dimension is tested.
::
>>> l = Location(pop=1)
>>> l.isAmbivalent()
False
>>> l = Location(pop=1, snap=(100, -100))
>>> l.isAmbivalent()
True
'''
pass
def split(self):
'''
Split an ambivalent location into 2. One for the x, the other for the y.
::
>>> l = Location(pop=(-5,5))
>>> l.split()
(<Location pop:-5 >, <Location pop:5 >)
'''
pass
def spliceX(self):
'''
Return a copy with the x values preferred for ambivalent locations.
::
>>> l = Location(pop=(-5,5))
>>> l.spliceX()
<Location pop:-5 >
'''
pass
def spliceY(self):
'''
Return a copy with the y values preferred for ambivalent locations.
::
>>> l = Location(pop=(-5,5))
>>> l.spliceY()
<Location pop:5 >
'''
pass
def distance(self, other=None):
'''Return the geometric distance to the other location.
If no object is provided, this will calculate the distance to the origin.
::
>>> l = Location(pop=100)
>>> m = Location(pop=200)
>>> l.distance(m)
100.0
>>> l = Location()
>>> m = Location(pop=200)
>>> l.distance(m)
200.0
>>> l = Location(pop=3, snap=5)
>>> m = Location(pop=7, snap=8)
>>> l.distance(m)
5.0
'''
pass
def sameAs(self, other):
'''
Check if this is the same location.
::
>>> l = Location(pop=5, snap=100)
>>> m = Location(pop=5.0, snap=100.0)
>>> l.sameAs(m)
0
>>> l = Location(pop=5, snap=100)
>>> m = Location(pop=5.0, snap=100.0001)
>>> l.sameAs(m)
-1
'''
pass
def __add__(self, other):
pass
def __sub__(self, other):
pass
def __mul__(self, factor):
pass
def __truediv__(self, factor):
pass
def transform(self, transformDict):
pass
| 27 | 20 | 19 | 1 | 11 | 7 | 4 | 0.77 | 1 | 10 | 0 | 0 | 26 | 0 | 26 | 53 | 572 | 66 | 286 | 101 | 258 | 220 | 263 | 101 | 235 | 9 | 2 | 3 | 100 |
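A minimal usage sketch of the Location class documented above, assuming the standard mutatorMath.objects.location module path; the axis names and values are illustrative only.
# Sketch: basic Location arithmetic and ambivalent (anisotropic) values.
from mutatorMath.objects.location import Location  # assumed module path

a = Location(pop=1, snap=-100)        # a plain location
b = Location(pop=10)                  # missing axes count as 0
print(a + b)                          # <Location pop:11, snap:-100 >
print(a.distance(b))                  # geometric distance between the two points

amb = Location(pop=(-5, 5))           # ambivalent: different x and y values
print(amb.isAmbivalent())             # True
x, y = amb.split()                    # one location per component
print(x, y)                           # <Location pop:-5 >  <Location pop:5 >
print(amb.spliceX(), amb.spliceY())   # keep only the x or the y component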
145,373 |
LettError/MutatorMath
|
LettError_MutatorMath/setup.py
|
setup.release
|
class release(bump_version):
"""Drop the developmental release '.devN' suffix from the package version,
open the default text $EDITOR to write release notes, commit the changes
and generate a git tag.
Release notes can also be set with the -m/--message option, or by reading
from standard input.
If --major, --minor or --patch options are passed, the respective
'SemVer' digit is also incremented before tagging the release.
"""
description = "tag a new release"
user_options = bump_version.user_options + [
("message=", 'm', "message containing the release notes"),
]
def initialize_options(self):
bump_version.initialize_options(self)
self.message = None
def finalize_options(self):
bump_version.finalize_options(self)
self.bump_first = any(
getattr(self, a, False) for a in ("major", "minor", "patch"))
if not self.bump_first:
import re
current_version = self.distribution.metadata.get_version()
if not re.search(r"\.dev[0-9]+", current_version):
from distutils.errors import DistutilsSetupError
raise DistutilsSetupError(
"current version (%s) has no '.devN' suffix.\n "
"Run 'setup.py bump_version', or use any of "
"--major, --minor, --patch options" % current_version)
message = self.message
if message is None:
if sys.stdin.isatty():
# stdin is interactive, use editor to write release notes
message = self.edit_release_notes()
else:
# read release notes from stdin pipe
message = sys.stdin.read()
if not message.strip():
from distutils.errors import DistutilsSetupError
raise DistutilsSetupError("release notes message is empty")
self.message = "Release {new_version}\n\n%s" % (message)
@staticmethod
def edit_release_notes():
"""Use the default text $EDITOR to write release notes.
If $EDITOR is not set, use 'nano'."""
from tempfile import mkstemp
import os
import shlex
import subprocess
text_editor = shlex.split(os.environ.get('EDITOR', 'nano'))
fd, tmp = mkstemp(prefix='bumpversion-')
try:
os.close(fd)
with open(tmp, 'w') as f:
f.write("\n\n# Write release notes.\n"
"# Lines starting with '#' will be ignored.")
subprocess.check_call(text_editor + [tmp])
with open(tmp, 'r') as f:
changes = "".join(
l for l in f.readlines() if not l.startswith('#'))
finally:
os.remove(tmp)
return changes
def run(self):
if self.bump_first:
# bump the specified version part but don't commit immediately
log.info("bumping '%s' version" % self.part)
self.bumpversion(self.part, commit=False)
dirty=True
else:
dirty=False
log.info("stripping developmental release suffix")
# drop '.dev0' suffix, commit with given message and create git tag
self.bumpversion(
"release", tag=True, message=self.message, allow_dirty=dirty)
|
class release(bump_version):
'''Drop the developmental release '.devN' suffix from the package version,
open the default text $EDITOR to write release notes, commit the changes
and generate a git tag.
Release notes can also be set with the -m/--message option, or by reading
from standard input.
If --major, --minor or --patch options are passed, the respective
'SemVer' digit is also incremented before tagging the release.
'''
def initialize_options(self):
pass
def finalize_options(self):
pass
@staticmethod
def edit_release_notes():
'''Use the default text $EDITOR to write release notes.
If $EDITOR is not set, use 'nano'.'''
pass
def run(self):
pass
| 6 | 2 | 17 | 2 | 14 | 2 | 3 | 0.28 | 1 | 1 | 0 | 0 | 3 | 3 | 4 | 47 | 89 | 14 | 61 | 25 | 48 | 17 | 48 | 22 | 36 | 6 | 3 | 2 | 10 |
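A hedged sketch of driving the release command described above from a script rather than interactively; the setup.py location, the release notes, and the assumption that the command is registered in this project's cmdclass are all illustrative.
# Sketch: invoking the custom 'release' command non-interactively (hypothetical paths and messages).
import subprocess
import sys

# Pass the notes with -m so no $EDITOR session is opened.
subprocess.check_call(
    [sys.executable, "setup.py", "release", "-m", "Fix kerning interpolation"])

# Alternatively pipe the notes through stdin; stdin is then not a tty,
# so finalize_options() reads the message from the pipe instead.
proc = subprocess.run(
    [sys.executable, "setup.py", "release", "--minor"],
    input="Add axis warping support\n", text=True)
print(proc.returncode)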
145,374 |
LettError/MutatorMath
|
LettError_MutatorMath/setup.py
|
setup.bump_version
|
class bump_version(Command):
description = "increment the package version and commit the changes"
user_options = [
("major", None, "bump the first digit, for incompatible API changes"),
("minor", None, "bump the second digit, for new backward-compatible features"),
("patch", None, "bump the third digit, for bug fixes (default)"),
]
def initialize_options(self):
self.minor = False
self.major = False
self.patch = False
def finalize_options(self):
part = None
for attr in ("major", "minor", "patch"):
if getattr(self, attr, False):
if part is None:
part = attr
else:
from distutils.errors import DistutilsOptionError
raise DistutilsOptionError(
"version part options are mutually exclusive")
self.part = part or "patch"
def bumpversion(self, part, commit=True, tag=False, message=None,
allow_dirty=False):
""" Run bumpversion.main() with the specified arguments, and return the
new computed version string.
"""
import bumpversion
args = (
(['--verbose'] if self.verbose > 1 else []) +
(['--allow-dirty'] if allow_dirty else []) +
(['--commit'] if commit else ['--no-commit']) +
(['--tag'] if tag else ['--no-tag']) +
(['--message', message] if message is not None else []) +
['--list', part]
)
log.debug(
"$ bumpversion %s" % " ".join(a.replace(" ", "\\ ") for a in args))
with capture_logger("bumpversion.list") as out:
bumpversion.main(args)
last_line = out.getvalue().splitlines()[-1]
new_version = last_line.replace("new_version=", "")
return new_version
def run(self):
log.info("bumping '%s' version" % self.part)
self.bumpversion(self.part)
|
class bump_version(Command):
def initialize_options(self):
pass
def finalize_options(self):
pass
def bumpversion(self, part, commit=True, tag=False, message=None,
allow_dirty=False):
''' Run bumpversion.main() with the specified arguments, and return the
new computed version string.
'''
pass
def run(self):
pass
| 5 | 1 | 11 | 1 | 9 | 1 | 3 | 0.07 | 1 | 1 | 0 | 1 | 4 | 4 | 4 | 43 | 55 | 9 | 43 | 20 | 35 | 3 | 28 | 18 | 21 | 6 | 2 | 3 | 12 |
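For reference, a small sketch that reproduces the argument list bumpversion() assembles for the default, quiet patch bump; the values are illustrative and bumpversion.main() itself is only hinted at in the closing comment.
# Sketch: the bumpversion arguments built for a default patch bump.
verbose, allow_dirty, commit, tag, message, part = 1, False, True, False, None, "patch"
args = (
    (['--verbose'] if verbose > 1 else []) +
    (['--allow-dirty'] if allow_dirty else []) +
    (['--commit'] if commit else ['--no-commit']) +
    (['--tag'] if tag else ['--no-tag']) +
    (['--message', message] if message is not None else []) +
    ['--list', part]
)
print(args)  # ['--commit', '--no-tag', '--list', 'patch']
# bumpversion.main(args) then logs "new_version=..." as its last line,
# which bumpversion() parses to return the new version string.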
145,375 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/ufo/instance.py
|
mutatorMath.ufo.instance.InstanceWriter
|
class InstanceWriter(object):
"""
Simple object to build a UFO instance.
Collect the data needed for an instance
and generate it as fast as possible.
Make a font object.
Add data straight to the font.
Calculate the data immediately, while reading the document.
Don't edit the data.
Don't represent the data.
"""
_fontClass = defcon.objects.font.Font
_tempFontLibGlyphMuteKey = "_mutatorMath.temp.mutedGlyphNames"
def __init__(self, path, ufoVersion=1,
roundGeometry=False,
axes=None,
verbose=False,
logger=None,
bendLocations=False,
):
self.path = path
self.font = self._fontClass()
self.ufoVersion = ufoVersion
self.roundGeometry = roundGeometry
self.bendLocations = bendLocations
if axes is not None:
self.axes = axes
else:
self.axes = {}
self.sources = {}
self.muted = dict(kerning=[], info=[], glyphs={}) # muted data in the masters
self.mutedGlyphsNames = [] # muted glyphs in the instance
self.familyName = None
self.styleName = None
self.postScriptFontName = None
self.locationObject = None
self.unicodeValues = {}
self.verbose=verbose
self.logger = None
if self.verbose:
self.logger = logging.getLogger("mutatorMath")
self._failed = [] # list of glyphnames we could not generate
self._missingUnicodes = [] # list of glyphnames with missing unicode values
def setSources(self, sources):
""" Set a list of sources."""
self.sources = sources
def setMuted(self, muted):
""" Set the mute states. """
self.muted.update(muted)
def muteGlyph(self, glyphName):
""" Mute the generating of this specific glyph. """
self.mutedGlyphsNames.append(glyphName)
def setGroups(self, groups, kerningGroupConversionRenameMaps=None):
""" Copy the groups into our font. """
skipping = []
for name, members in groups.items():
checked = []
for m in members:
if m in self.font:
checked.append(m)
else:
skipping.append(m)
if checked:
self.font.groups[name] = checked
if skipping:
if self.verbose and self.logger:
self.logger.info("\tNote: some glyphnames were removed from groups: %s (unavailable in the font)", ", ".join(skipping))
if kerningGroupConversionRenameMaps:
# in case the sources were UFO2,
# and defcon upconverted them to UFO3
# and now we have to down convert them again,
# we don't want the UFO3 public prefixes in the group names
self.font.kerningGroupConversionRenameMaps = kerningGroupConversionRenameMaps
def getFailed(self):
""" Return the list of glyphnames that failed to generate."""
return self._failed
def getMissingUnicodes(self):
""" Return the list of glyphnames with missing unicode values. """
return self._missingUnicodes
def setLib(self, lib):
""" Copy the lib items into our font. """
for name, item in lib.items():
self.font.lib[name] = item
def setPostScriptFontName(self, name):
""" Set the postScriptFontName. """
self.font.info.postscriptFontName = name
def setStyleMapFamilyName(self, name):
""" Set the stylemap FamilyName. """
self.font.info.styleMapFamilyName = name
def setStyleMapStyleName(self, name):
""" Set the stylemap StyleName. """
self.font.info.styleMapStyleName = name
def setStyleName(self, name):
""" Set the styleName. """
self.font.info.styleName = name
def setFamilyName(self, name):
""" Set the familyName"""
self.font.info.familyName = name
def copyFeatures(self, featureSource):
""" Copy the features from this source """
if featureSource in self.sources:
src, loc = self.sources[featureSource]
if isinstance(src.features.text, str):
self.font.features.text = u""+src.features.text
elif isinstance(src.features.text, unicode):
self.font.features.text = src.features.text
def makeUnicodeMapFromSources(self):
""" Create a dict with glyphName -> unicode value pairs
using the data in the sources.
If all master glyphs have the same unicode value
this value will be used in the map.
If master glyphs have conflicting values, a warning will be printed and no value will be used.
If only a single master has a value, that value will be used.
"""
values = {}
for locationName, (source, loc) in self.sources.items():
# this will be expensive in large fonts
for glyph in source:
if glyph.unicodes is not None:
if glyph.name not in values:
values[glyph.name] = {}
for u in glyph.unicodes:
values[glyph.name][u] = 1
for name, u in values.items():
if len(u) == 0:
# only report missing unicodes if the name has no extension
if "." not in name:
self._missingUnicodes.append(name)
continue
k = list(u.keys())
self.unicodeValues[name] = k
return self.unicodeValues
def getAvailableGlyphnames(self):
""" Return a list of all glyphnames we have masters for."""
glyphNames = {}
for locationName, (source, loc) in self.sources.items():
for glyph in source:
glyphNames[glyph.name] = 1
names = sorted(glyphNames.keys())
return names
def setLocation(self, locationObject):
""" Set the location directly. """
self.locationObject = locationObject
def addInfo(self, instanceLocation=None, sources=None, copySourceName=None):
""" Add font info data. """
if instanceLocation is None:
instanceLocation = self.locationObject
infoObject = self.font.info
infoMasters = []
if sources is None:
sources = self.sources
items = []
for sourceName, (source, sourceLocation) in sources.items():
if sourceName in self.muted['info']:
# info in this master was muted, so do not add.
continue
items.append((sourceLocation, MathInfo(source.info)))
try:
bias, m = buildMutator(items, axes=self.axes)
except:
if self.logger:
self.logger.exception("Error processing font info. %s", items)
return
instanceObject = m.makeInstance(instanceLocation, bend=self.bendLocations)
if self.roundGeometry:
try:
instanceObject = instanceObject.round()
except AttributeError:
warnings.warn("MathInfo object missing round() method.")
instanceObject.extractInfo(self.font.info)
# handle the copyable info fields
if copySourceName is not None:
if not copySourceName in sources:
if self.verbose and self.logger:
self.logger.info("Copy info source %s not found, skipping.", copySourceName)
return
copySourceObject, loc = sources[copySourceName]
self._copyFontInfo(self.font.info, copySourceObject.info)
def _copyFontInfo(self, targetInfo, sourceInfo):
""" Copy the non-calculating fields from the source info.
"""
infoAttributes = [
"versionMajor",
"versionMinor",
"copyright",
"trademark",
"note",
"openTypeGaspRangeRecords",
"openTypeHeadCreated",
"openTypeHeadFlags",
"openTypeNameDesigner",
"openTypeNameDesignerURL",
"openTypeNameManufacturer",
"openTypeNameManufacturerURL",
"openTypeNameLicense",
"openTypeNameLicenseURL",
"openTypeNameVersion",
"openTypeNameUniqueID",
"openTypeNameDescription",
"#openTypeNamePreferredFamilyName",
"#openTypeNamePreferredSubfamilyName",
"#openTypeNameCompatibleFullName",
"openTypeNameSampleText",
"openTypeNameWWSFamilyName",
"openTypeNameWWSSubfamilyName",
"openTypeNameRecords",
"openTypeOS2Selection",
"openTypeOS2VendorID",
"openTypeOS2Panose",
"openTypeOS2FamilyClass",
"openTypeOS2UnicodeRanges",
"openTypeOS2CodePageRanges",
"openTypeOS2Type",
"postscriptIsFixedPitch",
"postscriptForceBold",
"postscriptDefaultCharacter",
"postscriptWindowsCharacterSet"
]
for infoAttribute in infoAttributes:
copy = False
if self.ufoVersion == 1 and infoAttribute in fontInfoAttributesVersion1:
copy = True
elif self.ufoVersion == 2 and infoAttribute in fontInfoAttributesVersion2:
copy = True
elif self.ufoVersion == 3 and infoAttribute in fontInfoAttributesVersion3:
copy = True
if copy:
value = getattr(sourceInfo, infoAttribute)
setattr(targetInfo, infoAttribute, value)
def addKerning(self, instanceLocation=None, sources=None):
"""
Calculate the kerning data for this location and add it to this instance.
* instanceLocation: Location object
* sources: dict of {sourcename: (source, sourceLocation)}
"""
items = []
kerningObject = self.font.kerning
kerningMasters = []
if instanceLocation is None:
instanceLocation = self.locationObject
if sources is None:
# kerning has no special requests, add the default sources
sources = self.sources
for sourceName, (source, sourceLocation) in sources.items():
if sourceName in self.muted['kerning']:
# kerning in this master was muted, so do not add.
if self.verbose and self.logger:
self.logger.info("\tMuting kerning data for %s", instanceLocation)
continue
if len(source.kerning.keys())>0:
items.append((sourceLocation, MathKerning(source.kerning, source.groups)))
if items:
m = None
try:
bias, m = buildMutator(items, axes=self.axes)
except:
if self.logger:
self.logger.exception("\tError processing kerning data. %s", items)
return
instanceObject = m.makeInstance(instanceLocation, bend=self.bendLocations)
if self.roundGeometry:
instanceObject.round()
instanceObject.extractKerning(self.font)
def addGlyph(self, glyphName, unicodes=None, instanceLocation=None, sources=None, note=None):
"""
Calculate a new glyph and add it to this instance.
* glyphName: The name of the glyph
* unicodes: The unicode values for this glyph (optional)
* instanceLocation: Location for this glyph
* sources: List of sources for this glyph.
* note: Note for this glyph.
"""
self.font.newGlyph(glyphName)
glyphObject = self.font[glyphName]
if note is not None:
glyphObject.note = note
# why does this not save?
if unicodes is not None:
glyphObject.unicodes = unicodes
if instanceLocation is None:
instanceLocation = self.locationObject
glyphMasters = []
if sources is None:
# glyph has no special requests, add the default sources
for sourceName, (source, sourceLocation) in self.sources.items():
if glyphName in self.muted['glyphs'].get(sourceName, []):
# this glyph in this master was muted, so do not add.
continue
d = dict( font=source,
location=sourceLocation,
glyphName=glyphName)
glyphMasters.append(d)
else:
# use the glyph sources provided
# if self.verbose and self.logger:
# self.logger.info("\tGlyph %s has special masters %s", glyphName, sources)
glyphMasters = sources
# make the glyphs
try:
self._calculateGlyph(glyphObject, instanceLocation, glyphMasters)
except:
self._failed.append(glyphName)
def _calculateGlyph(self, targetGlyphObject, instanceLocationObject, glyphMasters):
"""
Build a Mutator object for this glyph.
* name: glyphName
* location: Location object
* glyphMasters: dict with font objects.
"""
sources = None
items = []
for item in glyphMasters:
locationObject = item['location']
fontObject = item['font']
glyphName = item['glyphName']
if not glyphName in fontObject:
continue
glyphObject = MathGlyph(fontObject[glyphName])
items.append((locationObject, glyphObject))
bias, m = buildMutator(items, axes=self.axes)
instanceObject = m.makeInstance(instanceLocationObject, bend=self.bendLocations)
if self.roundGeometry:
try:
instanceObject = instanceObject.round()
except AttributeError:
if self.verbose and self.logger:
self.logger.info("MathGlyph object missing round() method.")
try:
instanceObject.extractGlyph(targetGlyphObject, onlyGeometry=True)
except TypeError:
# this causes ruled glyphs to end up in the wrong glyphname
# but defcon2 objects don't support it
pPen = targetGlyphObject.getPointPen()
targetGlyphObject.clear()
instanceObject.drawPoints(pPen)
targetGlyphObject.width = instanceObject.width
def save(self):
""" Save the UFO."""
# handle glyphs that were muted
for name in self.mutedGlyphsNames:
if name not in self.font: continue
if self.logger:
self.logger.info("removing muted glyph %s", name)
del self.font[name]
# XXX housekeeping:
# remove glyph from groups / kerning as well?
# remove components referencing this glyph?
# fontTools.ufoLib no longer calls os.makedirs for us if the
# parent directories of the Font we are saving do not exist.
# We want to keep backward compatibility with the previous
# MutatorMath behavior, so we create the instance's parent
# directories if they do not exist. We assume that the user
# knows what they are doing...
directory = os.path.dirname(os.path.normpath(self.path))
if directory and not os.path.exists(directory):
os.makedirs(directory)
try:
self.font.save(os.path.abspath(self.path), self.ufoVersion)
except defcon.DefconError as error:
if self.logger:
self.logger.exception("Error generating.")
return False, error.report
return True, None
|
class InstanceWriter(object):
'''
Simple object to build a UFO instance.
Collect the data needed for an instance
and generate it as fast as possible.
Make a font object.
Add data straight to the font.
Calculate the data immediately, while reading the document.
Don't edit the data.
Don't represent the data.
'''
def __init__(self, path, ufoVersion=1,
roundGeometry=False,
axes=None,
verbose=False,
logger=None,
bendLocations=False,
):
pass
def setSources(self, sources):
''' Set a list of sources.'''
pass
def setMuted(self, muted):
''' Set the mute states. '''
pass
def muteGlyph(self, glyphName):
''' Mute the generating of this specific glyph. '''
pass
def setGroups(self, groups, kerningGroupConversionRenameMaps=None):
''' Copy the groups into our font. '''
pass
def getFailed(self):
''' Return the list of glyphnames that failed to generate.'''
pass
def getMissingUnicodes(self):
''' Return the list of glyphnames with missing unicode values. '''
pass
def setLib(self, lib):
''' Copy the lib items into our font. '''
pass
def setPostScriptFontName(self, name):
''' Set the postScriptFontName. '''
pass
def setStyleMapFamilyName(self, name):
''' Set the stylemap FamilyName. '''
pass
def setStyleMapStyleName(self, name):
''' Set the stylemap StyleName. '''
pass
def setStyleName(self, name):
''' Set the styleName. '''
pass
def setFamilyName(self, name):
''' Set the familyName'''
pass
def copyFeatures(self, featureSource):
''' Copy the features from this source '''
pass
def makeUnicodeMapFromSources(self):
''' Create a dict with glyphName -> unicode value pairs
using the data in the sources.
If all master glyphs have the same unicode value
this value will be used in the map.
If master glyphs have conflicting values, a warning will be printed and no value will be used.
If only a single master has a value, that value will be used.
'''
pass
def getAvailableGlyphnames(self):
''' Return a list of all glyphnames we have masters for.'''
pass
def setLocation(self, locationObject):
''' Set the location directly. '''
pass
def addInfo(self, instanceLocation=None, sources=None, copySourceName=None):
''' Add font info data. '''
pass
def _copyFontInfo(self, targetInfo, sourceInfo):
''' Copy the non-calculating fields from the source info.
'''
pass
def addKerning(self, instanceLocation=None, sources=None):
'''
Calculate the kerning data for this location and add it to this instance.
* instanceLocation: Location object
* sources: dict of {sourcename: (source, sourceLocation)}
'''
pass
def addGlyph(self, glyphName, unicodes=None, instanceLocation=None, sources=None, note=None):
'''
Calculate a new glyph and add it to this instance.
* glyphName: The name of the glyph
* unicodes: The unicode values for this glyph (optional)
* instanceLocation: Location for this glyph
* sources: List of sources for this glyph.
* note: Note for this glyph.
'''
pass
def _calculateGlyph(self, targetGlyphObject, instanceLocationObject, glyphMasters):
'''
Build a Mutator object for this glyph.
* name: glyphName
* location: Location object
* glyphMasters: dict with font objects.
'''
pass
def save(self):
''' Save the UFO.'''
pass
| 24 | 23 | 16 | 0 | 12 | 4 | 4 | 0.33 | 1 | 5 | 0 | 0 | 23 | 18 | 23 | 23 | 398 | 35 | 279 | 101 | 249 | 91 | 230 | 94 | 206 | 12 | 1 | 4 | 91 |
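A hedged sketch of using InstanceWriter directly, without a designspace document; the master UFO paths, axis name, and locations are hypothetical, and Location is assumed to come from mutatorMath.objects.location.
# Sketch: generating one interpolated UFO with InstanceWriter (hypothetical paths).
import defcon
from mutatorMath.objects.location import Location
from mutatorMath.ufo.instance import InstanceWriter

masters = {
    "light": (defcon.Font("MyFamily-Light.ufo"), Location(weight=0)),
    "bold":  (defcon.Font("MyFamily-Bold.ufo"),  Location(weight=1000)),
}

writer = InstanceWriter("instances/MyFamily-Medium.ufo", ufoVersion=3, roundGeometry=True)
writer.setSources(masters)                    # {name: (font, location)}
writer.setLocation(Location(weight=500))      # where to interpolate
writer.setFamilyName("MyFamily")
writer.setStyleName("Medium")

unicodeMap = writer.makeUnicodeMapFromSources()
for glyphName in writer.getAvailableGlyphnames():
    writer.addGlyph(glyphName, unicodeMap.get(glyphName))
writer.addKerning()
writer.addInfo(copySourceName="light")        # copy the non-interpolating info fields
success, report = writer.save()
print(success, writer.getFailed(), writer.getMissingUnicodes())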
145,376 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/ufo/document.py
|
mutatorMath.ufo.document.DesignSpaceDocumentWriter
|
class DesignSpaceDocumentWriter(object):
"""
Writer for a design space description file.
* path: path for the document
* toolVersion: version of this tool
"""
_whiteSpace = " "
def __init__(self, path, toolVersion=3, verbose=False):
self.path = path
self.toolVersion = toolVersion
self.verbose = verbose
self.root = ET.Element("designspace")
self.root.attrib['format'] = "%d"%toolVersion
self.root.append(ET.Element("axes"))
self.root.append(ET.Element("sources"))
self.root.append(ET.Element("instances"))
self.logger = None
if verbose:
self.logger = logging.getLogger("mutatorMath")
self.currentInstance = None
def save(self, pretty=True):
""" Save the xml. Make pretty if necessary. """
self.endInstance()
if pretty:
_indent(self.root, whitespace=self._whiteSpace)
tree = ET.ElementTree(self.root)
tree.write(self.path, encoding="utf-8", method='xml', xml_declaration=True)
if self.logger:
self.logger.info("Writing %s", self.path)
def _makeLocationElement(self, locationObject, name=None):
""" Convert Location object to an locationElement."""
locElement = ET.Element("location")
if name is not None:
locElement.attrib['name'] = name
for dimensionName, dimensionValue in locationObject.items():
dimElement = ET.Element('dimension')
dimElement.attrib['name'] = dimensionName
if type(dimensionValue)==tuple:
dimElement.attrib['xvalue'] = "%f"%dimensionValue[0]
dimElement.attrib['yvalue'] = "%f"%dimensionValue[1]
else:
dimElement.attrib['xvalue'] = "%f"%dimensionValue
locElement.append(dimElement)
return locElement
def _posixPathRelativeToDocument(self, otherPath):
relative = os.path.relpath(otherPath, os.path.dirname(self.path))
return posixpath.join(*relative.split(os.path.sep))
def addSource(self,
path,
name,
location,
copyLib=False,
copyGroups=False,
copyInfo=False,
copyFeatures=False,
muteKerning=False,
muteInfo=False,
mutedGlyphNames=None,
familyName=None,
styleName=None,
):
"""
Add a new UFO source to the document.
* path: path to this UFO, will be written as a relative path to the document path.
* name: reference name for this source
* location: name of the location for this UFO
* copyLib: copy the contents of this source to instances
* copyGroups: copy the groups of this source to instances
* copyInfo: copy the non-numerical fields from this source.info to instances.
* copyFeatures: copy the feature text from this source to instances
* muteKerning: mute the kerning data from this source
* muteInfo: mute the font info data from this source
* familyName: family name for this UFO (to be able to work on the names without reading the whole UFO)
* styleName: style name for this UFO (to be able to work on the names without reading the whole UFO)
Note: there is no separate flag to mute an entire font: a fully muted source is simply not added.
"""
sourceElement = ET.Element("source")
sourceElement.attrib['filename'] = self._posixPathRelativeToDocument(path)
sourceElement.attrib['name'] = name
if copyLib:
libElement = ET.Element('lib')
libElement.attrib['copy'] = "1"
sourceElement.append(libElement)
if copyGroups:
groupsElement = ET.Element('groups')
groupsElement.attrib['copy'] = "1"
sourceElement.append(groupsElement)
if copyFeatures:
featuresElement = ET.Element('features')
featuresElement.attrib['copy'] = "1"
sourceElement.append(featuresElement)
if copyInfo or muteInfo:
# copy info:
infoElement = ET.Element('info')
if copyInfo:
infoElement.attrib['copy'] = "1"
if muteInfo:
infoElement.attrib['mute'] = "1"
sourceElement.append(infoElement)
if muteKerning:
# add kerning element to the source
kerningElement = ET.Element("kerning")
kerningElement.attrib["mute"] = '1'
sourceElement.append(kerningElement)
if mutedGlyphNames:
# add muted glyphnames to the source
for name in mutedGlyphNames:
glyphElement = ET.Element("glyph")
glyphElement.attrib["name"] = name
glyphElement.attrib["mute"] = '1'
sourceElement.append(glyphElement)
if familyName is not None:
sourceElement.attrib['familyname'] = familyName
if styleName is not None:
sourceElement.attrib['stylename'] = styleName
locationElement = self._makeLocationElement(location)
sourceElement.append(locationElement)
self.root.findall('.sources')[0].append(sourceElement)
def startInstance(self, name=None,
location=None,
familyName=None,
styleName=None,
fileName=None,
postScriptFontName=None,
styleMapFamilyName=None,
styleMapStyleName=None,
):
""" Start a new instance.
Instances can need a lot of configuration.
So this method starts a new instance element. Use endInstance() to finish it.
* name: the name of this instance
* familyName: name for the font.info.familyName field. Required.
* styleName: name for the font.info.styleName field. Required.
* fileName: filename for the instance UFO file. Required.
* postScriptFontName: name for the font.info.postScriptFontName field. Optional.
* styleMapFamilyName: name for the font.info.styleMapFamilyName field. Optional.
* styleMapStyleName: name for the font.info.styleMapStyleName field. Optional.
"""
if self.currentInstance is not None:
# We still have the previous one open
self.endInstance()
instanceElement = ET.Element('instance')
if name is not None:
instanceElement.attrib['name'] = name
if location is not None:
locationElement = self._makeLocationElement(location)
instanceElement.append(locationElement)
if familyName is not None:
instanceElement.attrib['familyname'] = familyName
if styleName is not None:
instanceElement.attrib['stylename'] = styleName
if fileName is not None:
instanceElement.attrib['filename'] = self._posixPathRelativeToDocument(fileName)
if postScriptFontName is not None:
instanceElement.attrib['postscriptfontname'] = postScriptFontName
if styleMapFamilyName is not None:
instanceElement.attrib['stylemapfamilyname'] = styleMapFamilyName
if styleMapStyleName is not None:
instanceElement.attrib['stylemapstylename'] = styleMapStyleName
self.currentInstance = instanceElement
def endInstance(self):
"""
Finalise the instance definition started by startInstance().
"""
if self.currentInstance is None:
return
allInstances = self.root.findall('.instances')[0].append(self.currentInstance)
self.currentInstance = None
def writeGlyph(self,
name,
unicodes=None,
location=None,
masters=None,
note=None,
mute=False,
):
""" Add a new glyph to the current instance.
* name: the glyph name. Required.
* unicodes: unicode values for this glyph if it needs to be different from the unicode values associated with this glyph name in the masters.
* location: a design space location for this glyph if it needs to be different from the instance location.
* masters: a list of masters and locations for this glyph if they need to be different from the masters specified for this instance.
* note: a note for this glyph
* mute: if this glyph is muted. None of the other attributes matter if this one is true.
"""
if self.currentInstance is None:
return
glyphElement = ET.Element('glyph')
if mute:
glyphElement.attrib['mute'] = "1"
if unicodes is not None:
glyphElement.attrib['unicode'] = " ".join([hex(u) for u in unicodes])
if location is not None:
locationElement = self._makeLocationElement(location)
glyphElement.append(locationElement)
if name is not None:
glyphElement.attrib['name'] = name
if note is not None:
noteElement = ET.Element('note')
noteElement.text = note
glyphElement.append(noteElement)
if masters is not None:
mastersElement = ET.Element("masters")
for glyphName, masterName, location in masters:
masterElement = ET.Element("master")
if glyphName is not None:
masterElement.attrib['glyphname'] = glyphName
masterElement.attrib['source'] = masterName
if location is not None:
locationElement = self._makeLocationElement(location)
masterElement.append(locationElement)
mastersElement.append(masterElement)
glyphElement.append(mastersElement)
if self.currentInstance.findall('.glyphs') == []:
glyphsElement = ET.Element('glyphs')
self.currentInstance.append(glyphsElement)
else:
glyphsElement = self.currentInstance.findall('.glyphs')[0]
glyphsElement.append(glyphElement)
def writeInfo(self, location=None, masters=None):
""" Write font into the current instance.
Note: the masters attribute is ignored at the moment.
"""
if self.currentInstance is None:
return
infoElement = ET.Element("info")
if location is not None:
locationElement = self._makeLocationElement(location)
infoElement.append(locationElement)
self.currentInstance.append(infoElement)
def writeKerning(self, location=None, masters=None):
""" Write kerning into the current instance.
Note: the masters attribute is ignored at the moment.
"""
if self.currentInstance is None:
return
kerningElement = ET.Element("kerning")
if location is not None:
locationElement = self._makeLocationElement(location)
kerningElement.append(locationElement)
self.currentInstance.append(kerningElement)
def writeWarp(self, warpDict):
""" Write a list of (in, out) values for a warpmap """
warpElement = ET.Element("warp")
axisNames = sorted(warpDict.keys())
for name in axisNames:
axisElement = ET.Element("axis")
axisElement.attrib['name'] = name
for a, b in warpDict[name]:
warpPt = ET.Element("map")
warpPt.attrib['input'] = str(a)
warpPt.attrib['output'] = str(b)
axisElement.append(warpPt)
warpElement.append(axisElement)
self.root.append(warpElement)
def addAxis(self, tag, name, minimum, maximum, default, warpMap=None):
""" Write an axis element.
This will be added to the <axes> element.
"""
axisElement = ET.Element("axis")
axisElement.attrib['name'] = name
axisElement.attrib['tag'] = tag
axisElement.attrib['minimum'] = str(minimum)
axisElement.attrib['maximum'] = str(maximum)
axisElement.attrib['default'] = str(default)
if warpMap is not None:
for a, b in warpMap:
warpPt = ET.Element("map")
warpPt.attrib['input'] = str(a)
warpPt.attrib['output'] = str(b)
axisElement.append(warpPt)
self.root.findall('.axes')[0].append(axisElement)
|
class DesignSpaceDocumentWriter(object):
'''
Writer for a design space description file.
* path: path for the document
* toolVersion: version of this tool
'''
def __init__(self, path, toolVersion=3, verbose=False):
pass
def save(self, pretty=True):
''' Save the xml. Make pretty if necessary. '''
pass
def _makeLocationElement(self, locationObject, name=None):
''' Convert a Location object to a locationElement.'''
pass
def _posixPathRelativeToDocument(self, otherPath):
pass
def addSource(self,
path,
name,
location,
copyLib=False,
copyGroups=False,
copyInfo=False,
copyFeatures=False,
muteKerning=False,
muteInfo=False,
mutedGlyphNames=None,
familyName=None,
styleName=None,
):
'''
Add a new UFO source to the document.
* path: path to this UFO, will be written as a relative path to the document path.
* name: reference name for this source
* location: name of the location for this UFO
* copyLib: copy the contents of this source to instances
* copyGroups: copy the groups of this source to instances
* copyInfo: copy the non-numerical fields from this source.info to instances.
* copyFeatures: copy the feature text from this source to instances
* muteKerning: mute the kerning data from this source
* muteInfo: mute the font info data from this source
* familyName: family name for this UFO (to be able to work on the names without reading the whole UFO)
* styleName: style name for this UFO (to be able to work on the names without reading the whole UFO)
Note: there is no separate flag to mute an entire font: a fully muted source is simply not added.
'''
pass
def startInstance(self, name=None,
location=None,
familyName=None,
styleName=None,
fileName=None,
postScriptFontName=None,
styleMapFamilyName=None,
styleMapStyleName=None,
):
''' Start a new instance.
Instances can need a lot of configuration.
So this method starts a new instance element. Use endInstance() to finish it.
* name: the name of this instance
* familyName: name for the font.info.familyName field. Required.
* styleName: name for the font.info.styleName field. Required.
* fileName: filename for the instance UFO file. Required.
* postScriptFontName: name for the font.info.postScriptFontName field. Optional.
* styleMapFamilyName: name for the font.info.styleMapFamilyName field. Optional.
* styleMapStyleName: name for the font.info.styleMapStyleName field. Optional.
'''
pass
def endInstance(self):
'''
Finalise the instance definition started by startInstance().
'''
pass
def writeGlyph(self,
name,
unicodes=None,
location=None,
masters=None,
note=None,
mute=False,
):
''' Add a new glyph to the current instance.
* name: the glyph name. Required.
* unicodes: unicode values for this glyph if it needs to be different from the unicode values associated with this glyph name in the masters.
* location: a design space location for this glyph if it needs to be different from the instance location.
* masters: a list of masters and locations for this glyph if they need to be different from the masters specified for this instance.
* note: a note for this glyph
* mute: if this glyph is muted. None of the other attributes matter if this one is true.
'''
pass
def writeInfo(self, location=None, masters=None):
''' Write font into the current instance.
Note: the masters attribute is ignored at the moment.
'''
pass
def writeKerning(self, location=None, masters=None):
''' Write kerning into the current instance.
Note: the masters attribute is ignored at the moment.
'''
pass
def writeWarp(self, warpDict):
''' Write a list of (in, out) values for a warpmap '''
pass
def addAxis(self, tag, name, minimum, maximum, default, warpMap=None):
''' Write an axis element.
This will be added to the <axes> element.
'''
pass
| 13 | 11 | 23 | 1 | 18 | 4 | 5 | 0.27 | 1 | 5 | 0 | 0 | 12 | 6 | 12 | 12 | 298 | 27 | 213 | 84 | 172 | 58 | 183 | 56 | 170 | 12 | 1 | 3 | 58 |
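A hedged sketch of writing a small designspace document with the writer above; the file names, axis values, and instance names are hypothetical.
# Sketch: composing a minimal designspace file (hypothetical paths and values).
from mutatorMath.objects.location import Location
from mutatorMath.ufo.document import DesignSpaceDocumentWriter

doc = DesignSpaceDocumentWriter("MyFamily.designspace")
doc.addAxis("wght", "weight", minimum=0, maximum=1000, default=0)
doc.addSource("masters/MyFamily-Light.ufo", name="master.light",
              location=Location(weight=0),
              copyInfo=True, copyGroups=True, copyFeatures=True)
doc.addSource("masters/MyFamily-Bold.ufo", name="master.bold",
              location=Location(weight=1000))
doc.startInstance(name="instance.medium",
                  location=Location(weight=500),
                  familyName="MyFamily",
                  styleName="Medium",
                  fileName="instances/MyFamily-Medium.ufo")
doc.writeInfo()     # interpolate font info at the instance location
doc.writeKerning()  # interpolate kerning at the instance location
doc.endInstance()
doc.save()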
145,377 |
LettError/MutatorMath
|
LettError_MutatorMath/Lib/mutatorMath/ufo/document.py
|
mutatorMath.ufo.document.DesignSpaceDocumentReader
|
class DesignSpaceDocumentReader(object):
""" Read a designspace description.
Build Instance objects, generate them.
* documentPath: path of the document to read
* ufoVersion: target UFO version
* roundGeometry: apply rounding to all geometry
"""
_fontClass = defcon.Font
_glyphClass = defcon.Glyph
_libClass = defcon.Lib
_glyphContourClass = defcon.Contour
_glyphPointClass = defcon.Point
_glyphComponentClass = defcon.Component
_glyphAnchorClass = defcon.Anchor
_kerningClass = defcon.Kerning
_groupsClass = defcon.Groups
_infoClass = defcon.Info
_featuresClass = defcon.Features
_instanceWriterClass = InstanceWriter
_tempFontLibGlyphMuteKey = "_mutatorMath.temp.mutedGlyphNames"
_tempFontLocationKey = "_mutatorMath.temp.fontLocation"
def __init__(self, documentPath,
ufoVersion,
roundGeometry=False,
verbose=False,
logPath=None,
progressFunc=None
):
self.path = documentPath
self.ufoVersion = ufoVersion
self.roundGeometry = roundGeometry
self.documentFormatVersion = 0
self.sources = {}
self.instances = {}
self.axes = {} # dict with axes info
self.axesOrder = [] # order in which the axes were defined
self.warpDict = None # let's stop using this one
self.libSource = None
self.groupsSource = None
self.infoSource = None
self.featuresSource = None
self.progressFunc=progressFunc
self.muted = dict(kerning=[], info=[], glyphs={})
self.verbose = verbose
self.logger = None
if self.verbose:
self.logger = logging.getLogger("mutatorMath")
self.results = {} # dict with instancename / filepaths for post processing.
tree = ET.parse(self.path)
self.root = tree.getroot()
self.readVersion()
assert self.documentFormatVersion >= 3
self.readAxes()
self.readWarp()
self.readSources()
def reportProgress(self, state, action, text=None, tick=None):
""" If we want to keep other code updated about our progress.
state: 'prep' reading sources
'generate' making instances
'done' wrapping up
'error' reporting a problem
action: 'start' begin generating
'stop' end generating
'source' which ufo we're reading
text: <file.ufo> ufoname (for instance)
tick: a float between 0 and 1 indicating progress.
"""
if self.progressFunc is not None:
self.progressFunc(state=state, action=action, text=text, tick=tick)
def getSourcePaths(self, makeGlyphs=True, makeKerning=True, makeInfo=True):
""" Return a list of paths referenced in the document."""
paths = []
for name in self.sources.keys():
paths.append(self.sources[name][0].path)
return paths
def process(
self,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
""" Process the input file and generate the instances. """
if self.logger:
self.logger.info("Reading %s", self.path)
self.readInstances(
makeGlyphs=makeGlyphs,
makeKerning=makeKerning,
makeInfo=makeInfo,
bendLocations=bendLocations,
)
self.reportProgress("done", 'stop')
def readVersion(self):
""" Read the document version.
::
<designspace format="3">
"""
ds = self.root.findall("[@format]")[0]
raw_format = ds.attrib['format']
try:
self.documentFormatVersion = int(raw_format)
except ValueError:
# as of fontTools >= 3.27 'format' is formatted as a float "4.0"
self.documentFormatVersion = float(raw_format)
def readWarp(self):
""" Read the warp element
::
<warp>
<axis name="weight">
<map input="0" output="0" />
<map input="500" output="200" />
<map input="1000" output="1000" />
</axis>
</warp>
"""
warpDict = {}
for warpAxisElement in self.root.findall(".warp/axis"):
axisName = warpAxisElement.attrib.get("name")
warpDict[axisName] = []
for warpPoint in warpAxisElement.findall(".map"):
inputValue = float(warpPoint.attrib.get("input"))
outputValue = float(warpPoint.attrib.get("output"))
warpDict[axisName].append((inputValue, outputValue))
self.warpDict = warpDict
def readAxes(self):
""" Read the axes element.
"""
for axisElement in self.root.findall(".axes/axis"):
axis = {}
axis['name'] = name = axisElement.attrib.get("name")
axis['tag'] = axisElement.attrib.get("tag")
axis['minimum'] = float(axisElement.attrib.get("minimum"))
axis['maximum'] = float(axisElement.attrib.get("maximum"))
axis['default'] = float(axisElement.attrib.get("default"))
# we're not using the map for anything.
axis['map'] = []
for warpPoint in axisElement.findall(".map"):
inputValue = float(warpPoint.attrib.get("input"))
outputValue = float(warpPoint.attrib.get("output"))
axis['map'].append((inputValue, outputValue))
# there are labelnames in the element
# but we don't need them for building the fonts.
self.axes[name] = axis
self.axesOrder.append(axis['name'])
def readSources(self):
""" Read the source elements.
::
<source filename="LightCondensed.ufo" location="location-token-aaa" name="master-token-aaa1">
<info mute="1" copy="1"/>
<kerning mute="1"/>
<glyph mute="1" name="thirdGlyph"/>
</source>
"""
for sourceCount, sourceElement in enumerate(self.root.findall(".sources/source")):
# shall we just read the UFO here?
filename = sourceElement.attrib.get('filename')
# filename is a path relative to the document path; resolve it first.
sourcePath = os.path.abspath(os.path.join(os.path.dirname(self.path), filename))
sourceName = sourceElement.attrib.get('name')
if sourceName is None:
# if the source element has no name attribute
# (some authoring tools do not need them)
# then we should make a temporary one. We still need it for reference.
sourceName = "temp_master.%d"%(sourceCount)
self.reportProgress("prep", 'load', sourcePath)
if not os.path.exists(sourcePath):
raise MutatorError("Source not found at %s"%sourcePath)
sourceObject = self._instantiateFont(sourcePath)
# read the locations
sourceLocationObject = None
sourceLocationObject = self.locationFromElement(sourceElement)
if sourceLocationObject is None:
raise MutatorError("No location defined for source %s"%sourceName)
# read lib flag
for libElement in sourceElement.findall('.lib'):
if libElement.attrib.get('copy') == '1':
self.libSource = sourceName
# read the groups flag
for groupsElement in sourceElement.findall('.groups'):
if groupsElement.attrib.get('copy') == '1':
self.groupsSource = sourceName
# read the info flag
for infoElement in sourceElement.findall(".info"):
if infoElement.attrib.get('copy') == '1':
self.infoSource = sourceName
if infoElement.attrib.get('mute') == '1':
self.muted['info'].append(sourceName)
# read the features flag
for featuresElement in sourceElement.findall(".features"):
if featuresElement.attrib.get('copy') == '1':
if self.featuresSource is not None:
self.featuresSource = None
else:
self.featuresSource = sourceName
mutedGlyphs = []
for glyphElement in sourceElement.findall(".glyph"):
glyphName = glyphElement.attrib.get('name')
if glyphName is None:
continue
if glyphElement.attrib.get('mute') == '1':
if not sourceName in self.muted['glyphs']:
self.muted['glyphs'][sourceName] = []
self.muted['glyphs'][sourceName].append(glyphName)
for kerningElement in sourceElement.findall(".kerning"):
if kerningElement.attrib.get('mute') == '1':
self.muted['kerning'].append(sourceName)
# store
self.sources[sourceName] = sourceObject, sourceLocationObject
self.reportProgress("prep", 'done')
def locationFromElement(self, element):
"""
Find the MutatorMath location of this element, either by name or from a child element.
"""
elementLocation = None
for locationElement in element.findall('.location'):
elementLocation = self.readLocationElement(locationElement)
break
return elementLocation
def readLocationElement(self, locationElement):
""" Format 0 location reader """
loc = Location()
for dimensionElement in locationElement.findall(".dimension"):
dimName = dimensionElement.attrib.get("name")
xValue = yValue = None
try:
xValue = dimensionElement.attrib.get('xvalue')
xValue = float(xValue)
except ValueError:
if self.logger:
self.logger.info("KeyError in readLocation xValue %3.3f", xValue)
try:
yValue = dimensionElement.attrib.get('yvalue')
if yValue is not None:
yValue = float(yValue)
except ValueError:
pass
if yValue is not None:
loc[dimName] = (xValue, yValue)
else:
loc[dimName] = xValue
return loc
def readInstance(
self,
key,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
""" Read a single instance element.
key: an (attribute, value) tuple used to find the requested instance.
::
<instance familyname="SuperFamily" filename="OutputNameInstance1.ufo" location="location-token-aaa" stylename="Regular">
"""
attrib, value = key
for instanceElement in self.root.findall('.instances/instance'):
if instanceElement.attrib.get(attrib) == value:
self._readSingleInstanceElement(
instanceElement,
makeGlyphs=makeGlyphs,
makeKerning=makeKerning,
makeInfo=makeInfo,
bendLocations=bendLocations,
)
return
raise MutatorError("No instance found with key: (%s, %s)." % key)
def readInstances(
self,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
""" Read all instance elements.
::
<instance familyname="SuperFamily" filename="OutputNameInstance1.ufo" location="location-token-aaa" stylename="Regular">
"""
for instanceElement in self.root.findall('.instances/instance'):
self._readSingleInstanceElement(
instanceElement,
makeGlyphs=makeGlyphs,
makeKerning=makeKerning,
makeInfo=makeInfo,
bendLocations=bendLocations,
)
def _readSingleInstanceElement(
self,
instanceElement,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
""" Read a single instance element.
If we have glyph specifications, only make those.
Otherwise make all available glyphs.
"""
# get the data from the instanceElement itself
filename = instanceElement.attrib.get('filename')
instancePath = os.path.join(os.path.dirname(self.path), filename)
self.reportProgress("generate", 'start', instancePath)
if self.verbose and self.logger:
self.logger.info("\tGenerating instance %s", os.path.basename(instancePath))
filenameTokenForResults = os.path.basename(filename)
instanceObject = self._instanceWriterClass(
instancePath,
ufoVersion=self.ufoVersion,
roundGeometry=self.roundGeometry,
axes = self.axes,
verbose=self.verbose,
logger=self.logger,
bendLocations=bendLocations,
)
self.results[filenameTokenForResults] = instancePath
# set the masters
instanceObject.setSources(self.sources)
self.unicodeMap = instanceObject.makeUnicodeMapFromSources()
instanceObject.setMuted(self.muted)
familyname = instanceElement.attrib.get('familyname')
if familyname is not None:
instanceObject.setFamilyName(familyname)
stylename = instanceElement.attrib.get('stylename')
if stylename is not None:
instanceObject.setStyleName(stylename)
postScriptFontName = instanceElement.attrib.get('postscriptfontname')
if postScriptFontName is not None:
instanceObject.setPostScriptFontName(postScriptFontName)
styleMapFamilyName = instanceElement.attrib.get('stylemapfamilyname')
if styleMapFamilyName is not None:
instanceObject.setStyleMapFamilyName(styleMapFamilyName)
styleMapStyleName = instanceElement.attrib.get('stylemapstylename')
if styleMapStyleName is not None:
instanceObject.setStyleMapStyleName(styleMapStyleName)
# location
instanceLocation = self.locationFromElement(instanceElement)
if instanceLocation is not None:
instanceObject.setLocation(instanceLocation)
if makeGlyphs:
# step 1: generate all glyphs we have mutators for.
names = instanceObject.getAvailableGlyphnames()
for n in names:
unicodes = self.unicodeMap.get(n, None)
try:
instanceObject.addGlyph(n, unicodes)
except AssertionError:
if self.verbose and self.logger:
self.logger.info("Problem making glyph %s, skipping.", n)
# step 2: generate all the glyphs that have special definitions.
for glyphElement in instanceElement.findall('.glyphs/glyph'):
self.readGlyphElement(glyphElement, instanceObject)
# read the kerning
if makeKerning:
for kerningElement in instanceElement.findall('.kerning'):
self.readKerningElement(kerningElement, instanceObject)
break
# read the fontinfo
if makeInfo:
for infoElement in instanceElement.findall('.info'):
self.readInfoElement(infoElement, instanceObject)
# copy the features
if self.featuresSource is not None:
instanceObject.copyFeatures(self.featuresSource)
# copy the groups
if self.groupsSource is not None:
if self.groupsSource in self.sources:
groupSourceObject, loc = self.sources[self.groupsSource]
# copy the groups from the designated source to the new instance
# note: setGroups will filter the group members
# only glyphs present in the font will be added to the group.
# Depending on the ufoversion we might or might not expect the kerningGroupConversionRenameMaps attribute.
if hasattr(groupSourceObject, "kerningGroupConversionRenameMaps"):
renameMap = groupSourceObject.kerningGroupConversionRenameMaps
else:
renameMap = {}
instanceObject.setGroups(groupSourceObject.groups, kerningGroupConversionRenameMaps=renameMap)
# lib items
if self.libSource is not None:
if self.libSource in self.sources:
libSourceObject, loc = self.sources[self.libSource]
instanceObject.setLib(libSourceObject.lib)
# save the instance. Done.
success, report = instanceObject.save()
if not success and self.logger:
# report problems other than validation errors and failed glyphs
self.logger.info("%s:\nErrors generating: %s", filename, report)
# report failed glyphs
failed = instanceObject.getFailed()
if failed:
failed.sort()
msg = "%s:\nErrors calculating %s glyphs: \n%s"%(filename, len(failed),"\t"+"\n\t".join(failed))
self.reportProgress('error', 'glyphs', msg)
if self.verbose and self.logger:
self.logger.info(msg)
# report missing unicodes
missing = instanceObject.getMissingUnicodes()
if missing:
missing.sort()
msg = "%s:\nPossibly missing unicodes for %s glyphs: \n%s"%(filename, len(missing),"\t"+"\n\t".join(missing))
self.reportProgress('error', 'unicodes', msg)
# store
self.instances[postScriptFontName] = instanceObject
self.reportProgress("generate", 'stop', filenameTokenForResults)
def readInfoElement(self, infoElement, instanceObject):
""" Read the info element.
::
<info/>
<info">
<location/>
</info>
"""
infoLocation = self.locationFromElement(infoElement)
instanceObject.addInfo(infoLocation, copySourceName=self.infoSource)
def readKerningElement(self, kerningElement, instanceObject):
""" Read the kerning element.
::
Make kerning at the location and with the masters specified at the instance level.
<kerning/>
"""
kerningLocation = self.locationFromElement(kerningElement)
instanceObject.addKerning(kerningLocation)
def readGlyphElement(self, glyphElement, instanceObject):
"""
Read the glyph element.
::
<glyph name="b" unicode="0x62"/>
<glyph name="b"/>
<glyph name="b">
<master location="location-token-bbb" source="master-token-aaa2"/>
<master glyphname="b.alt1" location="location-token-ccc" source="master-token-aaa3"/>
<note>
This is an instance from an anisotropic interpolation.
</note>
</glyph>
"""
# name
glyphName = glyphElement.attrib.get('name')
if glyphName is None:
raise MutatorError("Glyph object without name attribute.")
# mute
mute = glyphElement.attrib.get("mute")
if mute == "1":
instanceObject.muteGlyph(glyphName)
# we do not need to stick around after this
return
# unicode
unicodes = glyphElement.attrib.get('unicode')
if unicodes == None:
unicodes = self.unicodeMap.get(glyphName, None)
else:
try:
unicodes = [int(u, 16) for u in unicodes.split(" ")]
except ValueError:
raise MutatorError("unicode values %s are not integers" % unicodes)
# note
note = None
for noteElement in glyphElement.findall('.note'):
note = noteElement.text
break
# location
instanceLocation = self.locationFromElement(glyphElement)
# masters
glyphSources = None
for masterElement in glyphElement.findall('.masters/master'):
fontSourceName = masterElement.attrib.get('source')
fontSource, fontLocation = self.sources.get(fontSourceName)
if fontSource is None:
raise MutatorError("Unknown glyph master: %s"%masterElement)
sourceLocation = self.locationFromElement(masterElement)
if sourceLocation is None:
# if we don't read a location, use the instance location
sourceLocation = fontLocation
masterGlyphName = masterElement.attrib.get('glyphname')
if masterGlyphName is None:
# if we don't read a glyphname, use the one we have
masterGlyphName = glyphName
d = dict( font=fontSource,
location=sourceLocation,
glyphName=masterGlyphName)
if glyphSources is None:
glyphSources = []
glyphSources.append(d)
# calculate the glyph
instanceObject.addGlyph(glyphName, unicodes, instanceLocation, glyphSources, note=note)
def _instantiateFont(self, path):
"""
Return an instance of a font object
with all the given subclasses
"""
return self._fontClass(path,
libClass=self._libClass,
kerningClass=self._kerningClass,
groupsClass=self._groupsClass,
infoClass=self._infoClass,
featuresClass=self._featuresClass,
glyphClass=self._glyphClass,
glyphContourClass=self._glyphContourClass,
glyphPointClass=self._glyphPointClass,
glyphComponentClass=self._glyphComponentClass,
glyphAnchorClass=self._glyphAnchorClass)
|
class DesignSpaceDocumentReader(object):
''' Read a designspace description.
Build Instance objects, generate them.
* documentPath: path of the document to read
* ufoVersion: target UFO version
* roundGeometry: apply rounding to all geometry
'''
def __init__(self, documentPath,
ufoVersion,
roundGeometry=False,
verbose=False,
logPath=None,
progressFunc=None
):
pass
def reportProgress(self, state, action, text=None, tick=None):
''' If we want to keep other code updated about our progress.
state: 'prep' reading sources
'generate' making instances
'done' wrapping up
'error' reporting a problem
action: 'start' begin generating
'stop' end generating
'source' which ufo we're reading
text: <file.ufo> ufoname (for instance)
tick: a float between 0 and 1 indicating progress.
'''
pass
def getSourcePaths(self, makeGlyphs=True, makeKerning=True, makeInfo=True):
''' Return a list of paths referenced in the document.'''
pass
def process(
self,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
''' Process the input file and generate the instances. '''
pass
def readVersion(self):
''' Read the document version.
::
<designspace format="3">
'''
pass
def readWarp(self):
''' Read the warp element
::
<warp>
<axis name="weight">
<map input="0" output="0" />
<map input="500" output="200" />
<map input="1000" output="1000" />
</axis>
</warp>
'''
pass
def readAxes(self):
''' Read the axes element.
'''
pass
def readSources(self):
''' Read the source elements.
::
<source filename="LightCondensed.ufo" location="location-token-aaa" name="master-token-aaa1">
<info mute="1" copy="1"/>
<kerning mute="1"/>
<glyph mute="1" name="thirdGlyph"/>
</source>
'''
pass
def locationFromElement(self, element):
'''
Find the MutatorMath location of this element, either by name or from a child element.
'''
pass
def readLocationElement(self, locationElement):
''' Format 0 location reader '''
pass
def readInstance(
self,
key,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
''' Read a single instance element.
key: an (attribute, value) tuple used to find the requested instance.
::
<instance familyname="SuperFamily" filename="OutputNameInstance1.ufo" location="location-token-aaa" stylename="Regular">
'''
pass
def readInstances(
self,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
''' Read all instance elements.
::
<instance familyname="SuperFamily" filename="OutputNameInstance1.ufo" location="location-token-aaa" stylename="Regular">
'''
pass
def _readSingleInstanceElement(
self,
instanceElement,
makeGlyphs=True,
makeKerning=True,
makeInfo=True,
bendLocations=False,
):
''' Read a single instance element.
If we have glyph specifications, only make those.
Otherwise make all available glyphs.
'''
pass
def readInfoElement(self, infoElement, instanceObject):
''' Read the info element.
::
<info/>
<info">
<location/>
</info>
'''
pass
def readKerningElement(self, kerningElement, instanceObject):
''' Read the kerning element.
::
Make kerning at the location and with the masters specified at the instance level.
<kerning/>
'''
pass
def readGlyphElement(self, glyphElement, instanceObject):
'''
Read the glyph element.
::
<glyph name="b" unicode="0x62"/>
<glyph name="b"/>
<glyph name="b">
<master location="location-token-bbb" source="master-token-aaa2"/>
<master glyphname="b.alt1" location="location-token-ccc" source="master-token-aaa3"/>
<note>
This is an instance from an anisotropic interpolation.
</note>
</glyph>
'''
pass
def _instantiateFont(self, path):
'''
Return an instance of a font object
with all the given subclasses
'''
pass
| 18 | 17 | 32 | 3 | 21 | 8 | 5 | 0.37 | 1 | 8 | 2 | 0 | 17 | 20 | 17 | 17 | 579 | 80 | 366 | 162 | 316 | 137 | 293 | 129 | 275 | 27 | 1 | 4 | 92 |
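An illustrative aside on the readGlyphElement code above: the unicode attribute is parsed as space-separated hexadecimal values. The sketch below isolates that parsing and its error path; parse_unicodes is a hypothetical helper, not part of the source.

def parse_unicodes(attribute_value):
    # None means: fall back to the reader's unicode map, exactly as readGlyphElement does
    if attribute_value is None:
        return None
    try:
        # "0x62 0x42" style attribute text becomes a list of ints, base 16
        return [int(u, 16) for u in attribute_value.split(" ")]
    except ValueError:
        raise ValueError("unicode values %s are not integers" % attribute_value)

assert parse_unicodes("0x62") == [0x62]
assert parse_unicodes("0062 0042") == [0x62, 0x42]
assert parse_unicodes(None) is None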
145,378 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/emptyPen.py
|
ufoProcessor.emptyPen.EmptyPen
|
class EmptyPen(AbstractPointPen):
def __init__(self):
self.points = 0
self.contours = 0
self.components = 0
def beginPath(self, identifier=None, **kwargs):
pass
def endPath(self):
self.contours += 1
def addPoint(self, pt, segmentType=None, smooth=False, name=None, identifier=None, **kwargs):
self.points+=1
def addComponent(self, baseGlyphName=None, transformation=None, identifier=None, **kwargs):
self.components+=1
def getCount(self):
return self.points, self.contours, self.components
def isEmpty(self):
return self.points==0 and self.contours==0 and self.components==0
|
class EmptyPen(AbstractPointPen):
def __init__(self):
pass
def beginPath(self, identifier=None, **kwargs):
pass
def endPath(self):
pass
def addPoint(self, pt, segmentType=None, smooth=False, name=None, identifier=None, **kwargs):
pass
def addComponent(self, baseGlyphName=None, transformation=None, identifier=None, **kwargs):
pass
def getCount(self):
pass
def isEmpty(self):
pass
| 8 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 7 | 3 | 7 | 7 | 24 | 7 | 17 | 11 | 9 | 0 | 17 | 11 | 9 | 1 | 1 | 0 | 7 |
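A hedged usage sketch for the EmptyPen counting pen above; it assumes defcon is available and simply feeds an empty defcon Glyph through the pen.

import defcon
from ufoProcessor.emptyPen import EmptyPen

glyph = defcon.Glyph()      # an empty glyph, nothing drawn yet
pen = EmptyPen()
glyph.drawPoints(pen)       # stream the glyph's points into the counting pen
print(pen.getCount())       # (points, contours, components) -> (0, 0, 0)
print(pen.isEmpty())        # True: no points, contours or components were seen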
145,379 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/__init__.py
|
ufoProcessor.UFOProcessorError
|
class UFOProcessorError(Exception):
def __init__(self, msg, obj=None):
self.msg = msg
self.obj = obj
def __str__(self):
return repr(self.msg) + repr(self.obj)
|
class UFOProcessorError(Exception):
def __init__(self, msg, obj=None):
pass
def __str__(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 12 | 7 | 1 | 6 | 5 | 3 | 0 | 6 | 5 | 3 | 1 | 3 | 0 | 2 |
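A minimal sketch of how the UFOProcessorError above carries both a message and the offending object; the message text is only an example.

from ufoProcessor import UFOProcessorError

try:
    raise UFOProcessorError("no default font found", obj=None)
except UFOProcessorError as e:
    print(e)   # __str__ concatenates repr(msg) and repr(obj): 'no default font found'None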
145,380 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/__init__.py
|
ufoProcessor.DesignSpaceProcessor
|
class DesignSpaceProcessor(DesignSpaceDocument):
"""
A subclassed DesignSpaceDocument that can
- process the document and generate finished UFOs with MutatorMath or varLib.model.
- read and write documents
- Replacement for the mutatorMath.ufo generator.
"""
fontClass = defcon.Font
layerClass = defcon.Layer
glyphClass = defcon.Glyph
libClass = defcon.Lib
glyphContourClass = defcon.Contour
glyphPointClass = defcon.Point
glyphComponentClass = defcon.Component
glyphAnchorClass = defcon.Anchor
kerningClass = defcon.Kerning
groupsClass = defcon.Groups
infoClass = defcon.Info
featuresClass = defcon.Features
mathInfoClass = MathInfo
mathGlyphClass = MathGlyph
mathKerningClass = MathKerning
def __init__(self, readerClass=None, writerClass=None, fontClass=None, ufoVersion=3, useVarlib=False):
super(DesignSpaceProcessor, self).__init__(readerClass=readerClass, writerClass=writerClass)
self.ufoVersion = ufoVersion # target UFO version
self.useVarlib = useVarlib
self.roundGeometry = False
self._glyphMutators = {}
self._infoMutator = None
self._kerningMutator = None
self._kerningMutatorPairs = None
self.fonts = {}
self._fontsLoaded = False
self.mutedAxisNames = None # list of axis names that need to be muted
self.glyphNames = [] # list of all glyphnames
self.processRules = True
self.problems = [] # receptacle for problem notifications. Not big enough to break, but also not small enough to ignore.
self.toolLog = []
def generateUFO(self, processRules=True, glyphNames=None, pairs=None, bend=False):
# makes the instances
# option to execute the rules
# make sure we're not trying to overwrite a newer UFO format
self.loadFonts()
self.findDefault()
if self.default is None:
# we need one to generate
raise UFOProcessorError("Can't generate UFO from this designspace: no default font.", self)
v = 0
for instanceDescriptor in self.instances:
if instanceDescriptor.path is None:
continue
font = self.makeInstance(instanceDescriptor,
processRules,
glyphNames=glyphNames,
pairs=pairs,
bend=bend)
folder = os.path.dirname(os.path.abspath(instanceDescriptor.path))
path = instanceDescriptor.path
if not os.path.exists(folder):
os.makedirs(folder)
if os.path.exists(path):
existingUFOFormatVersion = getUFOVersion(path)
if existingUFOFormatVersion > self.ufoVersion:
self.problems.append("Can’t overwrite existing UFO%d with UFO%d." % (existingUFOFormatVersion, self.ufoVersion))
continue
font.save(path, self.ufoVersion)
self.problems.append("Generated %s as UFO%d"%(os.path.basename(path), self.ufoVersion))
return True
def getSerializedAxes(self):
return [a.serialize() for a in self.axes]
def getMutatorAxes(self):
# map the axis values?
d = collections.OrderedDict()
for a in self.axes:
d[a.name] = a.serialize()
return d
def _getAxisOrder(self):
return [a.name for a in self.axes]
axisOrder = property(_getAxisOrder, doc="get the axis order from the axis descriptors")
serializedAxes = property(getSerializedAxes, doc="a list of dicts with the axis values")
def getVariationModel(self, items, axes, bias=None):
# Return either a mutatorMath or a varlib.model object for calculating.
try:
if self.useVarlib:
# use the varlib variation model
try:
return dict(), VariationModelMutator(items, self.axes)
except (KeyError, AssertionError):
error = traceback.format_exc()
self.toolLog.append("UFOProcessor.getVariationModel error: %s" % error)
self.toolLog.append(items)
return {}, None
else:
# use mutatormath model
axesForMutator = self.getMutatorAxes()
return buildMutator(items, axes=axesForMutator, bias=bias)
except:
error = traceback.format_exc()
self.toolLog.append("UFOProcessor.getVariationModel error: %s" % error)
return {}, None
def getInfoMutator(self):
""" Returns a info mutator """
if self._infoMutator:
return self._infoMutator
infoItems = []
for sourceDescriptor in self.sources:
if sourceDescriptor.layerName is not None:
continue
loc = Location(sourceDescriptor.location)
sourceFont = self.fonts[sourceDescriptor.name]
if sourceFont is None:
continue
if hasattr(sourceFont.info, "toMathInfo"):
infoItems.append((loc, sourceFont.info.toMathInfo()))
else:
infoItems.append((loc, self.mathInfoClass(sourceFont.info)))
infoBias = self.newDefaultLocation(bend=True)
bias, self._infoMutator = self.getVariationModel(infoItems, axes=self.serializedAxes, bias=infoBias)
return self._infoMutator
def getKerningMutator(self, pairs=None):
""" Return a kerning mutator, collect the sources, build mathGlyphs.
If no pairs are given: calculate the whole table.
If pairs are given then query the sources for a value and make a mutator only with those values.
"""
if self._kerningMutator and pairs == self._kerningMutatorPairs:
return self._kerningMutator
kerningItems = []
foregroundLayers = [None, 'foreground', 'public.default']
if pairs is None:
for sourceDescriptor in self.sources:
if sourceDescriptor.layerName not in foregroundLayers:
continue
if not sourceDescriptor.muteKerning:
loc = Location(sourceDescriptor.location)
sourceFont = self.fonts[sourceDescriptor.name]
if sourceFont is None: continue
# this makes assumptions about the groups of all sources being the same.
kerningItems.append((loc, self.mathKerningClass(sourceFont.kerning, sourceFont.groups)))
else:
self._kerningMutatorPairs = pairs
for sourceDescriptor in self.sources:
# XXX check sourceDescriptor layerName, only foreground should contribute
if sourceDescriptor.layerName is not None:
continue
if not os.path.exists(sourceDescriptor.path):
continue
if not sourceDescriptor.muteKerning:
sourceFont = self.fonts[sourceDescriptor.name]
if sourceFont is None:
continue
loc = Location(sourceDescriptor.location)
# XXX can we get the kern value from the fontparts kerning object?
kerningItem = self.mathKerningClass(sourceFont.kerning, sourceFont.groups)
if kerningItem is not None:
sparseKerning = {}
for pair in pairs:
v = kerningItem.get(pair)
if v is not None:
sparseKerning[pair] = v
kerningItems.append((loc, self.mathKerningClass(sparseKerning)))
kerningBias = self.newDefaultLocation(bend=True)
bias, self._kerningMutator = self.getVariationModel(kerningItems, axes=self.serializedAxes, bias=kerningBias)
return self._kerningMutator
def filterThisLocation(self, location, mutedAxes):
# return the location with the axes in mutedAxes removed
# this means checking if the location is a non-default value
if not mutedAxes:
return False, location
defaults = {}
ignoreMaster = False
for aD in self.axes:
defaults[aD.name] = aD.default
new = {}
new.update(location)
for mutedAxisName in mutedAxes:
if mutedAxisName not in location:
continue
if mutedAxisName not in defaults:
continue
if location[mutedAxisName] != defaults.get(mutedAxisName):
ignoreMaster = True
del new[mutedAxisName]
return ignoreMaster, new
def getGlyphMutator(self, glyphName,
decomposeComponents=False,
fromCache=None):
# make a mutator / varlib object for glyphName.
cacheKey = (glyphName, decomposeComponents)
if cacheKey in self._glyphMutators and fromCache:
return self._glyphMutators[cacheKey]
items = self.collectMastersForGlyph(glyphName, decomposeComponents=decomposeComponents)
new = []
for a, b, c in items:
if hasattr(b, "toMathGlyph"):
# note: calling toMathGlyph ignores the mathGlyphClass preference
# maybe the self.mathGlyphClass is not necessary?
new.append((a,b.toMathGlyph()))
else:
new.append((a,self.mathGlyphClass(b)))
thing = None
try:
bias, thing = self.getVariationModel(new, axes=self.serializedAxes, bias=self.newDefaultLocation(bend=True)) #xx
except TypeError:
self.toolLog.append("getGlyphMutator %s items: %s new: %s" % (glyphName, items, new))
self.problems.append("\tCan't make processor for glyph %s" % (glyphName))
if thing is not None:
self._glyphMutators[cacheKey] = thing
return thing
def collectMastersForGlyph(self, glyphName, decomposeComponents=False):
""" Return a glyph mutator.defaultLoc
decomposeComponents = True causes the source glyphs to be decomposed first
before building the mutator. That gives you instances that do not depend
on a complete font. If you're calculating previews for instance.
XXX check glyphs in layers
"""
items = []
empties = []
foundEmpty = False
for sourceDescriptor in self.sources:
if not os.path.exists(sourceDescriptor.path):
#kthxbai
p = "\tMissing UFO at %s" % sourceDescriptor.path
if p not in self.problems:
self.problems.append(p)
continue
if glyphName in sourceDescriptor.mutedGlyphNames:
continue
thisIsDefault = self.default == sourceDescriptor
ignoreMaster, filteredLocation = self.filterThisLocation(sourceDescriptor.location, self.mutedAxisNames)
if ignoreMaster:
continue
f = self.fonts.get(sourceDescriptor.name)
if f is None: continue
loc = Location(sourceDescriptor.location)
sourceLayer = f
if not glyphName in f:
# log this?
continue
layerName = getDefaultLayerName(f)
sourceGlyphObject = None
# handle source layers
if sourceDescriptor.layerName is not None:
# start looking for a layer
# Do not bother for mutatorMath designspaces
layerName = sourceDescriptor.layerName
sourceLayer = getLayer(f, sourceDescriptor.layerName)
if sourceLayer is None:
continue
if glyphName not in sourceLayer:
# start looking for a glyph
# this might be a support in a sparse layer
# so we're skipping!
continue
# still have to check if the sourcelayer glyph is empty
if not glyphName in sourceLayer:
continue
else:
sourceGlyphObject = sourceLayer[glyphName]
if checkGlyphIsEmpty(sourceGlyphObject, allowWhiteSpace=True):
foundEmpty = True
#sourceGlyphObject = None
#continue
if decomposeComponents:
# what about decomposing glyphs in a partial font?
temp = self.glyphClass()
p = temp.getPointPen()
dpp = DecomposePointPen(sourceLayer, p)
sourceGlyphObject.drawPoints(dpp)
temp.width = sourceGlyphObject.width
temp.name = sourceGlyphObject.name
processThis = temp
else:
processThis = sourceGlyphObject
sourceInfo = dict(source=f.path, glyphName=glyphName,
layerName=layerName,
location=filteredLocation, # sourceDescriptor.location,
sourceName=sourceDescriptor.name,
)
if hasattr(processThis, "toMathGlyph"):
processThis = processThis.toMathGlyph()
else:
processThis = self.mathGlyphClass(processThis)
items.append((loc, processThis, sourceInfo))
empties.append((thisIsDefault, foundEmpty))
# check the empties:
# if the default glyph is empty, then all must be empty
# if the default glyph is not empty then none can be empty
checkedItems = []
emptiesAllowed = False
# first check if the default is empty.
# remember that the sources can be in any order
for i, p in enumerate(empties):
isDefault, isEmpty = p
if isDefault and isEmpty:
emptiesAllowed = True
# now we know what to look for
if not emptiesAllowed:
for i, p in enumerate(empties):
isDefault, isEmpty = p
if not isEmpty:
checkedItems.append(items[i])
else:
for i, p in enumerate(empties):
isDefault, isEmpty = p
if isEmpty:
checkedItems.append(items[i])
return checkedItems
def getNeutralFont(self):
# Return a font object for the neutral font
# self.fonts[self.default.name] ?
neutralLoc = self.newDefaultLocation(bend=True)
for sd in self.sources:
if sd.location == neutralLoc:
if sd.name in self.fonts:
#candidate = self.fonts[sd.name]
#if sd.layerName:
# if sd.layerName in candidate.layers:
return self.fonts[sd.name]
return None
def findDefault(self):
"""Set and return SourceDescriptor at the default location or None.
The default location is the set of all `default` values in user space of all axes.
"""
self.default = None
# Convert the default location from user space to design space before comparing
# it against the SourceDescriptor locations (always in design space).
default_location_design = self.newDefaultLocation(bend=True)
for sourceDescriptor in self.sources:
if sourceDescriptor.location == default_location_design:
self.default = sourceDescriptor
return sourceDescriptor
return None
def newDefaultLocation(self, bend=False):
# overwrite from fontTools.newDefaultLocation
# we do not want this default location to be mapped.
loc = collections.OrderedDict()
for axisDescriptor in self.axes:
if bend:
loc[axisDescriptor.name] = axisDescriptor.map_forward(
axisDescriptor.default
)
else:
loc[axisDescriptor.name] = axisDescriptor.default
return loc
def loadFonts(self, reload=False):
# Load the fonts and find the default candidate based on the info flag
if self._fontsLoaded and not reload:
return
names = set()
for i, sourceDescriptor in enumerate(self.sources):
if sourceDescriptor.name is None:
# make sure it has a unique name
sourceDescriptor.name = "master.%d" % i
if sourceDescriptor.name not in self.fonts:
if os.path.exists(sourceDescriptor.path):
self.fonts[sourceDescriptor.name] = self._instantiateFont(sourceDescriptor.path)
self.problems.append("loaded master from %s, layer %s, format %d" % (sourceDescriptor.path, sourceDescriptor.layerName, getUFOVersion(sourceDescriptor.path)))
names |= set(self.fonts[sourceDescriptor.name].keys())
else:
self.fonts[sourceDescriptor.name] = None
self.problems.append("source ufo not found at %s" % (sourceDescriptor.path))
self.glyphNames = list(names)
self._fontsLoaded = True
def getFonts(self):
# return a list of (font object, location) tuples
fonts = []
for sourceDescriptor in self.sources:
f = self.fonts.get(sourceDescriptor.name)
if f is not None:
fonts.append((f, sourceDescriptor.location))
return fonts
def makeInstance(self, instanceDescriptor,
doRules=False,
glyphNames=None,
pairs=None,
bend=False):
""" Generate a font object for this instance """
font = self._instantiateFont(None)
# make fonty things here
loc = Location(instanceDescriptor.location)
anisotropic = False
locHorizontal = locVertical = loc
if self.isAnisotropic(loc):
anisotropic = True
locHorizontal, locVertical = self.splitAnisotropic(loc)
# groups
renameMap = getattr(self.fonts[self.default.name], "kerningGroupConversionRenameMaps", None)
font.kerningGroupConversionRenameMaps = renameMap if renameMap is not None else {'side1': {}, 'side2': {}}
# make the kerning
# this kerning is always horizontal. We can take the horizontal location
# filter the available pairs?
if instanceDescriptor.kerning:
if pairs:
try:
kerningMutator = self.getKerningMutator(pairs=pairs)
kerningObject = kerningMutator.makeInstance(locHorizontal, bend=bend)
kerningObject.extractKerning(font)
except:
self.problems.append("Could not make kerning for %s. %s" % (loc, traceback.format_exc()))
else:
kerningMutator = self.getKerningMutator()
if kerningMutator is not None:
kerningObject = kerningMutator.makeInstance(locHorizontal, bend=bend)
kerningObject.extractKerning(font)
# make the info
try:
infoMutator = self.getInfoMutator()
if infoMutator is not None:
if not anisotropic:
infoInstanceObject = infoMutator.makeInstance(loc, bend=bend)
else:
horizontalInfoInstanceObject = infoMutator.makeInstance(locHorizontal, bend=bend)
verticalInfoInstanceObject = infoMutator.makeInstance(locVertical, bend=bend)
# merge them again
infoInstanceObject = (1,0)*horizontalInfoInstanceObject + (0,1)*verticalInfoInstanceObject
if self.roundGeometry:
try:
infoInstanceObject = infoInstanceObject.round()
except AttributeError:
pass
infoInstanceObject.extractInfo(font.info)
font.info.familyName = instanceDescriptor.familyName
font.info.styleName = instanceDescriptor.styleName
font.info.postscriptFontName = instanceDescriptor.postScriptFontName # yikes, note the differences in capitalisation..
font.info.styleMapFamilyName = instanceDescriptor.styleMapFamilyName
font.info.styleMapStyleName = instanceDescriptor.styleMapStyleName
# NEED SOME HELP WITH THIS
# localised names need to go to the right openTypeNameRecords
# records = []
# nameID = 1
# platformID =
# for languageCode, name in instanceDescriptor.localisedStyleMapFamilyName.items():
# # Name ID 1 (font family name) is found at the generic styleMapFamily attribute.
# records.append((nameID, ))
except:
self.problems.append("Could not make fontinfo for %s. %s" % (loc, traceback.format_exc()))
for sourceDescriptor in self.sources:
if sourceDescriptor.copyInfo:
# this is the source
if self.fonts[sourceDescriptor.name] is not None:
self._copyFontInfo(self.fonts[sourceDescriptor.name].info, font.info)
if sourceDescriptor.copyLib:
# explicitly copy the font.lib items
if self.fonts[sourceDescriptor.name] is not None:
for key, value in self.fonts[sourceDescriptor.name].lib.items():
font.lib[key] = value
if sourceDescriptor.copyGroups:
if self.fonts[sourceDescriptor.name] is not None:
sides = font.kerningGroupConversionRenameMaps.get('side1', {})
sides.update(font.kerningGroupConversionRenameMaps.get('side2', {}))
for key, value in self.fonts[sourceDescriptor.name].groups.items():
if key not in sides:
font.groups[key] = value
if sourceDescriptor.copyFeatures:
if self.fonts[sourceDescriptor.name] is not None:
featuresText = self.fonts[sourceDescriptor.name].features.text
font.features.text = featuresText
# glyphs
if glyphNames:
selectedGlyphNames = glyphNames
else:
selectedGlyphNames = self.glyphNames
# add the glyphnames to the font.lib['public.glyphOrder']
if not 'public.glyphOrder' in font.lib.keys():
font.lib['public.glyphOrder'] = selectedGlyphNames
for glyphName in selectedGlyphNames:
try:
glyphMutator = self.getGlyphMutator(glyphName)
if glyphMutator is None:
self.problems.append("Could not make mutator for glyph %s" % (glyphName))
continue
except:
self.problems.append("Could not make mutator for glyph %s %s" % (glyphName, traceback.format_exc()))
continue
if glyphName in instanceDescriptor.glyphs.keys():
# XXX this should be able to go now that we have full rule support.
# reminder: this is what the glyphData can look like
# {'instanceLocation': {'custom': 0.0, 'weight': 824.0},
# 'masters': [{'font': 'master.Adobe VF Prototype.Master_0.0',
# 'glyphName': 'dollar.nostroke',
# 'location': {'custom': 0.0, 'weight': 0.0}},
# {'font': 'master.Adobe VF Prototype.Master_1.1',
# 'glyphName': 'dollar.nostroke',
# 'location': {'custom': 0.0, 'weight': 368.0}},
# {'font': 'master.Adobe VF Prototype.Master_2.2',
# 'glyphName': 'dollar.nostroke',
# 'location': {'custom': 0.0, 'weight': 1000.0}},
# {'font': 'master.Adobe VF Prototype.Master_3.3',
# 'glyphName': 'dollar.nostroke',
# 'location': {'custom': 100.0, 'weight': 1000.0}},
# {'font': 'master.Adobe VF Prototype.Master_0.4',
# 'glyphName': 'dollar.nostroke',
# 'location': {'custom': 100.0, 'weight': 0.0}},
# {'font': 'master.Adobe VF Prototype.Master_4.5',
# 'glyphName': 'dollar.nostroke',
# 'location': {'custom': 100.0, 'weight': 368.0}}],
# 'unicodes': [36]}
glyphData = instanceDescriptor.glyphs[glyphName]
else:
glyphData = {}
font.newGlyph(glyphName)
font[glyphName].clear()
if glyphData.get('mute', False):
# mute this glyph, skip
continue
glyphInstanceLocation = glyphData.get("instanceLocation", instanceDescriptor.location)
glyphInstanceLocation = Location(glyphInstanceLocation)
uniValues = []
neutral = glyphMutator.get(())
if neutral is not None:
uniValues = neutral[0].unicodes
else:
neutralFont = self.getNeutralFont()
if glyphName in neutralFont:
uniValues = neutralFont[glyphName].unicodes
glyphInstanceUnicodes = glyphData.get("unicodes", uniValues)
note = glyphData.get("note")
if note:
font[glyphName].note = note
# XXXX phase out support for instance-specific masters
# this should be handled by the rules system.
masters = glyphData.get("masters", None)
if masters is not None:
items = []
for glyphMaster in masters:
sourceGlyphFont = glyphMaster.get("font")
sourceGlyphName = glyphMaster.get("glyphName", glyphName)
m = self.fonts.get(sourceGlyphFont)
if not sourceGlyphName in m:
continue
if hasattr(m[sourceGlyphName], "toMathGlyph"):
sourceGlyph = m[sourceGlyphName].toMathGlyph()
else:
sourceGlyph = MathGlyph(m[sourceGlyphName])
sourceGlyphLocation = glyphMaster.get("location")
items.append((Location(sourceGlyphLocation), sourceGlyph))
bias, glyphMutator = self.getVariationModel(items, axes=self.serializedAxes, bias=self.newDefaultLocation(bend=True))
try:
if not self.isAnisotropic(glyphInstanceLocation):
glyphInstanceObject = glyphMutator.makeInstance(glyphInstanceLocation, bend=bend)
else:
# split anisotropic location into horizontal and vertical components
horizontal, vertical = self.splitAnisotropic(glyphInstanceLocation)
horizontalGlyphInstanceObject = glyphMutator.makeInstance(horizontal, bend=bend)
verticalGlyphInstanceObject = glyphMutator.makeInstance(vertical, bend=bend)
# merge them again
glyphInstanceObject = (1,0)*horizontalGlyphInstanceObject + (0,1)*verticalGlyphInstanceObject
except IndexError:
# alignment problem with the data?
self.problems.append("Quite possibly some sort of data alignment error in %s" % glyphName)
continue
font.newGlyph(glyphName)
font[glyphName].clear()
if self.roundGeometry:
try:
glyphInstanceObject = glyphInstanceObject.round()
except AttributeError:
pass
try:
# File "/Users/erik/code/ufoProcessor/Lib/ufoProcessor/__init__.py", line 649, in makeInstance
# glyphInstanceObject.extractGlyph(font[glyphName], onlyGeometry=True)
# File "/Applications/RoboFont.app/Contents/Resources/lib/python3.6/fontMath/mathGlyph.py", line 315, in extractGlyph
# glyph.anchors = [dict(anchor) for anchor in self.anchors]
# File "/Applications/RoboFont.app/Contents/Resources/lib/python3.6/fontParts/base/base.py", line 103, in __set__
# raise FontPartsError("no setter for %r" % self.name)
# fontParts.base.errors.FontPartsError: no setter for 'anchors'
if hasattr(font[glyphName], "fromMathGlyph"):
font[glyphName].fromMathGlyph(glyphInstanceObject)
else:
glyphInstanceObject.extractGlyph(font[glyphName], onlyGeometry=True)
except TypeError:
# this causes ruled glyphs to end up in the wrong glyphname
# but defcon2 objects don't support it
pPen = font[glyphName].getPointPen()
font[glyphName].clear()
glyphInstanceObject.drawPoints(pPen)
font[glyphName].width = glyphInstanceObject.width
font[glyphName].unicodes = glyphInstanceUnicodes
if doRules:
resultNames = processRules(self.rules, loc, self.glyphNames)
for oldName, newName in zip(self.glyphNames, resultNames):
if oldName != newName:
swapGlyphNames(font, oldName, newName)
# copy the glyph lib?
#for sourceDescriptor in self.sources:
# if sourceDescriptor.copyLib:
# pass
# pass
# store designspace location in the font.lib
font.lib['designspace.location'] = list(instanceDescriptor.location.items())
return font
def isAnisotropic(self, location):
for v in location.values():
if type(v)==tuple:
return True
return False
def splitAnisotropic(self, location):
x = Location()
y = Location()
for dim, val in location.items():
if type(val)==tuple:
x[dim] = val[0]
y[dim] = val[1]
else:
x[dim] = y[dim] = val
return x, y
def _instantiateFont(self, path):
""" Return a instance of a font object with all the given subclasses"""
try:
return self.fontClass(path,
layerClass=self.layerClass,
libClass=self.libClass,
kerningClass=self.kerningClass,
groupsClass=self.groupsClass,
infoClass=self.infoClass,
featuresClass=self.featuresClass,
glyphClass=self.glyphClass,
glyphContourClass=self.glyphContourClass,
glyphPointClass=self.glyphPointClass,
glyphComponentClass=self.glyphComponentClass,
glyphAnchorClass=self.glyphAnchorClass)
except TypeError:
# if our fontClass doesn't support all the additional classes
return self.fontClass(path)
def _copyFontInfo(self, sourceInfo, targetInfo):
""" Copy the non-calculating fields from the source info."""
infoAttributes = [
"versionMajor",
"versionMinor",
"copyright",
"trademark",
"note",
"openTypeGaspRangeRecords",
"openTypeHeadCreated",
"openTypeHeadFlags",
"openTypeNameDesigner",
"openTypeNameDesignerURL",
"openTypeNameManufacturer",
"openTypeNameManufacturerURL",
"openTypeNameLicense",
"openTypeNameLicenseURL",
"openTypeNameVersion",
"openTypeNameUniqueID",
"openTypeNameDescription",
"#openTypeNamePreferredFamilyName",
"#openTypeNamePreferredSubfamilyName",
"#openTypeNameCompatibleFullName",
"openTypeNameSampleText",
"openTypeNameWWSFamilyName",
"openTypeNameWWSSubfamilyName",
"openTypeNameRecords",
"openTypeOS2Selection",
"openTypeOS2VendorID",
"openTypeOS2Panose",
"openTypeOS2FamilyClass",
"openTypeOS2UnicodeRanges",
"openTypeOS2CodePageRanges",
"openTypeOS2Type",
"postscriptIsFixedPitch",
"postscriptForceBold",
"postscriptDefaultCharacter",
"postscriptWindowsCharacterSet"
]
for infoAttribute in infoAttributes:
copy = False
if self.ufoVersion == 1 and infoAttribute in fontInfoAttributesVersion1:
copy = True
elif self.ufoVersion == 2 and infoAttribute in fontInfoAttributesVersion2:
copy = True
elif self.ufoVersion == 3 and infoAttribute in fontInfoAttributesVersion3:
copy = True
if copy:
value = getattr(sourceInfo, infoAttribute)
setattr(targetInfo, infoAttribute, value)
|
class DesignSpaceProcessor(DesignSpaceDocument):
'''
A subclassed DesignSpaceDocument that can
- process the document and generate finished UFOs with MutatorMath or varLib.model.
- read and write documents
- Replacement for the mutatorMath.ufo generator.
'''
def __init__(self, readerClass=None, writerClass=None, fontClass=None, ufoVersion=3, useVarlib=False):
pass
def generateUFO(self, processRules=True, glyphNames=None, pairs=None, bend=False):
pass
def getSerializedAxes(self):
pass
def getMutatorAxes(self):
pass
def _getAxisOrder(self):
pass
def getVariationModel(self, items, axes, bias=None):
pass
def getInfoMutator(self):
''' Returns an info mutator '''
pass
def getKerningMutator(self, pairs=None):
''' Return a kerning mutator, collect the sources, build mathGlyphs.
If no pairs are given: calculate the whole table.
If pairs are given then query the sources for a value and make a mutator only with those values.
'''
pass
def filterThisLocation(self, location, mutedAxes):
pass
def getGlyphMutator(self, glyphName,
decomposeComponents=False,
fromCache=None):
pass
def collectMastersForGlyph(self, glyphName, decomposeComponents=False):
''' Collect (location, mathGlyph, sourceInfo) items for glyphName from the masters.
decomposeComponents = True causes the source glyphs to be decomposed first
before building the mutator. That gives you instances that do not depend
on a complete font, which is useful when calculating previews, for instance.
XXX check glyphs in layers
'''
pass
def getNeutralFont(self):
pass
def findDefault(self):
'''Set and return SourceDescriptor at the default location or None.
The default location is the set of all `default` values in user space of all axes.
'''
pass
def newDefaultLocation(self, bend=False):
pass
def loadFonts(self, reload=False):
pass
def getFonts(self):
pass
def makeInstance(self, instanceDescriptor,
doRules=False,
glyphNames=None,
pairs=None,
bend=False):
''' Generate a font object for this instance '''
pass
def isAnisotropic(self, location):
pass
def splitAnisotropic(self, location):
pass
def _instantiateFont(self, path):
''' Return an instance of a font object with all the given subclasses'''
pass
def _copyFontInfo(self, sourceInfo, targetInfo):
''' Copy the non-calculating fields from the source info.'''
pass
| 22 | 8 | 31 | 0 | 25 | 6 | 7 | 0.26 | 1 | 17 | 3 | 0 | 21 | 15 | 21 | 21 | 704 | 30 | 542 | 178 | 514 | 142 | 460 | 172 | 438 | 47 | 1 | 6 | 152 |
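A hedged usage sketch for the DesignSpaceProcessor above; the designspace path is hypothetical, and generateUFO writes each instance UFO to the path declared in its instance descriptor.

from ufoProcessor import DesignSpaceProcessor

doc = DesignSpaceProcessor(ufoVersion=3, useVarlib=False)   # False: interpolate with mutatorMath
doc.read("MyFamily.designspace")                            # hypothetical document path
doc.generateUFO()                                           # loads the sources, builds and saves the instances
for problem in doc.problems:                                # notifications collected along the way
    print(problem)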
145,381 |
LettError/ufoProcessor
|
LettError_ufoProcessor/test_window_RF.py
|
test_window_RF.UFOOperatorTester
|
class UFOOperatorTester(object):
def __init__(self, designspacePath):
self.doc = None
self.w = vanilla.Window((800,700), "UFOOperator Tester")
self.w.reloadButton = vanilla.Button((10, 10, 200, 20), "Reload Designspace", callback=self.reloadDesignspace)
self.w.makeSomeInstancesButton = vanilla.Button((10, 40, 400, 20), "Make instances of the same glyph", callback=self.makeInstancesOfSameGlyphButtonCallback)
self.w.makeSomeGlyphsButton = vanilla.Button((10, 70, 400, 20), "Make instances of different glyphs", callback=self.makeInstancesOfDifferentGlyphsButtonCallback)
self.w.generateInstancesButton = vanilla.Button((10, 100, 400, 20), "Generate instances", callback=self.generateInstancesButtonCallback)
self.w.reportGlyphChangedButton = vanilla.Button((10, 130, 400, 20), "Report random glyph as changed", callback=self.reportRandomGlyphChangedButtonCallback)
self.w.pathText = vanilla.TextBox((230, 12, -10, 20), "...")
self.w.cacheItemsList = vanilla.List((0, 170, -0, 210),
[{"funcName": "A", "count": "a"}, {"funcName": "B", "count": "b"}],
columnDescriptions=[{"title": "Function", "key": "funcName"}, {"title": "Items stored", "key": "count"}],
selectionCallback=self.selectionCallback)
self.w.callsToCacheList = vanilla.List((0, 400, -0, -0),
[{"funcName": "A", "count": "a"}, {"funcName": "B", "count": "b"}],
columnDescriptions=[{"title": "Function", "key": "funcName"}, {"title": "Calls served from cache", "key": "count"}],
selectionCallback=self.selectionCallback)
self.w.open()
self.w.bind("close", self.closeWindow)
self.reload()
def reloadDesignspace(self, sender=None):
print('reloadDesignspace', sender)
self.reload()
def selectionCallback(self, sender):
pass
def closeWindow(self, something=None):
#print("closeWindow", something)
self.doc.changed()
pass
def reportRandomGlyphChangedButtonCallback(self, sender):
for i in range(10):
namesLeft = self.doc.glyphsInCache()
candidateName = None
if namesLeft:
candidateName = random.choice(namesLeft)
print(f'reportRandomGlyphChangedButtonCallback {i} {candidateName}')
if candidateName:
self.doc.glyphChanged(candidateName, includeDependencies=True)
self.updateList()
def generateInstancesButtonCallback(self, sender):
self.doc.loadFonts()
self.doc.generateUFOs()
self.updateList()
def makeInstancesOfSameGlyphButtonCallback(self, sender):
# make some instances of the same glyph
hits = 100
glyphName = random.choice(self.doc.glyphNames)
for item in range(hits):
location = self.doc.randomLocation()
self.doc.makeOneGlyph(glyphName, location, bend=False, decomposeComponents=True, useVarlib=False, roundGeometry=False, clip=False)
self.updateList()
def makeInstancesOfDifferentGlyphsButtonCallback(self, sender):
location = self.doc.randomLocation()
for glyphName in self.doc.glyphNames:
self.doc.makeOneGlyph(glyphName, location, bend=False, decomposeComponents=True, useVarlib=False, roundGeometry=False, clip=False)
self.updateList()
def reload(self):
if self.doc is not None:
# we might still have a previous UFOOperator and we need it to clear the cache
self.doc.changed()
self.doc = ufoProcessor.ufoOperator.UFOOperator(designspacePath)
self.doc.loadFonts()
self.doc.changed()
self.updateList()
def updateList(self):
self.w.pathText.set(designspacePath)
frequencyItems = []
objectItems = []
objects, frequency = ufoProcessor.ufoOperator.inspectMemoizeCache()
for funcName, count in frequency:
frequencyItems.append(dict(count=count, funcName= funcName))
for funcName, count in objects:
objectItems.append(dict(count=count, funcName= funcName))
self.w.callsToCacheList.set(frequencyItems)
self.w.cacheItemsList.set(objectItems)
|
class UFOOperatorTester(object):
def __init__(self, designspacePath):
pass
def reloadDesignspace(self, sender=None):
pass
def selectionCallback(self, sender):
pass
def closeWindow(self, something=None):
pass
def reportRandomGlyphChangedButtonCallback(self, sender):
pass
def generateInstancesButtonCallback(self, sender):
pass
def makeInstancesOfSameGlyphButtonCallback(self, sender):
pass
def makeInstancesOfDifferentGlyphsButtonCallback(self, sender):
pass
def reload(self):
pass
def updateList(self):
pass
| 11 | 0 | 8 | 0 | 7 | 0 | 2 | 0.04 | 1 | 3 | 1 | 0 | 10 | 2 | 10 | 10 | 88 | 12 | 73 | 26 | 62 | 3 | 67 | 26 | 56 | 4 | 1 | 2 | 18 |
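A short launch sketch for the UFOOperatorTester window above; it assumes a RoboFont / vanilla environment, and the designspace path is hypothetical (note that reload() also reads the module-level designspacePath).

designspacePath = "/path/to/MyFamily.designspace"   # hypothetical; reload() uses this module-level name
UFOOperatorTester(designspacePath)                  # opens the test window and loads the document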
145,382 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/logger.py
|
ufoProcessor.logger.Logger
|
class Logger:
def __init__(self, path, rootDirectory, nest=0):
self.path = path
self.rootDirectory = rootDirectory
self.nest = nest
if not nest:
if path is not None:
if os.path.exists(path):
os.remove(path)
if not os.path.exists(path):
f = open(path, "w")
f.close()
def child(self, text=None):
logger = Logger(
self.path,
self.rootDirectory,
nest=self.nest + 1
)
if text:
logger.info(text)
return logger
def relativePath(self, path):
return os.path.relpath(path, self.rootDirectory)
def _makeText(self, text):
if self.nest:
text = f"{('| ' * self.nest).strip()} {text}"
return text
def _toConsole(self, text):
print(text)
def _toFile(self, text):
if self.path is None:
return
text += "\n"
f = open(self.path, "a")
f.write(text)
f.close()
def time(self, prefix=None):
now = time.strftime("%Y-%m-%d %H:%M")
if prefix:
now = prefix + " " + now
self.info(now)
def info(self, text):
text = self._makeText(text)
self._toConsole(text)
self._toFile(text)
def infoItem(self, text):
text = f"\t- {text}"
self.info(text)
def infoPath(self, path):
text = self.relativePath(path)
self.infoItem(text)
def detail(self, text):
text = self._makeText(text)
self._toFile(text)
def detailItem(self, text):
text = f"- {text}"
self.detail(text)
def detailPath(self, path):
text = self.relativePath(path)
self.detailItem(text)
|
class Logger:
def __init__(self, path, rootDirectory, nest=0):
pass
def child(self, text=None):
pass
def relativePath(self, path):
pass
def _makeText(self, text):
pass
def _toConsole(self, text):
pass
def _toFile(self, text):
pass
def time(self, prefix=None):
pass
def info(self, text):
pass
def infoItem(self, text):
pass
def infoPath(self, path):
pass
def detail(self, text):
pass
def detailItem(self, text):
pass
def detailPath(self, path):
pass
| 14 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 13 | 3 | 13 | 13 | 73 | 13 | 60 | 23 | 46 | 0 | 56 | 23 | 42 | 5 | 0 | 3 | 21 |
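A minimal usage sketch for the Logger above; the file path and root directory are hypothetical.

from ufoProcessor.logger import Logger

log = Logger(path="build_log.txt", rootDirectory="/projects/MyFamily")
log.time("started")                                     # timestamped line, echoed to console and file
log.info("## processing sources")
child = log.child("reading masters")                    # nested logger, its lines get a '| ' prefix
child.infoPath("/projects/MyFamily/masters/Light.ufo")  # logged relative to rootDirectory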
145,383 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/emptyPen.py
|
ufoProcessor.emptyPen.DecomposePointPen
|
class DecomposePointPen(object):
def __init__(self, glyphSet, outPointPen):
self._glyphSet = glyphSet
self._outPointPen = outPointPen
self.beginPath = outPointPen.beginPath
self.endPath = outPointPen.endPath
self.addPoint = outPointPen.addPoint
def addComponent(self, baseGlyphName, transformation, identifier=None, **kwargs):
if baseGlyphName in self._glyphSet:
baseGlyph = self._glyphSet[baseGlyphName]
if transformation == _defaultTransformation:
baseGlyph.drawPoints(self)
else:
transformPointPen = TransformPointPen(self, transformation)
baseGlyph.drawPoints(transformPointPen)
|
class DecomposePointPen(object):
def __init__(self, glyphSet, outPointPen):
pass
def addComponent(self, baseGlyphName, transformation, identifier=None, **kwargs):
pass
| 3 | 0 | 7 | 0 | 7 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 2 | 5 | 2 | 2 | 17 | 2 | 15 | 10 | 12 | 0 | 14 | 10 | 11 | 3 | 1 | 2 | 4 |
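A hedged sketch of flattening a composite glyph with the DecomposePointPen above; it assumes a defcon Font at a hypothetical path that contains a composite glyph named "Aacute".

import defcon
from ufoProcessor.emptyPen import DecomposePointPen

font = defcon.Font("Master.ufo")           # hypothetical UFO acting as the glyph set
flat = defcon.Glyph()
decomposer = DecomposePointPen(font, flat.getPointPen())
font["Aacute"].drawPoints(decomposer)      # components are replaced by their transformed outlines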
145,384 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/varModels.py
|
ufoProcessor.varModels.VariationModelMutator
|
class VariationModelMutator(object):
""" a thing that looks like a mutator on the outside,
but uses the fonttools varlib logic to calculate.
"""
def __init__(self, items, axes, model=None, extrapolate=True):
# items: list of locationdict, value tuples
# axes: list of axis dictionaries, not axisdescriptor objects.
# model: a model, if we want to share one
self.extrapolate = extrapolate
self.axisOrder = [a.name for a in axes]
self.axisMapper = AxisMapper(axes)
self.axes = {}
for a in axes:
axisMinimum, axisMaximum = self.getAxisMinMax(a)
mappedMinimum, mappedDefault, mappedMaximum = a.map_forward(axisMinimum), a.map_forward(a.default), a.map_forward(axisMaximum)
self.axes[a.name] = (mappedMinimum, mappedDefault, mappedMaximum)
if model is None:
dd = [self._normalize(a) for a,b in items]
ee = self.axisOrder
self.model = VariationModel(dd, axisOrder=ee, extrapolate=self.extrapolate)
else:
self.model = model
self.masters = [b for a, b in items]
self.locations = [a for a, b in items]
def getAxisMinMax(self, axis):
# return the axis.minimum and axis.maximum for continuous axes
# return the min(axis.values), max(axis.values) for discrete axes
if hasattr(axis, "values"):
return min(axis.values), max(axis.values)
return axis.minimum, axis.maximum
def get(self, key):
if key in self.model.locations:
i = self.model.locations.index(key)
return self.masters[i]
return None
def getFactors(self, location):
nl = self._normalize(location)
return self.model.getScalars(nl)
def getMasters(self):
return self.masters
def getSupports(self):
return self.model.supports
def getReach(self):
items = []
for supportIndex, s in enumerate(self.getSupports()):
sortedOrder = self.model.reverseMapping[supportIndex]
items.append((self.masters[sortedOrder], s))
return items
def makeInstance(self, location, bend=False):
# check for anisotropic locations here
if bend:
location = self.axisMapper(location)
nl = self._normalize(location)
return self.model.interpolateFromMasters(nl, self.masters)
def _normalize(self, location):
return normalizeLocation(location, self.axes)
|
class VariationModelMutator(object):
''' a thing that looks like a mutator on the outside,
but uses the fonttools varlib logic to calculate.
'''
def __init__(self, items, axes, model=None, extrapolate=True):
pass
def getAxisMinMax(self, axis):
pass
def get(self, key):
pass
def getFactors(self, location):
pass
def getMasters(self):
pass
def getSupports(self):
pass
def getReach(self):
pass
def makeInstance(self, location, bend=False):
pass
def _normalize(self, location):
pass
| 10 | 1 | 6 | 0 | 5 | 1 | 2 | 0.19 | 1 | 2 | 1 | 0 | 9 | 7 | 9 | 9 | 66 | 10 | 47 | 27 | 37 | 9 | 46 | 27 | 36 | 3 | 1 | 1 | 15 |
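A hedged sketch of the VariationModelMutator above interpolating plain numbers on a single weight axis; it assumes a fontTools AxisDescriptor is acceptable as the axis object (the class only needs name, default, minimum, maximum and map_forward), and the values are made up.

from fontTools.designspaceLib import AxisDescriptor
from ufoProcessor.varModels import VariationModelMutator

wght = AxisDescriptor()
wght.name, wght.tag = "weight", "wght"
wght.minimum, wght.default, wght.maximum = 100, 100, 900

items = [({"weight": 100}, 10.0), ({"weight": 900}, 90.0)]   # (location, value) pairs
mutator = VariationModelMutator(items, [wght])
print(mutator.makeInstance({"weight": 500}))                 # expected 50.0 from varLib's model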
145,385 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/ufoOperator.py
|
ufoProcessor.ufoOperator.MemoizeDict
|
class MemoizeDict(dict):
"""
An immutable dictionary.
>>> d = MemoizeDict(name="a", test="b")
>>> d["name"]
'a'
>>> d["name"] = "c"
Traceback (most recent call last):
...
RuntimeError: Cannot modify MemoizeDict
"""
def __readonly__(self, *args, **kwargs):
raise RuntimeError("Cannot modify MemoizeDict")
__setitem__ = __readonly__
__delitem__ = __readonly__
pop = __readonly__
popitem = __readonly__
clear = __readonly__
update = __readonly__
setdefault = __readonly__
del __readonly__
_hash = None
def __hash__(self):
if self._hash is None:
self._hash = hash(frozenset(self.items()))
return self._hash
|
class MemoizeDict(dict):
'''
An immutable dictionary.
>>> d = MemoizeDict(name="a", test="b")
>>> d["name"]
'a'
>>> d["name"] = "c"
Traceback (most recent call last):
...
RuntimeError: Cannot modify MemoizeDict
'''
def __readonly__(self, *args, **kwargs):
pass
def __hash__(self):
pass
| 3 | 1 | 3 | 0 | 3 | 0 | 2 | 0.63 | 1 | 2 | 0 | 0 | 2 | 0 | 2 | 29 | 32 | 6 | 16 | 11 | 13 | 10 | 16 | 11 | 13 | 2 | 2 | 1 | 3 |
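A small illustrative note on the MemoizeDict above: because it hashes by its items, two instances built from the same keyword arguments address the same cache entry.

from ufoProcessor.ufoOperator import MemoizeDict

key = MemoizeDict(glyphName="a", decomposeComponents=False)
cache = {key: "cached result"}
print(cache[MemoizeDict(glyphName="a", decomposeComponents=False)])   # 'cached result'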
145,386 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/ufoOperator.py
|
ufoProcessor.ufoOperator.UFOOperator
|
class UFOOperator(object):
# wrapped, not inherited, as Just says.
fontClass = defcon.Font
layerClass = defcon.Layer
glyphClass = defcon.Glyph
libClass = defcon.Lib
glyphContourClass = defcon.Contour
glyphPointClass = defcon.Point
glyphComponentClass = defcon.Component
glyphAnchorClass = defcon.Anchor
kerningClass = defcon.Kerning
groupsClass = defcon.Groups
infoClass = defcon.Info
featuresClass = defcon.Features
mathInfoClass = MathInfo
mathGlyphClass = MathGlyph
mathKerningClass = MathKerning
# RF italic slant offset lib key
italicSlantOffsetLibKey = "com.typemytype.robofont.italicSlantOffset"
def __init__(self, pathOrObject=None, ufoVersion=3, useVarlib=True, extrapolate=False, strict=False, debug=False):
self.ufoVersion = ufoVersion
self.useVarlib = useVarlib
self._fontsLoaded = False
self.fonts = {}
self.tempLib = {}
self.libKeysForProcessing = [self.italicSlantOffsetLibKey]
self.roundGeometry = False
self.mutedAxisNames = None # list of axis names that need to be muted
self.strict = strict
self.debug = debug
self.extrapolate = extrapolate # if true allow extrapolation
self.logger = None
self.doc = None
if isinstance(pathOrObject, DesignSpaceDocument):
self.doc = pathOrObject
elif isinstance(pathOrObject, str):
self.doc = DesignSpaceDocument()
self.doc.read(pathOrObject)
else:
self.doc = DesignSpaceDocument()
if self.debug:
self.startLog()
def startLog(self):
# so we can call it later
self.debug = True
docBaseName = os.path.splitext(self.doc.path)[0]
logPath = f"{docBaseName}_log.txt"
self.logger = Logger(path=logPath, rootDirectory=None)
self.logger.time()
self.logger.info(f"## {self.doc.path}")
self.logger.info(f"\tUFO version: {self.ufoVersion}")
self.logger.info(f"\tround Geometry: {self.roundGeometry}")
if self.useVarlib:
self.logger.info(f"\tinterpolating with varlib")
else:
self.logger.info(f"\tinterpolating with mutatorMath")
def _instantiateFont(self, path):
""" Return a instance of a font object with all the given subclasses"""
try:
return self.fontClass(
path,
layerClass=self.layerClass,
libClass=self.libClass,
kerningClass=self.kerningClass,
groupsClass=self.groupsClass,
infoClass=self.infoClass,
featuresClass=self.featuresClass,
glyphClass=self.glyphClass,
glyphContourClass=self.glyphContourClass,
glyphPointClass=self.glyphPointClass,
glyphComponentClass=self.glyphComponentClass,
glyphAnchorClass=self.glyphAnchorClass
)
except TypeError:
# if our fontClass doesn't support all the additional classes
return self.fontClass(path)
# UFOProcessor compatibility
# not sure whether to expose all the DesignSpaceDocument internals here
# One can just use ufoOperator.doc to get it going?
# Let's see how difficult it is
def read(self, path):
"""Wrap a DesignSpaceDocument"""
self.doc = DesignSpaceDocument()
self.doc.read(path)
self.changed()
def write(self, path):
"""Write the wrapped DesignSpaceDocument"""
self.doc.write(path)
def addAxis(self, axisDescriptor):
self.doc.addAxis(axisDescriptor)
def addAxisDescriptor(self, **kwargs):
return self.doc.addAxisDescriptor(**kwargs)
def addLocationLabel(self, locationLabelDescriptor):
self.doc.addLocationLabel(locationLabelDescriptor)
def addLocationLabelDescriptor(self, **kwargs):
return self.doc.addLocationLabelDescriptor(**kwargs)
def addRule(self, ruleDescriptor):
self.doc.addRule(ruleDescriptor)
def addRuleDescriptor(self, **kwargs):
return self.doc.addRuleDescriptor(**kwargs)
def addSource(self, sourceDescriptor):
if sourceDescriptor.font is not None:
self.fonts[sourceDescriptor.name] = sourceDescriptor.font
self.doc.addSource(sourceDescriptor)
def addSourceDescriptor(self, **kwargs):
if "font" in kwargs:
self.fonts[kwargs["name"]] = kwargs["font"]
return self.doc.addSourceDescriptor(**kwargs)
def addInstance(self, instanceDescriptor):
self.doc.addInstance(instanceDescriptor)
def addInstanceDescriptor(self, **kwargs):
return self.doc.addInstanceDescriptor(**kwargs)
def addVariableFont(self, variableFontDescriptor):
self.doc.addVariableFont(variableFontDescriptor)
def addVariableFontDescriptor(self, **kwargs):
return self.doc.addVariableFontDescriptor(**kwargs)
def getVariableFonts(self):
return self.doc.getVariableFonts()
def getInterpolableUFOOperators(self, useVariableFonts=True):
if useVariableFonts:
splitFunction = splitVariableFonts
else:
splitFunction = splitInterpolable
for discreteLocationOrName, interpolableDesignspace in splitFunction(self.doc):
if isinstance(discreteLocationOrName, dict):
basename = ""
if self.doc.filename is not None:
basename = os.path.splitext(self.doc.filename)[0]
elif self.doc.path is not None:
basename = os.path.splitext(os.path.basename(self.doc.path))[0]
discreteLocationOrName = basename + "-".join([f"{key}_{value:g}" for key, value in discreteLocationOrName.items()])
yield discreteLocationOrName, self.__class__(
interpolableDesignspace,
ufoVersion=self.ufoVersion,
useVarlib=self.useVarlib,
extrapolate=self.extrapolate,
strict=self.strict,
debug=self.debug
)
@property
def path(self):
return self.doc.path
@path.setter
def path(self, value):
self.doc.path = value
@property
def lib(self):
return self.doc.lib
@property
def axes(self):
return self.doc.axes
@property
def sources(self):
return self.doc.sources
@property
def instances(self):
return self.doc.instances
@property
def formatVersion(self):
return self.doc.formatVersion
@property
def rules(self):
return self.doc.rules
@property
def rulesProcessingLast(self):
return self.doc.rulesProcessingLast
@property
def map_backward(self):
return self.doc.map_backward
@property
def labelForUserLocation(self):
return self.doc.labelForUserLocation
@property
def locationLabels(self):
return self.doc.locationLabels
@locationLabels.setter
def locationLabels(self, locationLabels):
self.doc.locationLabels = locationLabels
@property
def variableFonts(self):
return self.doc.variableFonts
@property
def writerClass(self):
return self.doc.writerClass
def nameLocation(self, loc):
# return a nicely formatted string for this location
return ",".join([f"{k}:{v}" for k, v in loc.items()])
@formatVersion.setter
def formatVersion(self, value):
self.doc.formatVersion = value
def getAxis(self, axisName):
return self.doc.getAxis(axisName)
# loading and updating fonts
def loadFonts(self, reload=False):
# Load the fonts and find the default candidate based on the info flag
if self.logger is None and self.debug:
# in some cases the UFOProcessor is initialised without debug
# and then it is switched on afterwards. So have to check if
# we have a logger before proceeding.
self.startLog()
self.glyphNames = list({glyphname for font in self.fonts.values() for glyphname in font.keys()})
if self._fontsLoaded and not reload:
if self.debug:
self.logger.info("\t\t-- loadFonts called, but fonts are loaded already and no reload requested")
return
actions = []
if self.debug:
self.logger.info("## loadFonts")
for i, sourceDescriptor in enumerate(self.doc.sources):
if sourceDescriptor.name is None:
# make sure it has a unique name
sourceDescriptor.name = "source.%d" % i
if sourceDescriptor.name not in self.fonts:
if os.path.exists(sourceDescriptor.path):
font = self.fonts[sourceDescriptor.name] = self._instantiateFont(sourceDescriptor.path)
thisLayerName = getDefaultLayerName(font)
if self.debug:
actions.append(f"loaded: {os.path.basename(sourceDescriptor.path)}, layer: {thisLayerName}, format: {font.ufoFormatVersionTuple}, id: {id(font):X}")
else:
self.fonts[sourceDescriptor.name] = None
if self.debug:
actions.append("source ufo not found at %s" % (sourceDescriptor.path))
if self.debug:
for item in actions:
self.logger.infoItem(item)
self._fontsLoaded = True
# XX maybe also make a character map here?
def _logLoadedFonts(self):
# dump info about the loaded fonts to the log
self.logger.info("\t# font status:")
for name, font in self.fonts.items():
self.logger.info(f"\t\tloaded: , id: {id(font):X}, {os.path.basename(font.path)}, format: {font.ufoFormatVersionTuple}")
def updateFonts(self, fontObjects):
# this is to update the loaded fonts.
# it should be the way for an editor to provide a list of fonts that are open
# self.fonts[sourceDescriptor.name] = None
hasUpdated = False
for newFont in fontObjects:
# XX can we update font objects which arent stored on disk?
if newFont.path is not None:
for fontName, haveFont in self.fonts.items():
# XX what happens here when the font did not load?
# haveFont will be None. Scenario: font initially missing, then added.
if haveFont is None:
if self.debug:
self.logger.time()
self.logger.info(f"## updating unloaded source {fontName} with {newFont}")
self.fonts[fontName] = newFont
hasUpdated = True
elif haveFont.path == newFont.path:
if self.debug:
self.logger.time()
self.logger.info(f"## updating source {self.fonts[fontName]} with {newFont}")
self.fonts[fontName] = newFont
hasUpdated = True
if hasUpdated:
self.changed()
def getFonts(self):
# return a list of (font object, location) tuples
fonts = []
for sourceDescriptor in self.sources:
f = self.fonts.get(sourceDescriptor.name)
if f is not None:
fonts.append((f, sourceDescriptor.location))
return fonts
def usesFont(self, fontObj=None):
# return True if font is used in this designspace.
if fontObj is None:
return False
for name, otherFontObj in self.fonts.items():
if otherFontObj is None: continue
if otherFontObj.path == fontObj.path:
# we don't need to know anything else
return True
return False
def getCharacterMapping(self, discreteLocation=None):
# return a unicode -> glyphname map for the default of the system or discreteLocation
characterMap = {}
defaultSourceDescriptor = self.findDefault(discreteLocation=discreteLocation)
if not defaultSourceDescriptor:
return {}
defaultFont = self.fonts.get(defaultSourceDescriptor.name)
if defaultFont is None:
return {}
for glyph in defaultFont:
if glyph.unicodes:
for u in glyph.unicodes:
characterMap[u] = glyph.name
return characterMap
# caching
def __del__(self):
self.changed()
def changed(self):
# clears everything relating to this designspacedocument
# the cache could contain more designspacedocument objects.
if _memoizeCache == None:
# it can happen that changed is called after we're already clearing out.
# Otherwise _memoizeCache will be a dict.
# If it is no longer a dict, it will not have anything left in store.
return
for key in list(_memoizeCache.keys()):
funcName, data = key
if data["self"] == self:
del _memoizeCache[key]
if key in _memoizeStats:
del _memoizeStats[key]
_cachedCallbacksWithGlyphNames = ("getGlyphMutator", "collectSourcesForGlyph", "makeOneGlyph")
def glyphChanged(self, glyphName, includeDependencies=False):
"""Clears this one specific glyph from the memoize cache
includeDependencies = True: check where glyphName is used as a component
and remove those as well.
Note: this must be checked in each discreteLocation separately
because they can have different constructions."""
changedNames = set()
changedNames.add(glyphName)
if includeDependencies:
dependencies = self.getGlyphDependencies(glyphName)
if dependencies:
changedNames.update(dependencies)
remove = []
for key in list(_memoizeCache.keys()):
funcName, data = key
if data["self"] == self and funcName in self._cachedCallbacksWithGlyphNames and data["glyphName"] in changedNames:
remove.append(key)
remove = set(remove)
for key in remove:
del _memoizeCache[key]
if key in _memoizeStats:
del _memoizeStats[key]
def getGlyphDependencies(self, glyphName):
dependencies = set()
discreteLocation = self.getDiscreteLocations()
if not discreteLocation:
discreteLocation = [None]
for discreteLocation in discreteLocation:
# this is expensive, should it be cached?
reverseComponentMap = self.getReverseComponentMapping(discreteLocation)
if glyphName not in reverseComponentMap:
return None
for compName in reverseComponentMap[glyphName]:
dependencies.add(compName)
return dependencies
def glyphsInCache(self):
"""report which glyphs are in the cache at the moment"""
names = set()
for funcName, data in list(_memoizeCache.keys()):
if funcName in self._cachedCallbacksWithGlyphNames and data["self"] == self:
names.add(data["glyphName"])
names = list(names)
names.sort()
return names
# manipulate locations and axes
def findAllDefaults(self):
# collect all default sourcedescriptors for all discrete locations
defaults = []
discreteLocation = self.getDiscreteLocations()
if not discreteLocation:
discreteLocation = [None]
for discreteLocation in discreteLocation:
defaultSourceDescriptor = self.findDefault(discreteLocation=discreteLocation)
defaults.append(defaultSourceDescriptor)
return defaults
def findDefault(self, discreteLocation=None):
defaultDesignLocation = self.newDefaultLocation(bend=True, discreteLocation=discreteLocation)
sources = self.findSourceDescriptorsForDiscreteLocation(discreteLocation)
for s in sources:
if s.location == defaultDesignLocation:
return s
return None
def findDefaultFont(self, discreteLocation=None):
# A system without discrete axes should be able to
# find a default here.
defaultSourceDescriptor = self.findDefault(discreteLocation=discreteLocation)
if defaultSourceDescriptor is None:
return None
# find the font now
return self.fonts.get(defaultSourceDescriptor.name, None)
getNeutralFont = findDefaultFont
def splitLocation(self, location):
# split a location into a continuous and a discrete part
# Note: discrete can be None
discreteAxes = [a.name for a in self.getOrderedDiscreteAxes()]
continuous = {}
discrete = {}
for name, value in location.items():
if name in discreteAxes:
discrete[name] = value
else:
continuous[name] = value
if not discrete:
return continuous, None
return continuous, discrete
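# Usage sketch, assuming `operator` is a loaded instance with a continuous "weight" and a discrete "italic" axis:
#   continuous, discrete = operator.splitLocation({"weight": 400, "italic": 1})
#   # continuous -> {"weight": 400}, discrete -> {"italic": 1}
#   # if the location contains no discrete axes, discrete is returned as None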
def _serializeAnyAxis(self, axis):
if hasattr(axis, "serialize"):
return axis.serialize()
else:
if hasattr(axis, "values"):
# discrete axis does not have serialize method, meh
return dict(
tag=axis.tag,
name=axis.name,
labelNames=axis.labelNames,
minimum=min(axis.values), # XX is this allowed
maximum=max(axis.values), # XX is this allowed
values=axis.values,
default=axis.default,
hidden=axis.hidden,
map=axis.map,
axisOrdering=axis.axisOrdering,
axisLabels=axis.axisLabels,
)
def getSerializedAxes(self, discreteLocation=None):
serialized = []
for axis in self.getOrderedContinuousAxes():
serialized.append(self._serializeAnyAxis(axis))
return serialized
def getContinuousAxesForMutator(self):
# map the axis values?
d = collections.OrderedDict()
for axis in self.getOrderedContinuousAxes():
d[axis.name] = self._serializeAnyAxis(axis)
return d
def _getAxisOrder(self):
# XX this might be different from the axis order labels
return [axisDescriptor.name for axisDescriptor in self.doc.axes]
axisOrder = property(_getAxisOrder, doc="get the axis order from the axis descriptors")
def getFullDesignLocation(self, location):
return self.doc.getFullDesignLocation(location, self.doc)
def getDiscreteLocations(self):
# return a list of all permutated discrete locations
# do we have a list of ordered axes?
values = []
names = []
discreteCoordinates = []
for axis in self.getOrderedDiscreteAxes():
values.append(axis.values)
names.append(axis.name)
if values:
for r in itertools.product(*values):
# make a small dict for the discrete location values
discreteCoordinates.append({a: b for a, b in zip(names, r)})
return discreteCoordinates
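# Illustration, assuming hypothetical discrete axes "italic" (values [0, 1]) and "width" (values [5, 7]):
#   operator.getDiscreteLocations()
#   # -> [{'italic': 0, 'width': 5}, {'italic': 0, 'width': 7},
#   #     {'italic': 1, 'width': 5}, {'italic': 1, 'width': 7}]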
def getOrderedDiscreteAxes(self):
# return the list of discrete axis objects, in the right order
axes = []
for axisName in self.doc.getAxisOrder():
axisObj = self.doc.getAxis(axisName)
if hasattr(axisObj, "values"):
axes.append(axisObj)
return axes
def getOrderedContinuousAxes(self):
# return the list of continuous axis objects, in the right order
axes = []
for axisName in self.doc.getAxisOrder():
axisObj = self.doc.getAxis(axisName)
if not hasattr(axisObj, "values"):
axes.append(axisObj)
return axes
def checkDiscreteAxisValues(self, location):
# check if the discrete values in this location are allowed
for discreteAxis in self.getOrderedDiscreteAxes():
testValue = location.get(discreteAxis.name)
if testValue not in discreteAxis.values:
return False
return True
def collectBaseGlyphs(self, glyphName, location):
# make a list of all baseglyphs needed to build this glyph, at this location
# Note: different discrete values mean that the glyph component set up can be different too
continuousLocation, discreteLocation = self.splitLocation(location)
names = set()
def _getComponentNames(glyph):
# so we can do recursion
names = set()
for comp in glyph.components:
names.add(comp.baseGlyph)
for n in _getComponentNames(glyph.font[comp.baseGlyph]):
names.add(n)
return list(names)
for sourceDescriptor in self.findSourceDescriptorsForDiscreteLocation(discreteLocation):
sourceFont = self.fonts[sourceDescriptor.name]
if glyphName not in sourceFont:
continue
names.update(_getComponentNames(sourceFont[glyphName]))
return list(names)
def findSourceDescriptorsForDiscreteLocation(self, discreteLocDict=None):
# return a list of all sourcedescriptors that share the values in the discrete loc tuple
# so this includes all sourcedescriptors that point to layers
# discreteLocDict {'countedItems': 1.0, 'outlined': 0.0}, {'countedItems': 1.0, 'outlined': 1.0}
sources = []
for s in self.doc.sources:
ok = True
if discreteLocDict is None:
sources.append(s)
continue
for name, value in discreteLocDict.items():
if name in s.location:
if s.location[name] != value:
ok = False
else:
ok = False
continue
if ok:
sources.append(s)
return sources
def getVariationModel(self, items, axes, bias=None):
# Return either a mutatorMath or a varlib.model object for calculating.
if self.useVarlib:
# use the varlib variation model
try:
return dict(), VariationModelMutator(items, axes=self.doc.axes, extrapolate=True)
except TypeError:
if self.debug:
note = "Error while making VariationModelMutator for {loc}:\n{traceback.format_exc()}"
self.logger.info(note)
return {}, None
except (KeyError, AssertionError):
if self.debug:
note = "UFOProcessor.getVariationModel error: {traceback.format_exc()}"
self.logger.info(note)
return {}, None
else:
# use mutatormath model
axesForMutator = self.getContinuousAxesForMutator()
# mutator will be confused by discrete axis values.
# the bias needs to be for the continuous axes only
biasForMutator, _ = self.splitLocation(bias)
return buildMutator(items, axes=axesForMutator, bias=biasForMutator)
return {}, None
def newDefaultLocation(self, bend=False, discreteLocation=None):
# overwrite from fontTools.newDefaultLocation
# we do not want this default location always to be mapped.
loc = collections.OrderedDict()
for axisDescriptor in self.doc.axes:
axisName = axisDescriptor.name
axisValue = axisDescriptor.default
if discreteLocation is not None:
# if we want to find the default for a specific discreteLocation
# we can not use the discrete axis' default value
# -> we have to use the value in the given discreteLocation
if axisDescriptor.name in discreteLocation:
axisValue = discreteLocation[axisDescriptor.name]
else:
axisValue = axisDescriptor.default
if bend:
loc[axisName] = axisDescriptor.map_forward(
axisValue
)
else:
loc[axisName] = axisValue
return loc
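# Sketch (hypothetical axes: continuous "weight" with default 400, discrete "italic" with values [0, 1] and default 0):
#   operator.newDefaultLocation()                                 # -> {"weight": 400, "italic": 0}
#   operator.newDefaultLocation(discreteLocation={"italic": 1})   # -> {"weight": 400, "italic": 1}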
def isAnisotropic(self, location):
# check if the location has anisotropic values
for v in location.values():
if isinstance(v, (list, tuple)):
return True
return False
def splitAnisotropic(self, location):
# split the anisotropic location into a horizontal and vertical component
x = Location()
y = Location()
for dim, val in location.items():
if isinstance(val, (tuple, list)):
x[dim] = val[0]
y[dim] = val[1]
else:
x[dim] = y[dim] = val
return x, y
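# Sketch of the split, assuming a hypothetical anisotropic (x, y) value on the "weight" axis:
#   x, y = operator.splitAnisotropic({"weight": (300, 600), "width": 80})
#   # x -> {"weight": 300, "width": 80}
#   # y -> {"weight": 600, "width": 80}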
# find out stuff about this designspace
def collectForegroundLayerNames(self):
"""Return list of names of the default layers of all the fonts in this system.
Include None and foreground. XX Why
"""
names = set([None, 'foreground'])
for key, font in self.fonts.items():
names.add(getDefaultLayerName(font))
return list(names)
def getReverseComponentMapping(self, discreteLocation=None):
"""Return a dict with reverse component mappings.
Check if we're using fontParts or defcon
Check which part of the designspace we're in.
"""
if discreteLocation is not None:
sources = self.findSourceDescriptorsForDiscreteLocation(discreteLocation)
else:
sources = self.doc.sources
for sourceDescriptor in sources:
isDefault = self.isLocalDefault(sourceDescriptor.location)
if isDefault:
font = self.fonts.get(sourceDescriptor.name)
if font is None:
return {}
if isinstance(font, defcon.objects.font.Font):
# defcon
reverseComponentMapping = {}
for base, comps in font.componentReferences.items():
for c in comps:
if base not in reverseComponentMapping:
reverseComponentMapping[base] = set()
reverseComponentMapping[base].add(c)
else:
if hasattr(font, "getReverseComponentMapping"):
reverseComponentMapping = font.getReverseComponentMapping()
return reverseComponentMapping
return {}
def generateUFOs(self, useVarlib=None):
# generate a UFO for each of the instance locations
previousModel = self.useVarlib
generatedFontPaths = []
if useVarlib is not None:
self.useVarlib = useVarlib
glyphCount = 0
self.loadFonts()
if self.debug:
self.logger.info("## generateUFO")
for instanceDescriptor in self.doc.instances:
if self.debug:
self.logger.infoItem(f"Generating UFO at designspaceLocation {instanceDescriptor.getFullDesignLocation(self.doc)}")
if instanceDescriptor.path is None:
continue
pairs = None
bend = False
font = self.makeInstance(
instanceDescriptor,
# processRules,
glyphNames=self.glyphNames,
decomposeComponents=False,
pairs=pairs,
bend=bend,
)
if self.debug:
self.logger.info(f"\t\t{os.path.basename(instanceDescriptor.path)}")
instanceFolder = os.path.dirname(instanceDescriptor.path)
if instanceFolder and not os.path.exists(instanceFolder):
os.makedirs(instanceFolder)
font.save(instanceDescriptor.path)
generatedFontPaths.append(instanceDescriptor.path)
glyphCount += len(font)
if self.debug:
self.logger.info(f"\t\tGenerated {glyphCount} glyphs altogether.")
self.useVarlib = previousModel
return generatedFontPaths
generateUFO = generateUFOs
@memoize
def getInfoMutator(self, discreteLocation=None):
""" Returns a info mutator for this discrete location """
infoItems = []
foregroundLayers = self.collectForegroundLayerNames()
if discreteLocation is not None and discreteLocation != {}:
sources = self.findSourceDescriptorsForDiscreteLocation(discreteLocation)
else:
sources = self.doc.sources
for sourceDescriptor in sources:
if sourceDescriptor.layerName not in foregroundLayers:
continue
continuous, discrete = self.splitLocation(sourceDescriptor.location)
loc = Location(continuous)
sourceFont = self.fonts[sourceDescriptor.name]
if sourceFont is None:
continue
if hasattr(sourceFont.info, "toMathInfo"):
infoItems.append((loc, sourceFont.info.toMathInfo()))
else:
infoItems.append((loc, self.mathInfoClass(sourceFont.info)))
infoBias = self.newDefaultLocation(bend=True, discreteLocation=discreteLocation)
bias, self._infoMutator = self.getVariationModel(infoItems, axes=self.getSerializedAxes(), bias=infoBias)
return self._infoMutator
@memoize
def getLibEntryMutator(self, discreteLocation=None):
""" Returns a mutator for selected lib keys store in self.libKeysForProcessing
If there is no entry in the lib, it will ignore the source
If there are no libkeys, it will return None.
"""
libMathItems = []
allValues = {}
foregroundLayers = self.collectForegroundLayerNames()
if discreteLocation is not None and discreteLocation != {}:
sources = self.findSourceDescriptorsForDiscreteLocation(discreteLocation)
else:
sources = self.doc.sources
for sourceDescriptor in sources:
#if sourceDescriptor.layerName not in foregroundLayers:
# continue
continuous, discrete = self.splitLocation(sourceDescriptor.location)
loc = Location(continuous)
sourceFont = self.fonts[sourceDescriptor.name]
if sourceFont is None:
continue
mathDict = Location() # we're using this for its math dict skills
for libKey in self.libKeysForProcessing:
if libKey in sourceFont.lib:
# only add values we know
mathDict[libKey] = sourceFont.lib[libKey]
libMathItems.append((loc, mathDict))
if not libMathItems:
# no keys, no mutator.
return None
libMathBias = self.newDefaultLocation(bend=True, discreteLocation=discreteLocation)
bias, libMathMutator = self.getVariationModel(libMathItems, axes=self.getSerializedAxes(), bias=libMathBias)
return libMathMutator
@memoize
def getKerningMutator(self, pairs=None, discreteLocation=None):
""" Return a kerning mutator, collect the sources, build mathGlyphs.
If no pairs are given: calculate the whole table.
If pairs are given then query the sources for a value and make a mutator only with those values.
"""
if discreteLocation is not None:
sources = self.findSourceDescriptorsForDiscreteLocation(discreteLocation)
else:
sources = self.sources
kerningItems = []
foregroundLayers = self.collectForegroundLayerNames()
if pairs is None:
for sourceDescriptor in sources:
if sourceDescriptor.layerName not in foregroundLayers:
continue
if not sourceDescriptor.muteKerning:
continuous, discrete = self.splitLocation(sourceDescriptor.location)
loc = Location(continuous)
sourceFont = self.fonts[sourceDescriptor.name]
if sourceFont is None:
continue
# this makes assumptions about the groups of all sources being the same.
kerningItems.append((loc, self.mathKerningClass(sourceFont.kerning, sourceFont.groups)))
else:
self._kerningMutatorPairs = pairs
for sourceDescriptor in sources:
# XXX check sourceDescriptor layerName, only foreground should contribute
if sourceDescriptor.layerName is not None:
continue
if not os.path.exists(sourceDescriptor.path):
continue
if not sourceDescriptor.muteKerning:
sourceFont = self.fonts[sourceDescriptor.name]
if sourceFont is None:
continue
continuous, discrete = self.splitLocation(sourceDescriptor.location)
loc = Location(continuous)
# XXX can we get the kern value from the fontparts kerning object?
kerningItem = self.mathKerningClass(sourceFont.kerning, sourceFont.groups)
if kerningItem is not None:
sparseKerning = {}
for pair in pairs:
v = kerningItem.get(pair)
if v is not None:
sparseKerning[pair] = v
kerningItems.append((loc, self.mathKerningClass(sparseKerning)))
kerningBias = self.newDefaultLocation(bend=True, discreteLocation=discreteLocation)
bias, thing = self.getVariationModel(kerningItems, axes=self.getSerializedAxes(), bias=kerningBias) #xx
bias, self._kerningMutator = self.getVariationModel(kerningItems, axes=self.getSerializedAxes(), bias=kerningBias)
return self._kerningMutator
@memoize
def getGlyphMutator(self, glyphName, decomposeComponents=False, **discreteLocation):
"""make a mutator / varlib object for glyphName, with the sources for the given discrete location"""
items, unicodes = self.collectSourcesForGlyph(glyphName, decomposeComponents=decomposeComponents, **discreteLocation)
new = []
for a, b, c in items:
if hasattr(b, "toMathGlyph"):
# note: calling toMathGlyph ignores the mathGlyphClass preference
# maybe the self.mathGlyphClass is not necessary?
new.append((a, b.toMathGlyph(strict=self.strict)))
else:
new.append((a, self.mathGlyphClass(b, strict=self.strict)))
thing = None
thisBias = self.newDefaultLocation(bend=True, discreteLocation=discreteLocation)
try:
serializedAxes = self.getSerializedAxes()
bias, thing = self.getVariationModel(new, axes=serializedAxes, bias=thisBias) # xx
except Exception:
error = traceback.format_exc()
note = f"Error in getGlyphMutator for {glyphName}:\n{error}"
if self.debug:
self.logger.info(note)
return thing, unicodes
def isLocalDefault(self, location):
# return True if location is a local default
# check for bending
defaults = {}
for aD in self.doc.axes:
defaults[aD.name] = aD.map_forward(aD.default)
for axisName, value in location.items():
if defaults[axisName] != value:
return False
return True
def axesByName(self):
# return a dict[axisName]: axisDescriptor
axes = {}
for aD in self.doc.axes:
axes[aD.name] = aD
return axes
def locationWillClip(self, location):
# return True if this location will be clipped.
clipped = self.clipDesignLocation(location)
return clipped != location
def getAxisExtremes(self, axisRecord):
# return the axis values in designspace coordinates
if axisRecord.map is not None:
aD_minimum = axisRecord.map_forward(axisRecord.minimum)
aD_maximum = axisRecord.map_forward(axisRecord.maximum)
aD_default = axisRecord.map_forward(axisRecord.default)
return aD_minimum, aD_default, aD_maximum
return axisRecord.minimum, axisRecord.default, axisRecord.maximum
def clipDesignLocation(self, location):
# return a copy of the design location without extrapolation
# assume location is in designspace coordinates.
# use map_forward on axis extremes,
axesByName = self.axesByName()
new = {}
for axisName, value in location.items():
aD = axesByName.get(axisName)
clippedValues = []
if isinstance(value, tuple):
testValues = list(value)
else:
testValues = [value]
for value in testValues:
if hasattr(aD, "values"):
# a discrete axis
# will there be mapped discrete values?
mx = max(aD.values)
mn = min(aD.values)
if value in aD.values:
clippedValues.append(value)
elif value > mx:
clippedValues.append(mx)
elif value < mn:
clippedValues.append(mn)
else:
# do we want to test if the value is part of the values allowed in this axes?
# or do we just assume it is correct?
# possibility: snap to the nearest value?
clippedValues.append(value)
else:
# a continuous axis
aD_minimum = aD.map_forward(aD.minimum)
aD_maximum = aD.map_forward(aD.maximum)
if value < aD_minimum:
clippedValues.append(aD_minimum)
elif value > aD_maximum:
clippedValues.append(aD_maximum)
else:
clippedValues.append(value)
if len(clippedValues)==1:
new[axisName] = clippedValues[0]
elif len(clippedValues)==2:
new[axisName] = tuple(clippedValues)
return new
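# Clipping sketch, assuming a hypothetical continuous "weight" axis with designspace extremes 100..900:
#   operator.clipDesignLocation({"weight": 1200})        # -> {"weight": 900}
#   operator.clipDesignLocation({"weight": (50, 400)})   # anisotropic values clip per component -> {"weight": (100, 400)}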
def filterThisLocation(self, location, mutedAxes=None):
# return location with the axes in mutedAxes removed
# this means checking if the location is a non-default value
if not mutedAxes:
return False, location
defaults = {}
ignoreSource = False
for aD in self.doc.axes:
defaults[aD.name] = aD.default
new = {}
new.update(location)
for mutedAxisName in mutedAxes:
if mutedAxisName not in location:
continue
if mutedAxisName not in defaults:
continue
if location[mutedAxisName] != defaults.get(mutedAxisName):
ignoreSource = True
del new[mutedAxisName]
return ignoreSource, new
@memoize
def collectSourcesForGlyph(self, glyphName, decomposeComponents=False, discreteLocation=None, asMathGlyph=True):
""" Return all source glyph objects.
+ either as mathglyphs (for use in mutators)
+ or source glyphs straight from the fonts
decomposeComponents = True causes the source glyphs to be decomposed first
before building the mutator. That gives you instances that do not depend
on a complete font. If you're calculating previews for instance.
findSourceDescriptorsForDiscreteLocation returns sources from layers as well
"""
items = []
empties = []
foundEmpty = False
# is bend=True necessary here?
defaultLocation = self.newDefaultLocation(bend=True, discreteLocation=discreteLocation)
#
if discreteLocation is not None:
sources = self.findSourceDescriptorsForDiscreteLocation(discreteLocation)
else:
sources = self.doc.sources
unicodes = set() # unicodes for this glyph
for sourceDescriptor in sources:
if not os.path.exists(sourceDescriptor.path):
#kthxbai
note = "\tMissing UFO at %s" % sourceDescriptor.path
if self.debug:
self.logger.info(note)
continue
if glyphName in sourceDescriptor.mutedGlyphNames:
if self.debug:
self.logger.info(f"\t\tglyphName {glyphName} is muted")
continue
thisIsDefault = self.isLocalDefault(sourceDescriptor.location)
ignoreSource, filteredLocation = self.filterThisLocation(sourceDescriptor.location, self.mutedAxisNames)
if ignoreSource:
continue
f = self.fonts.get(sourceDescriptor.name)
if f is None:
continue
loc = Location(sourceDescriptor.location)
sourceLayer = f
if glyphName not in f:
# log this>
continue
layerName = getDefaultLayerName(f)
sourceGlyphObject = None
# handle source layers
if sourceDescriptor.layerName is not None:
# start looking for a layer
# Do not bother for mutatorMath designspaces
layerName = sourceDescriptor.layerName
sourceLayer = getLayer(f, sourceDescriptor.layerName)
if sourceLayer is None:
continue
if glyphName not in sourceLayer:
# start looking for a glyph
# this might be a support in a sparse layer
# so we're skipping!
continue
# still have to check if the sourcelayer glyph is empty
if glyphName not in sourceLayer:
continue
else:
sourceGlyphObject = sourceLayer[glyphName]
if sourceGlyphObject.unicodes is not None:
for u in sourceGlyphObject.unicodes:
unicodes.add(u)
if checkGlyphIsEmpty(sourceGlyphObject, allowWhiteSpace=True):
foundEmpty = True
# sourceGlyphObject = None
# continue
if decomposeComponents:
# what about decomposing glyphs in a partial font?
temp = self.glyphClass()
sourceGlyphObject.drawPoints(
DecomposePointPen(sourceLayer, temp.getPointPen())
)
temp.width = sourceGlyphObject.width
temp.name = sourceGlyphObject.name
temp.anchors = [dict(
x=anchor.x,
y=anchor.y,
name=anchor.name,
identifier=anchor.identifier,
color=anchor.color
) for anchor in sourceGlyphObject.anchors]
temp.guidelines = [dict(
x=guideline.x,
y=guideline.y,
angle=guideline.angle,
name=guideline.name,
identifier=guideline.identifier,
color=guideline.color
) for guideline in sourceGlyphObject.guidelines]
processThis = temp
else:
processThis = sourceGlyphObject
sourceInfo = dict(
source=f.path,
glyphName=glyphName,
layerName=layerName,
location=filteredLocation, # sourceDescriptor.location,
sourceName=sourceDescriptor.name,
)
if asMathGlyph:
if hasattr(processThis, "toMathGlyph"):
processThis = processThis.toMathGlyph(strict=self.strict)
else:
processThis = self.mathGlyphClass(processThis, strict=self.strict)
continuous, discrete = self.splitLocation(loc)
items.append((continuous, processThis, sourceInfo))
empties.append((thisIsDefault, foundEmpty))
# check the empties:
# if the default glyph is empty, then all must be empty
# if the default glyph is not empty then none can be empty
checkedItems = []
emptiesAllowed = False
# first check if the default is empty.
# remember that the sources can be in any order
for i, p in enumerate(empties):
isDefault, isEmpty = p
if isDefault and isEmpty:
emptiesAllowed = True
# now we know what to look for
if not emptiesAllowed:
for i, p in enumerate(empties):
isDefault, isEmpty = p
if not isEmpty:
checkedItems.append(items[i])
else:
for i, p in enumerate(empties):
isDefault, isEmpty = p
if isEmpty:
checkedItems.append(items[i])
return checkedItems, unicodes
def collectMastersForGlyph(self, glyphName, decomposeComponents=False, discreteLocation=None):
# compatibility thing for designspaceProblems.
checkedItems, unicodes = self.collectSourcesForGlyph(glyphName, decomposeComponents=decomposeComponents, discreteLocation=discreteLocation)
return checkedItems
def getLocationType(self, location):
"""Determine the type of the location:
continuous / discrete
anisotropic / normal.
"""
continuousLocation, discreteLocation = self.splitLocation(location)
if not self.extrapolate:
# Axis values are in userspace, so this needs to happen before bending
continuousLocation = self.clipDesignLocation(continuousLocation)
#font = self._instantiateFont(None)
loc = Location(continuousLocation)
anisotropic = False
locHorizontal = locVertical = loc
if self.isAnisotropic(loc):
anisotropic = True
locHorizontal, locVertical = self.splitAnisotropic(loc)
return anisotropic, continuousLocation, discreteLocation, locHorizontal, locVertical
def collectSkippedGlyphs(self):
# return a list of all the glyphnames listed in public.skipExportGlyphs
names = []
for fontPath, fontObj in self.fonts.items():
for name in fontObj.lib.get('public.skipExportGlyphs', []):
if name not in names:
names.append(name)
if self.debug:
self.logger.info(f"collectSkippedGlyphs: {names}")
return names
def makeInstance(self, instanceDescriptor,
doRules=None,
glyphNames=None,
decomposeComponents=False,
pairs=None,
bend=False):
""" Generate a font object for this instance """
if doRules is not None:
warn('The doRules argument in DesignSpaceProcessor.makeInstance() is deprecated', DeprecationWarning, stacklevel=2)
if isinstance(instanceDescriptor, dict):
instanceDescriptor = self.doc.writerClass.instanceDescriptorClass(**instanceDescriptor)
# hmm getFullDesignLocation does not support anisotropic locations?
fullDesignLocation = instanceDescriptor.getFullDesignLocation(self.doc)
anisotropic, continuousLocation, discreteLocation, locHorizontal, locVertical = self.getLocationType(fullDesignLocation)
self.loadFonts()
if not self.extrapolate:
# Axis values are in userspace, so this needs to happen before bending
continuousLocation = self.clipDesignLocation(continuousLocation)
font = self._instantiateFont(None)
loc = Location(continuousLocation)
anisotropic = False
locHorizontal = locVertical = loc
if self.isAnisotropic(loc):
anisotropic = True
locHorizontal, locVertical = self.splitAnisotropic(loc)
if self.debug:
self.logger.info(f"\t\t\tAnisotropic location for \"{instanceDescriptor.name}\"\n\t\t\t{fullDesignLocation}")
# makeOneKerning
# discreteLocation ?
if instanceDescriptor.kerning:
kerningObject = self.makeOneKerning(fullDesignLocation, pairs=pairs)
if kerningObject is not None:
kerningObject.extractKerning(font)
# makeOneInfo
infoInstanceObject = self.makeOneInfo(fullDesignLocation, roundGeometry=self.roundGeometry, clip=False)
if infoInstanceObject is not None:
infoInstanceObject.extractInfo(font.info)
font.info.familyName = instanceDescriptor.familyName
font.info.styleName = instanceDescriptor.styleName
font.info.postscriptFontName = instanceDescriptor.postScriptFontName # yikes, note the differences in capitalisation..
font.info.styleMapFamilyName = instanceDescriptor.styleMapFamilyName
font.info.styleMapStyleName = instanceDescriptor.styleMapStyleName
# calculate selected lib key values here
libMathMutator = self.getLibEntryMutator(discreteLocation=discreteLocation)
if self.debug:
self.logger.info(f"\t\t\tlibMathMutator \"{libMathMutator}\"\n\t\t\t{discreteLocation}")
if libMathMutator:
# use locHorizontal in case this was anisotropic.
# remember: libMathDict is a Location object,
# each key in the location is the libKey
# each value is the calculated value
libMathDict = libMathMutator.makeInstance(locHorizontal)
if libMathDict:
for libKey, mutatedValue in libMathDict.items():
# only add the value to the lib if it is not 0.
# otherwise it will always add it? Not sure?
font.lib[libKey] = mutatedValue
if self.debug:
self.logger.info(f"\t\t\tlibMathMutator: libKey \"{libKey}: {mutatedValue}")
defaultSourceFont = self.findDefaultFont(discreteLocation=discreteLocation)
# found a default source font
if defaultSourceFont:
# copy info
self._copyFontInfo(defaultSourceFont.info, font.info)
# copy lib
for key, value in defaultSourceFont.lib.items():
# don't overwrite the keys we calculated
if key in self.libKeysForProcessing: continue
font.lib[key] = value
# copy groups
for key, value in defaultSourceFont.groups.items():
font.groups[key] = value
# copy features
font.features.text = defaultSourceFont.features.text
# ok maybe now it is time to calculate some glyphs
# glyphs
if glyphNames:
selectedGlyphNames = glyphNames
else:
# since all glyphs are processed, decomposing components is unnecessary
# maybe that's confusing and components should be decomposed anyway
# if decomposeComponents was set to True?
decomposeComponents = False
selectedGlyphNames = self.glyphNames
if 'public.glyphOrder' not in font.lib.keys():
# should be the glyphorder from the default, yes?
font.lib['public.glyphOrder'] = selectedGlyphNames
# remove skippable glyphs
toSkip = self.collectSkippedGlyphs()
selectedGlyphNames = [name for name in selectedGlyphNames if name not in toSkip]
for glyphName in selectedGlyphNames:
glyphMutator, unicodes = self.getGlyphMutator(glyphName, decomposeComponents=decomposeComponents, discreteLocation=discreteLocation)
if glyphMutator is None:
if self.debug:
note = f"makeInstance: Could not make mutator for glyph {glyphName}"
self.logger.info(note)
continue
font.newGlyph(glyphName)
font[glyphName].clear()
font[glyphName].unicodes = unicodes
try:
if not self.isAnisotropic(continuousLocation):
glyphInstanceObject = glyphMutator.makeInstance(continuousLocation, bend=bend)
else:
# split anisotropic location into horizontal and vertical components
horizontalGlyphInstanceObject = glyphMutator.makeInstance(locHorizontal, bend=bend)
verticalGlyphInstanceObject = glyphMutator.makeInstance(locVertical, bend=bend)
# merge them again in a beautiful single line:
glyphInstanceObject = (1, 0) * horizontalGlyphInstanceObject + (0, 1) * verticalGlyphInstanceObject
except IndexError:
# alignment problem with the data?
if self.debug:
note = "makeInstance: Quite possibly some sort of data alignment error in %s" % glyphName
self.logger.info(note)
continue
if self.roundGeometry:
try:
glyphInstanceObject = glyphInstanceObject.round()
except AttributeError:
# what are we catching here?
# math objects without a round method?
if self.debug:
note = f"makeInstance: no round method for {glyphInstanceObject} ?"
self.logger.info(note)
try:
# File "/Users/erik/code/ufoProcessor/Lib/ufoProcessor/__init__.py", line 649, in makeInstance
# glyphInstanceObject.extractGlyph(font[glyphName], onlyGeometry=True)
# File "/Applications/RoboFont.app/Contents/Resources/lib/python3.6/fontMath/mathGlyph.py", line 315, in extractGlyph
# glyph.anchors = [dict(anchor) for anchor in self.anchors]
# File "/Applications/RoboFont.app/Contents/Resources/lib/python3.6/fontParts/base/base.py", line 103, in __set__
# raise FontPartsError("no setter for %r" % self.name)
# fontParts.base.errors.FontPartsError: no setter for 'anchors'
if hasattr(font[glyphName], "fromMathGlyph"):
font[glyphName].fromMathGlyph(glyphInstanceObject)
else:
glyphInstanceObject.extractGlyph(font[glyphName], onlyGeometry=True)
except TypeError:
# this causes ruled glyphs to end up in the wrong glyphname
# but defcon2 objects don't support it
pPen = font[glyphName].getPointPen()
font[glyphName].clear()
glyphInstanceObject.drawPoints(pPen)
font[glyphName].width = glyphInstanceObject.width
# add designspace location to lib
font.lib['ufoProcessor.fullDesignspaceLocation'] = list(instanceDescriptor.getFullDesignLocation(self.doc).items())
if self.useVarlib:
font.lib['ufoProcessor.mathmodel'] = "fonttools.varlib"
else:
font.lib['ufoProcessor.mathmodel'] = "mutatorMath"
if self.debug:
self.logger.info(f"\t\t\t{len(selectedGlyphNames)} glyphs added")
return font
def locationToDescriptiveString(self, loc):
# make a nice descriptive string from the location
# Check if the discrete location is None.
t = []
cl, dl = self.splitLocation(loc)
for continuousAxis in sorted(cl.keys()):
t.append(f'{continuousAxis}_{cl[continuousAxis]}')
if dl is not None:
for discreteAxis in sorted(dl.keys()):
t.append(f'{discreteAxis}_{dl[discreteAxis]}')
return '_'.join(t)
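# Example output, assuming a hypothetical location with continuous "weight" and discrete "italic":
#   operator.locationToDescriptiveString({"weight": 400, "italic": 1})   # -> "weight_400_italic_1"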
def pathForInstance(self, instanceDescriptor):
# generate the complete path for this instance descriptor.
if self.path is not None and instanceDescriptor.filename is not None:
return os.path.abspath(os.path.join(os.path.dirname(self.path), instanceDescriptor.filename))
return None
def makeOneInstance(self, location,
doRules=None,
glyphNames=None,
decomposeComponents=False,
pairs=None,
bend=False):
# make one instance for this location. This is a shortcut for making an
# instanceDescriptor. So it makes some assumptions about the font names.
# Otherwise all the geometry will be exactly what it needs to be.
self.loadFonts()
continuousLocation, discreteLocation = self.splitLocation(location)
defaultFont = self.findDefaultFont(discreteLocation=discreteLocation)
if defaultFont is not None:
instanceFamilyName = defaultFont.info.familyName
else:
if self.doc.path is not None:
instanceFamilyName = os.path.splitext(self.doc.path)[0]
else:
instanceFamilyName = "UFOOperatorInstance"
tempInstanceDescriptor = InstanceDescriptor()
tempInstanceDescriptor.location = location
tempInstanceDescriptor.familyName = instanceFamilyName
tempInstanceDescriptor.styleName = self.locationToDescriptiveString(location)
return self.makeInstance(tempInstanceDescriptor, doRules=doRules, glyphNames=glyphNames, decomposeComponents=decomposeComponents, pairs=pairs, bend=bend)
def randomLocation(self, extrapolate=0, anisotropic=False, roundValues=True, discreteLocation=None):
"""A good random location, for quick testing and entertainment
extrapolate: is a factor of the (max-min) distance. 0 = nothing, 0.1 = 0.1 * (max - min)
anisotropic= True: *all* continuous axes get separate x, y values
for discrete axes: random choice from the defined values
for continuous axes: interpolated value between axis.minimum and axis.maximum
if discreteLocation is given, make a random location for the continuous part.
assuming we want this location for testing the ufoOperator machine:
we will eventually need a designspace location, not a userspace location.
"""
workLocation = {}
if discreteLocation:
workLocation.update(discreteLocation)
else:
for aD in self.getOrderedDiscreteAxes():
workLocation[aD.name] = random.choice(aD.values)
for aD in self.getOrderedContinuousAxes():
# use the map on the extremes to make sure we randomise between the proper extremes.
aD_minimum = aD.map_forward(aD.minimum)
aD_maximum = aD.map_forward(aD.maximum)
if extrapolate:
delta = (aD.maximum - aD.minimum)
extraMinimum = aD_minimum - extrapolate * delta
extraMaximum = aD_maximum + extrapolate * delta
else:
extraMinimum = aD_minimum
extraMaximum = aD_maximum
if anisotropic:
x = ip(extraMinimum, extraMaximum, random.random())
y = ip(extraMinimum, extraMaximum, random.random())
if roundValues:
x = round(x)
y = round(y)
workLocation[aD.name] = (x, y)
else:
v = ip(extraMinimum, extraMaximum, random.random())
if roundValues:
v = round(v)
workLocation[aD.name] = v
return workLocation
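# Usage sketch (axis names are hypothetical; values differ per call):
#   loc = operator.randomLocation(extrapolate=0.1, discreteLocation={"italic": 1})
#   # e.g. {"italic": 1, "weight": 473} -- a designspace location that can be fed to makeOneInstance()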
def getLocationsForFont(self, fontObj):
# returns the locations this fontObj is used at, in this designspace
# returns [], [] if the fontObj is not used at all
# returns [loc], [] if the fontObj has no discrete location.
# Note: this returns *a list* as one fontObj can be used at multiple locations in a designspace.
# Note: fontObj must have a path.
discreteLocations = []
continuousLocations = []
for s in self.sources:
if s.path == fontObj.path:
cl, dl = self.splitLocation(s.location)
discreteLocations.append(dl)
continuousLocations.append(cl)
return continuousLocations, discreteLocations
# @memoize
def makeFontProportions(self, location, bend=False, roundGeometry=True):
"""Calculate the basic font proportions for this location, to map out expectations for drawing"""
self.loadFonts()
continuousLocation, discreteLocation = self.splitLocation(location)
infoMutator = self.getInfoMutator(discreteLocation=discreteLocation)
data = dict(unitsPerEm=1000, ascender=750, descender=-250, xHeight=500)
if infoMutator is None:
return data
if not self.isAnisotropic(continuousLocation):
infoInstanceObject = infoMutator.makeInstance(continuousLocation, bend=bend)
else:
locHorizontal, locVertical = self.splitAnisotropic(continuousLocation)
horizontalInfoInstanceObject = infoMutator.makeInstance(locHorizontal, bend=bend)
verticalInfoInstanceObject = infoMutator.makeInstance(locVertical, bend=bend)
# merge them again
infoInstanceObject = (1, 0) * horizontalInfoInstanceObject + (0, 1) * verticalInfoInstanceObject
if roundGeometry:
infoInstanceObject = infoInstanceObject.round()
data = dict(unitsPerEm=infoInstanceObject.unitsPerEm, ascender=infoInstanceObject.ascender, descender=infoInstanceObject.descender, xHeight=infoInstanceObject.xHeight)
return data
@memoize
def makeOneGlyph(self, glyphName, location, decomposeComponents=True, useVarlib=False, roundGeometry=False, clip=False):
"""
glyphName:
location: location including discrete axes, in **designspace** coordinates.
decomposeComponents: decompose all components so we get a proper representation of the shape
useVarlib: use varlib as mathmodel. Otherwise it is mutatorMath
roundGeometry: round all geometry to integers
clip: restrict axis values to the defined minimum and maximum
+ Supports extrapolation for varlib and mutatormath: though the results can be different
+ Supports anisotropic locations for varlib and mutatormath. Obviously this will not be present in any Variable font exports.
Returns: a mathglyph, results are cached
"""
self.loadFonts()
continuousLocation, discreteLocation = self.splitLocation(location)
bend=False #
if not self.extrapolate:
# Axis values are in userspace, so this needs to happen before bending.
continuousLocation = self.clipDesignLocation(continuousLocation)
# check if the discreteLocation, if there is one, is within limits
if discreteLocation is not None:
if not self.checkDiscreteAxisValues(discreteLocation):
if self.debug:
self.logger.info(f"\t\tmakeOneGlyph reports: {location} has illegal value for discrete location")
return None
previousModel = self.useVarlib
self.useVarlib = useVarlib
glyphInstanceObject = None
glyphMutator, unicodes = self.getGlyphMutator(glyphName, decomposeComponents=decomposeComponents, discreteLocation=discreteLocation)
if not glyphMutator: return None
try:
if not self.isAnisotropic(location):
glyphInstanceObject = glyphMutator.makeInstance(continuousLocation, bend=bend)
else:
if self.debug:
self.logger.info(f"\t\tmakeOneGlyph anisotropic location: {location}")
loc = Location(continuousLocation)
locHorizontal, locVertical = self.splitAnisotropic(loc)
# split anisotropic location into horizontal and vertical components
horizontalGlyphInstanceObject = glyphMutator.makeInstance(locHorizontal, bend=bend)
verticalGlyphInstanceObject = glyphMutator.makeInstance(locVertical, bend=bend)
# merge them again
glyphInstanceObject = (1, 0) * horizontalGlyphInstanceObject + (0, 1) * verticalGlyphInstanceObject
if self.debug:
self.logger.info(f"makeOneGlyph anisotropic glyphInstanceObject {glyphInstanceObject}")
except IndexError:
# alignment problem with the data?
if self.debug:
note = "makeOneGlyph: Quite possibly some sort of data alignment error in %s" % glyphName
self.logger.info(note)
return None
if glyphInstanceObject:
glyphInstanceObject.unicodes = unicodes
if roundGeometry:
glyphInstanceObject.round()
self.useVarlib = previousModel
return glyphInstanceObject
def makeOneInfo(self, location, roundGeometry=False, clip=False):
""" Make the fontMath.mathInfo object for this location.
You need to extract this to an instance font.
location: location including discrete axes, in **designspace** coordinates.
"""
if self.debug:
self.logger.info(f"\t\t\tmakeOneInfo for {location}")
self.loadFonts()
bend = False
anisotropic, continuousLocation, discreteLocation, locHorizontal, locVertical = self.getLocationType(location)
# so we can take the math object that comes out of the calculation
infoMutator = self.getInfoMutator(discreteLocation=discreteLocation)
infoInstanceObject = None
if infoMutator is not None:
if not anisotropic:
infoInstanceObject = infoMutator.makeInstance(continuousLocation, bend=bend)
else:
horizontalInfoInstanceObject = infoMutator.makeInstance(locHorizontal, bend=bend)
verticalInfoInstanceObject = infoMutator.makeInstance(locVertical, bend=bend)
# merge them again
infoInstanceObject = (1,0) * horizontalInfoInstanceObject + (0,1) * verticalInfoInstanceObject
if self.roundGeometry:
infoInstanceObject = infoInstanceObject.round()
if self.debug:
if infoInstanceObject is not None:
self.logger.info(f"\t\t\t\tmakeOneInfo outcome: {infoInstanceObject}")
else:
self.logger.info(f"\t\t\t\tmakeOneInfo outcome: None")
return infoInstanceObject
def makeOneKerning(self, location, pairs=None):
"""
Make the fontMath.mathKerning for this location.
location: location including discrete axes, in **designspace** coordinates.
pairs: a list of pairs, if you want to get a subset
"""
if self.debug:
self.logger.info(f"\t\t\tmakeOneKerning for {location}")
self.loadFonts()
bend = False
kerningObject = None
anisotropic, continuousLocation, discreteLocation, locHorizontal, locVertical = self.getLocationType(location)
if pairs:
try:
kerningMutator = self.getKerningMutator(pairs=pairs, discreteLocation=discreteLocation)
kerningObject = kerningMutator.makeInstance(locHorizontal, bend=bend)
except Exception:
note = f"makeOneKerning: Could not make kerning for {location}\n{traceback.format_exc()}"
if self.debug:
self.logger.info(note)
else:
kerningMutator = self.getKerningMutator(discreteLocation=discreteLocation)
if kerningMutator is not None:
kerningObject = kerningMutator.makeInstance(locHorizontal, bend=bend)
# extract the object later
if self.debug:
self.logger.info(f"\t\t\t\t{len(kerningObject.keys())} kerning pairs added")
if self.roundGeometry:
kerningObject.round()
if self.debug:
if kerningObject is not None:
self.logger.info(f"\t\t\t\tmakeOneKerning outcome: {kerningObject.items()}")
else:
self.logger.info(f"\t\t\t\tmakeOneKerning outcome: None")
return kerningObject
def _copyFontInfo(self, sourceInfo, targetInfo):
""" Copy the non-calculating fields from the source info."""
infoAttributes = [
"versionMajor",
"versionMinor",
"copyright",
"trademark",
"note",
"openTypeGaspRangeRecords",
"openTypeHeadCreated",
"openTypeHeadFlags",
"openTypeNameDesigner",
"openTypeNameDesignerURL",
"openTypeNameManufacturer",
"openTypeNameManufacturerURL",
"openTypeNameLicense",
"openTypeNameLicenseURL",
"openTypeNameVersion",
"openTypeNameUniqueID",
"openTypeNameDescription",
"#openTypeNamePreferredFamilyName",
"#openTypeNamePreferredSubfamilyName",
"#openTypeNameCompatibleFullName",
"openTypeNameSampleText",
"openTypeNameWWSFamilyName",
"openTypeNameWWSSubfamilyName",
"openTypeNameRecords",
"openTypeOS2Selection",
"openTypeOS2VendorID",
"openTypeOS2Panose",
"openTypeOS2FamilyClass",
"openTypeOS2UnicodeRanges",
"openTypeOS2CodePageRanges",
"openTypeOS2Type",
"postscriptIsFixedPitch",
"postscriptForceBold",
"postscriptDefaultCharacter",
"postscriptWindowsCharacterSet"
]
for infoAttribute in infoAttributes:
copy = False
if self.ufoVersion == 1 and infoAttribute in fontInfoAttributesVersion1:
copy = True
elif self.ufoVersion == 2 and infoAttribute in fontInfoAttributesVersion2:
copy = True
elif self.ufoVersion == 3 and infoAttribute in fontInfoAttributesVersion3:
copy = True
if copy:
value = getattr(sourceInfo, infoAttribute)
setattr(targetInfo, infoAttribute, value)
|
class UFOOperator(object):
def __init__(self, pathOrObject=None, ufoVersion=3, useVarlib=True, extrapolate=False, strict=False, debug=False):
pass
def startLog(self):
pass
def _instantiateFont(self, path):
''' Return an instance of a font object with all the given subclasses'''
pass
def read(self, path):
'''Wrap a DesignSpaceDocument'''
pass
def write(self, path):
'''Write the wrapped DesignSpaceDocument'''
pass
def addAxis(self, axisDescriptor):
pass
def addAxisDescriptor(self, **kwargs):
pass
def addLocationLabel(self, locationLabelDescriptor):
pass
def addLocationLabelDescriptor(self, **kwargs):
pass
def addRule(self, ruleDescriptor):
pass
def addRuleDescriptor(self, **kwargs):
pass
def addSource(self, sourceDescriptor):
pass
def addSourceDescriptor(self, **kwargs):
pass
def addInstance(self, instanceDescriptor):
pass
def addInstanceDescriptor(self, **kwargs):
pass
def addVariableFont(self, variableFontDescriptor):
pass
def addVariableFontDescriptor(self, **kwargs):
pass
def getVariableFonts(self):
pass
def getInterpolableUFOOperators(self, useVariableFonts=True):
pass
@property
def path(self):
pass
@path.setter
def path(self):
pass
@property
def lib(self):
pass
@property
def axes(self):
pass
@property
def sources(self):
pass
@property
def instances(self):
pass
@property
def formatVersion(self):
pass
@property
def rules(self):
pass
@property
def rulesProcessingLast(self):
pass
@property
def map_backward(self):
pass
@property
def labelForUserLocation(self):
pass
@property
def locationLabels(self):
pass
@locationLabels.setter
def locationLabels(self):
pass
@property
def variableFonts(self):
pass
@property
def writerClass(self):
pass
def nameLocation(self, loc):
pass
@formatVersion.setter
def formatVersion(self):
pass
def getAxis(self, axisName):
pass
def loadFonts(self, reload=False):
pass
def _logLoadedFonts(self):
pass
def updateFonts(self, fontObjects):
pass
def getFonts(self):
pass
def usesFont(self, fontObj=None):
pass
def getCharacterMapping(self, discreteLocation=None):
pass
def __del__(self):
pass
def changed(self):
pass
def glyphChanged(self, glyphName, includeDependencies=False):
'''Clears this one specific glyph from the memoize cache
includeDependencies = True: check where glyphName is used as a component
and remove those as well.
Note: this must be checked in each discreteLocation separately
because they can have different constructions.'''
pass
def getGlyphDependencies(self, glyphName):
pass
def glyphsInCache(self):
'''report which glyphs are in the cache at the moment'''
pass
def findAllDefaults(self):
pass
def findDefault(self, discreteLocation=None):
pass
def findDefaultFont(self, discreteLocation=None):
pass
def splitLocation(self, location):
pass
def _serializeAnyAxis(self, axis):
pass
def getSerializedAxes(self, discreteLocation=None):
pass
def getContinuousAxesForMutator(self):
pass
def _getAxisOrder(self):
pass
def getFullDesignLocation(self, location):
pass
def getDiscreteLocations(self):
pass
def getOrderedDiscreteAxes(self):
pass
def getOrderedContinuousAxes(self):
pass
def checkDiscreteAxisValues(self, location):
pass
def collectBaseGlyphs(self, glyphName, location):
pass
def _getComponentNames(glyph):
pass
def findSourceDescriptorsForDiscreteLocation(self, discreteLocDict=None):
pass
def getVariationModel(self, items, axes, bias=None):
pass
def newDefaultLocation(self, bend=False, discreteLocation=None):
pass
def isAnisotropic(self, location):
pass
def splitAnisotropic(self, location):
pass
def collectForegroundLayerNames(self):
'''Return list of names of the default layers of all the fonts in this system.
Include None and foreground. XX Why
'''
pass
def getReverseComponentMapping(self, discreteLocation=None):
'''Return a dict with reverse component mappings.
Check if we're using fontParts or defcon
Check which part of the designspace we're in.
'''
pass
def generateUFOs(self, useVarlib=None):
pass
@memoize
def getInfoMutator(self, discreteLocation=None):
''' Returns an info mutator for this discrete location '''
pass
@memoize
def getLibEntryMutator(self, discreteLocation=None):
''' Returns a mutator for the selected lib keys stored in self.libKeysForProcessing
If there is no entry in the lib, it will ignore the source
If there are no libkeys, it will return None.
'''
pass
@memoize
def getKerningMutator(self, pairs=None, discreteLocation=None):
''' Return a kerning mutator, collect the sources, build mathGlyphs.
If no pairs are given: calculate the whole table.
If pairs are given then query the sources for a value and make a mutator only with those values.
'''
pass
@memoize
def getGlyphMutator(self, glyphName, decomposeComponents=False, **discreteLocation):
'''make a mutator / varlib object for glyphName, with the sources for the given discrete location'''
pass
def isLocalDefault(self, location):
pass
def axesByName(self):
pass
def locationWillClip(self, location):
pass
def getAxisExtremes(self, axisRecord):
pass
def clipDesignLocation(self, location):
pass
def filterThisLocation(self, location, mutedAxes=None):
pass
@memoize
def collectSourcesForGlyph(self, glyphName, decomposeComponents=False, discreteLocation=None, asMathGlyph=True):
''' Return all source glyph objects.
+ either as mathglyphs (for use in mutators)
+ or source glyphs straight from the fonts
decomposeComponents = True causes the source glyphs to be decomposed first
before building the mutator. That gives you instances that do not depend
on a complete font. If you're calculating previews for instance.
findSourceDescriptorsForDiscreteLocation returns sources from layers as well
'''
pass
def collectMastersForGlyph(self, glyphName, decomposeComponents=False, discreteLocation=None):
pass
def getLocationType(self, location):
'''Determine the type of the location:
continuous / discrete
anisotropic / normal.
'''
pass
def collectSkippedGlyphs(self):
pass
def makeInstance(self, instanceDescriptor,
doRules=None,
glyphNames=None,
decomposeComponents=False,
pairs=None,
bend=False):
''' Generate a font object for this instance '''
pass
def locationToDescriptiveString(self, loc):
pass
def pathForInstance(self, instanceDescriptor):
pass
def makeOneInstance(self, location,
doRules=None,
glyphNames=None,
decomposeComponents=False,
pairs=None,
bend=False):
pass
def randomLocation(self, extrapolate=0, anisotropic=False, roundValues=True, discreteLocation=None):
'''A good random location, for quick testing and entertainment
extrapolate: is a factor of the (max-min) distance. 0 = nothing, 0.1 = 0.1 * (max - min)
anisotropic= True: *all* continuous axes get separate x, y values
for discrete axes: random choice from the defined values
for continuous axes: interpolated value between axis.minimum and axis.maximum
if discreteLocation is given, make a random location for the continuous part.
assuming we want this location for testing the ufoOperator machine:
we will eventually need a designspace location, not a userspace location.
'''
pass
def getLocationsForFont(self, fontObj):
pass
def makeFontProportions(self, location, bend=False, roundGeometry=True):
'''Calculate the basic font proportions for this location, to map out expectations for drawing'''
pass
@memoize
def makeOneGlyph(self, glyphName, location, decomposeComponents=True, useVarlib=False, roundGeometry=False, clip=False):
'''
glyphName:
location: location including discrete axes, in **designspace** coordinates.
decomposeComponents: decompose all components so we get a proper representation of the shape
useVarlib: use varlib as mathmodel. Otherwise it is mutatorMath
roundGeometry: round all geometry to integers
clip: restrict axis values to the defined minimum and maximum
+ Supports extrapolation for varlib and mutatormath: though the results can be different
+ Supports anisotropic locations for varlib and mutatormath. Obviously this will not be present in any Variable font exports.
Returns: a mathglyph, results are cached
'''
pass
def makeOneInfo(self, location, roundGeometry=False, clip=False):
''' Make the fontMath.mathInfo object for this location.
You need to extract this to an instance font.
location: location including discrete axes, in **designspace** coordinates.
'''
pass
def makeOneKerning(self, location, pairs=None):
'''
Make the fontMath.mathKerning for this location.
location: location including discrete axes, in **designspace** coordinates.
pairs: a list of pairs, if you want to get a subset
'''
pass
def _copyFontInfo(self, sourceInfo, targetInfo):
''' Copy the non-calculating fields from the source info.'''
pass
| 119 | 20 | 15 | 0 | 12 | 3 | 4 | 0.22 | 1 | 20 | 3 | 0 | 95 | 17 | 95 | 95 | 1,591 | 121 | 1,217 | 440 | 1,088 | 273 | 1,039 | 408 | 942 | 33 | 1 | 6 | 383 |
145,387 |
LettError/ufoProcessor
|
LettError_ufoProcessor/Lib/ufoProcessor/varModels.py
|
ufoProcessor.varModels.AxisMapper
|
class AxisMapper(object):
def __init__(self, axes):
# axes: list of axis axisdescriptors
self.axisOrder = [a.name for a in axes]
self.axisDescriptors = {}
for a in axes:
self.axisDescriptors[a.name] = a
def getMappedAxisValues(self):
values = {}
for axisName in self.axisOrder:
a = self.axisDescriptors[axisName]
values[axisName] = a.map_forward(a.minimum), a.map_forward(a.default), a.map_forward(a.maximum)
return values
def __call__(self, location):
return self.map_forward(location)
def _normalize(self, location):
new = {}
for axisName in location.keys():
a = self.axisDescriptors[axisName]  # note: this class stores descriptors in axisDescriptors, not self.axes
new[axisName] = normalizeLocation(dict(w=location[axisName]), dict(w=(a.minimum, a.default, a.maximum)))
return new
def map_backward(self, location):
new = {}
for axisName in location.keys():
if axisName not in self.axisOrder:
continue
if axisName not in location:
continue
new[axisName] = self.axisDescriptors[axisName].map_backward(location[axisName])
return new
def map_forward(self, location):
new = {}
for axisName in location.keys():
if axisName not in self.axisOrder:
continue
if axisName not in location:
continue
new[axisName] = self.axisDescriptors[axisName].map_forward(location[axisName])
return new
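# Usage sketch, assuming `axes` is the list of axisDescriptors of a designspace with a mapped "weight" axis:
#   mapper = AxisMapper(axes)
#   mapper.map_forward({"weight": 400})    # user value -> design value, via the axis map
#   mapper.map_backward({"weight": 87})    # design value -> user value
#   mapper.getMappedAxisValues()           # {"weight": (minimum, default, maximum)} in design coordinates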
|
class AxisMapper(object):
def __init__(self, axes):
pass
def getMappedAxisValues(self):
pass
def __call__(self, location):
pass
def _normalize(self, location):
pass
def map_backward(self, location):
pass
def map_forward(self, location):
pass
| 7 | 0 | 6 | 0 | 6 | 0 | 3 | 0.03 | 1 | 1 | 0 | 0 | 6 | 2 | 6 | 6 | 43 | 5 | 37 | 18 | 30 | 1 | 37 | 18 | 30 | 4 | 1 | 2 | 15 |
145,388 |
Liebeck/IWNLP-py
|
Liebeck_IWNLP-py/tests/test_iwnlp_wrapper.py
|
tests.test_iwnlp_wrapper.IWNLPWrapperTest
|
class IWNLPWrapperTest(unittest.TestCase):
@classmethod
def setUpClass(self):
self.iwnlp = IWNLPWrapper(lemmatizer_path='data/IWNLP.Lemmatizer_20170501.json')
def test_lemmatize_plain_example1(self):
predicted = self.iwnlp.lemmatize_plain('Hallo')
self.assertEqual(predicted, ['Hallo'])
def test_lemmatize_plain_example2(self):
predicted = self.iwnlp.lemmatize_plain('Hallo', ignore_case=False)
self.assertEqual(predicted, ['Hallo'])
def test_lemmatize_plain_example3(self):
predicted = self.iwnlp.lemmatize_plain('birne', ignore_case=False)
self.assertEqual(predicted, None)
def test_lemmatize_plain_example4(self):
predicted = self.iwnlp.lemmatize_plain('birne', ignore_case=True)
self.assertEqual(predicted, ['Birne'])
def test_lemmatize_plain_example5(self):
predicted = self.iwnlp.lemmatize_plain('gespielt')
self.assertEqual(predicted, ['spielen'])
def test_lemmatize_plain_example6(self):
predicted = self.iwnlp.lemmatize_plain('schnell')
self.assertEqual(predicted, ['schnell', 'schnellen'])
def test_lemmatize_plain_example7(self):
predicted = self.iwnlp.lemmatize_plain('Gartenhäuser')
self.assertEqual(predicted, ['Gartenhaus'])
def test_lemmatize_plain_example8(self):
predicted = self.iwnlp.lemmatize_plain('ein')
self.assertEqual(predicted, ['ein', 'einen'])
def test_contains_entry_example1(self):
self.assertEqual(self.iwnlp.contains_entry('Birne'), True)
def test_contains_entry_example2(self):
self.assertEqual(self.iwnlp.contains_entry('birne', ignore_case=False), False)
def test_contains_entry_example3(self):
self.assertEqual(self.iwnlp.contains_entry('birne', ignore_case=True), True)
def test_contains_entry_example4(self):
self.assertEqual(self.iwnlp.contains_entry('groko'), False)
def test_contains_entry_example5(self):
self.assertEqual(self.iwnlp.contains_entry('GroKo'), True)
def test_contains_entry_example6(self):
self.assertEqual(self.iwnlp.contains_entry('groko', ignore_case=True), True)
def test_contains_entry_example7(self):
self.assertEqual(self.iwnlp.contains_entry('groko', pos='Noun'), False)
def test_contains_entry_example8(self):
self.assertEqual(self.iwnlp.contains_entry('groko', pos='X'), False)
def test_contains_entry_example9(self):
self.assertEqual(self.iwnlp.contains_entry('groko', pos='AdjectivalDeclension'), False)
def test_contains_entry_example10(self):
self.assertEqual(self.iwnlp.contains_entry('groko', pos=["Noun", "X"], ignore_case=True), True)
def test_lemmatize_example1(self):
predicted = self.iwnlp.lemmatize('Lkws', pos_universal_google='NOUN')
self.assertEqual(predicted, ['Lkw'])
def test_lemmatize_example2(self):
predicted = self.iwnlp.lemmatize('gespielt', pos_universal_google='VERB')
self.assertEqual(predicted, ['spielen'])
def test_get_lemmas_example1(self):
predicted = self.iwnlp.get_lemmas('groko', pos=["Noun", "X"], ignore_case=True)
self.assertEqual(predicted, ['GroKo'])
|
class IWNLPWrapperTest(unittest.TestCase):
@classmethod
def setUpClass(self):
pass
def test_lemmatize_plain_example1(self):
pass
def test_lemmatize_plain_example2(self):
pass
def test_lemmatize_plain_example3(self):
pass
def test_lemmatize_plain_example4(self):
pass
def test_lemmatize_plain_example5(self):
pass
def test_lemmatize_plain_example6(self):
pass
def test_lemmatize_plain_example7(self):
pass
def test_lemmatize_plain_example8(self):
pass
def test_contains_entry_example1(self):
pass
def test_contains_entry_example2(self):
pass
def test_contains_entry_example3(self):
pass
def test_contains_entry_example4(self):
pass
def test_contains_entry_example5(self):
pass
def test_contains_entry_example6(self):
pass
def test_contains_entry_example7(self):
pass
def test_contains_entry_example8(self):
pass
def test_contains_entry_example9(self):
pass
def test_contains_entry_example10(self):
pass
def test_lemmatize_example1(self):
pass
def test_lemmatize_example2(self):
pass
def test_get_lemmas_example1(self):
pass
| 24 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 21 | 1 | 22 | 94 | 78 | 21 | 57 | 36 | 33 | 0 | 56 | 35 | 33 | 1 | 2 | 0 | 22 |
145,389 |
Liebeck/IWNLP-py
|
Liebeck_IWNLP-py/iwnlp/iwnlp_wrapper.py
|
iwnlp.iwnlp_wrapper.IWNLPWrapper
|
class IWNLPWrapper(object):
def __init__(self, lemmatizer_path='IWNLP.Lemmatizer_20170501.json'):
self.logger = logging.getLogger()
self.logger.setLevel(logging.DEBUG)
self.logger.debug('Loading IWNLP lemmatizer')
self.load(lemmatizer_path)
self.logger.debug('IWNLP Lemmatizer loaded')
def load(self, lemmatizer_path):
"""
This method loads the IWNLP.Lemmatizer json file and creates a dictionary
of lowercased forms which maps each form to its possible lemmas.
"""
self.lemmatizer = {}
with io.open(lemmatizer_path, encoding='utf-8') as data_file:
raw = json.load(data_file)
for entry in raw:
self.lemmatizer[entry["Form"]] = entry["Lemmas"]
self.apply_blacklist()
def apply_blacklist(self):
self.remove_entry("die", "Noun", "Adsorbens") # parser error in 20170501.json
def remove_entry(self, form, pos, lemma):
key = form.lower().strip()
if key in self.lemmatizer:
wrong_entry = {"POS": pos, "Form": form, "Lemma": lemma}
if wrong_entry in self.lemmatizer[key]:
self.lemmatizer[key].remove(wrong_entry)
def contains_entry(self, word, pos=None, ignore_case=False):
key = word.lower().strip()
if not pos:
if ignore_case:
return key in self.lemmatizer
else:
return key in self.lemmatizer and any(filter(lambda x: x["Form"] == word, self.lemmatizer[key]))
elif not isinstance(pos, list):
if ignore_case:
return key in self.lemmatizer and any(filter(lambda x: x["POS"] == pos, self.lemmatizer[key]))
else:
return key in self.lemmatizer and any(
filter(lambda x: x["POS"] == pos and x["Form"] == word, self.lemmatizer[key]))
else:
for pos_entry in pos:
if self.contains_entry(word, pos_entry, ignore_case):
return True
return False
def get_entries(self, word, pos=None, ignore_case=False):
entries = []
key = word.lower().strip()
if not pos:
if ignore_case:
entries = self.lemmatizer[key]
else:
entries = list(filter(lambda x: x["Form"] == word, self.lemmatizer[key]))
elif not isinstance(pos, list):
if ignore_case:
entries = list(filter(lambda x: x["POS"] == pos, self.lemmatizer[key]))
else:
entries = list(filter(lambda x: x["POS"] == pos and x["Form"] == word, self.lemmatizer[key]))
else:
for pos_entry in pos:
if self.contains_entry(word, pos=pos_entry, ignore_case=ignore_case):
entries.extend(self.get_entries(word, pos_entry, ignore_case))
return entries
def get_lemmas(self, word, pos=None, ignore_case=False):
"""
Return all lemmas for a given word. This method assumes that the specified word is present in the dictionary
:param word: Word that is present in the IWNLP lemmatizer
"""
entries = self.get_entries(word, pos, ignore_case)
lemmas = list(set([entry["Lemma"] for entry in entries]))
return sorted(lemmas)
def lemmatize_plain(self, word, ignore_case=False):
if self.contains_entry(word, ignore_case=ignore_case):
return self.get_lemmas(word, ignore_case=ignore_case)
else:
return None
def lemmatize(self, word, pos_universal_google):
"""
Python port of the lemmatize method, see https://github.com/Liebeck/IWNLP.Lemmatizer/blob/master/IWNLP.Lemmatizer.Predictor/IWNLPSentenceProcessor.cs
"""
if pos_universal_google == "NOUN":
if self.contains_entry(word, "Noun"):
return self.get_lemmas(word, "Noun")
elif self.contains_entry(word, "X"):
return self.get_lemmas(word, "X")
elif self.contains_entry(word, "AdjectivalDeclension"):
return self.get_lemmas(word, "AdjectivalDeclension")
elif self.contains_entry(word, ["Noun", "X"], ignore_case=True):
return self.get_lemmas(word, ["Noun", "X"], ignore_case=True)
else:
return None
elif pos_universal_google == "ADJ":
if self.contains_entry(word, "Adjective"):
return self.get_lemmas(word, "Adjective")
elif self.contains_entry(word, "Adjective", ignore_case=True):
return self.get_lemmas(word, "Adjective", ignore_case=True)
# Account for possible errors in the POS tagger. This order was fine-tuned in terms of accuracy
elif self.contains_entry(word, "Noun", ignore_case=True):
return self.get_lemmas(word, "Noun", ignore_case=True)
elif self.contains_entry(word, "X", ignore_case=True):
return self.get_lemmas(word, "X", ignore_case=True)
elif self.contains_entry(word, "Verb", ignore_case=True):
return self.get_lemmas(word, "Verb", ignore_case=True)
else:
return None
elif pos_universal_google in ["VERB", "AUX"]:
if self.contains_entry(word, "Verb", ignore_case=True):
return self.get_lemmas(word, "Verb", ignore_case=True)
else:
return None
else:
return None
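A minimal usage sketch for the wrapper above, assuming a local copy of the IWNLP lemmatizer JSON dump at the default path; the import path is taken from the file path listed for this record, the expected lemmas for 'Lkws' and 'gespielt' mirror the IWNLPWrapperTest cases earlier in this section, and the 'birne' result is indicative only.
# Sketch only: IWNLP.Lemmatizer_20170501.json must be downloaded separately.
from iwnlp.iwnlp_wrapper import IWNLPWrapper

wrapper = IWNLPWrapper(lemmatizer_path='IWNLP.Lemmatizer_20170501.json')
print(wrapper.lemmatize('Lkws', pos_universal_google='NOUN'))      # ['Lkw']
print(wrapper.lemmatize('gespielt', pos_universal_google='VERB'))  # ['spielen']
print(wrapper.lemmatize_plain('birne', ignore_case=True))          # e.g. ['Birne']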
|
class IWNLPWrapper(object):
def __init__(self, lemmatizer_path='IWNLP.Lemmatizer_20170501.json'):
pass
def load(self, lemmatizer_path):
'''
This method loads the IWNLP.Lemmatizer json file and creates a dictionary
of lowercased forms which maps each form to its possible lemmas.
'''
pass
def apply_blacklist(self):
pass
def remove_entry(self, form, pos, lemma):
pass
def contains_entry(self, word, pos=None, ignore_case=False):
pass
def get_entries(self, word, pos=None, ignore_case=False):
pass
def get_lemmas(self, word, pos=None, ignore_case=False):
'''
Return all lemmas for a given word. This method assumes that the specified word is present in the dictionary
:param word: Word that is present in the IWNLP lemmatizer
'''
pass
def lemmatize_plain(self, word, ignore_case=False):
pass
def lemmatize(self, word, pos_universal_google):
'''
Python port of the lemmatize method, see https://github.com/Liebeck/IWNLP.Lemmatizer/blob/master/IWNLP.Lemmatizer.Predictor/IWNLPSentenceProcessor.cs
'''
pass
| 10 | 3 | 12 | 0 | 11 | 1 | 4 | 0.13 | 1 | 3 | 0 | 0 | 9 | 2 | 9 | 9 | 120 | 9 | 99 | 24 | 89 | 13 | 76 | 23 | 66 | 14 | 1 | 3 | 38 |
145,390 |
LiftoffSoftware/htmltag
|
LiftoffSoftware_htmltag/htmltag.py
|
htmltag.HTML
|
class HTML(stringtype):
"""
.. versionadded:: 1.2.0
A subclass of Python's built-in `str` to add a simple `__html__` method
that lets us know this string is HTML and does not need to be escaped. It
also has an `escaped` property that will return `self` with all special
characters converted into HTML entities.
"""
tagname = None
def __html__(self):
"""
Returns `self` (we're already a string) in unmodified form.
"""
return self
@property
def escaped(self):
"""
A property that returns `self` with all characters that have special
meaning (in HTML/XML) replaced with HTML entities. Example::
>>> print(HTML('<span>These span tags will be escaped</span>').escaped)
&lt;span&gt;These span tags will be escaped&lt;/span&gt;
"""
return cgi.escape(self).encode(
'ascii', 'xmlcharrefreplace').decode('ascii')
def append(self, *strings):
"""
Adds any number of supplied *strings* to `self` (we're a subclass of
`str` remember) just before the last closing tag and returns a new
instance of `~htmltag.HTML` with the result.
Example::
>>> from htmltag import span, b
>>> html = span('Test:')
>>> print(html)
<span>Test:</span>
>>> html = html.append(' ', b('appended'))
>>> print(html)
<span>Test: <b>appended</b></span>
In the case of self-closing tags like '<img>' the string will simply be
appended after the tag::
>>> from htmltag import img
>>> image = img(src="http://company.com/image.png")
>>> print(image.append("Appended string"))
<img src="http://company.com/image.png">Appended string
.. note:: Why not update ourselves in-place? Because we're a subclass
of `str`; in Python strings are immutable.
"""
close_tag_start = self.rfind('</')
if self.tagname: # More accurate
close_tag_start = self.rfind('</'+self.tagname)
if close_tag_start == -1: # Couldn't find closing tag
return self + "".join(strings) # Just tack on to the end
ending = self[close_tag_start:]
beginning = self[:close_tag_start]
if self.tagname: # Preserve it
tagname = self.tagname
new = HTML(beginning + "".join(strings) + ending)
new.tagname = tagname
return new
else:
return HTML(beginning + "".join(strings) + ending)
|
class HTML(stringtype):
'''
.. versionadded:: 1.2.0
A subclass of Python's built-in `str` to add a simple `__html__` method
that lets us know this string is HTML and does not need to be escaped. It
also has an `escaped` property that will return `self` with all special
characters converted into HTML entities.
'''
def __html__(self):
'''
Returns `self` (we're already a string) in unmodified form.
'''
pass
@property
def escaped(self):
'''
A property that returns `self` with all characters that have special
meaning (in HTML/XML) replaced with HTML entities. Example::
>>> print(HTML('<span>These span tags will be escaped</span>').escaped)
&lt;span&gt;These span tags will be escaped&lt;/span&gt;
'''
pass
def append(self, *strings):
'''
Adds any number of supplied *strings* to `self` (we're a subclass of
`str` remember) just before the last closing tag and returns a new
instance of `~htmltag.HTML` with the result.
Example::
>>> from htmltag import span, b
>>> html = span('Test:')
>>> print(html)
<span>Test:</span>
>>> html = html.append(' ', b('appended'))
>>> print(html)
<span>Test: <b>appended</b></span>
In the case of self-closing tags like '<img>' the string will simply be
appended after the tag::
>>> from htmltag import img
>>> image = img(src="http://company.com/image.png")
>>> print(image.append("Appended string"))
<img src="http://company.com/image.png">Appended string
.. note:: Why not update ourselves in-place? Because we're a subclass
of `str`; in Python strings are immutable.
'''
pass
| 5 | 4 | 18 | 2 | 7 | 11 | 2 | 1.78 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 68 | 8 | 23 | 11 | 18 | 41 | 20 | 10 | 16 | 4 | 1 | 1 | 6 |
145,391 |
LiftoffSoftware/htmltag
|
LiftoffSoftware_htmltag/htmltag.py
|
htmltag.SelfWrap
|
class SelfWrap(ModuleType):
"""
This class is the magic that lets us do things like::
>>> from htmltag import span
"""
def __init__(self, tagname, *args, **kwargs):
self.tagname = tagname
# This is necessary for reload() to work and so we don't overwrite
# these values with instances of TagWrap:
no_override = [
'HTML', 'SelfWrap', 'TagWrap', 'strip_xss', '__author__',
'__builtins__', '__doc__', '__license__', '__name__',
'__package__', '__version__', '__version_info__'
]
for attr in no_override:
setattr(self, attr, getattr(tagname, attr, None))
self.__path__ = [] # Required for Python 3.3
self.__file__ = FILE # Needed for Sphinx docs
def __getattr__(self, name): # "from htmltag import a" <--*name* will be 'a'
# This is how Python looks up the module name
if name not in self.__dict__: # So we don't overwrite what's already set
# Make our instance of TagWrap exist so we can return it properly
setattr(self, name, TagWrap(name))
return self.__dict__[name]
def __call__(self, *args, **kwargs):
# This turns the 'a' in "from htmltag import a" into a callable:
return TagWrap(self.tagname, *args, **kwargs)
|
class SelfWrap(ModuleType):
'''
This class is the magic that lets us do things like::
>>> from htmltag import span
'''
def __init__(self, tagname, *args, **kwargs):
pass
def __getattr__(self, name):
pass
def __call__(self, *args, **kwargs):
pass
| 4 | 1 | 7 | 0 | 6 | 3 | 2 | 0.72 | 1 | 1 | 1 | 0 | 3 | 3 | 3 | 3 | 30 | 3 | 18 | 9 | 14 | 13 | 14 | 9 | 10 | 2 | 1 | 1 | 5 |
145,392 |
LiftoffSoftware/htmltag
|
LiftoffSoftware_htmltag/htmltag.py
|
htmltag.TagWrap
|
class TagWrap(object):
"""
Lets you wrap whatever string you want in whatever HTML tag (*tagname*) you
want.
**Optional Keyword Arguments:**
:keyword safe_mode: If `True` dangerous (XSS) content will be removed
from all HTML. Defaults to `True`
:keyword whitelist: If given only tags that exist in the whitelist will be
allowed. All else will be escaped into HTML entities.
:keyword replacement: A string to replace unsafe HTML with. If set to
"entities", will convert unsafe tags to HTML entities so they
display as-is but won't be evaluated by renderers/browsers. The
default is "(removed)".
:keyword log_rejects: If `True` rejected unsafe (XSS) HTML will be
logged using :meth:`logging.error`. Defaults to `False`
:keyword ending_slash: If `True` self-closing HTML tags like '<img>'
will not have a '/' placed before the '>'. Usually only necessary
with XML and XHTML documents (as opposed to regular HTML). Defaults
to `False`.
:type safe_mode: boolean
:type whitelist: iterable
:type replacement: string, "entities", or "off"
:type log_rejects: boolean
:type ending_slash: boolean
The `TagWrap` class may be used in a direct fashion (as opposed to the
metaprogramming magic way: ``from htmltag import sometag``)::
>>> from htmltag import TagWrap
>>> img = TagWrap('img', ending_slash=True)
>>> print(img(src="http://company.com/someimage.png"))
<img src="http://company.com/someimage.png" />
The `TagWrap` class also has a :meth:`~TagWrap.copy` method which can be
useful when you want a new tag to have the same attributes as another::
>>> from htmltag import TagWrap
>>> whitelist = ["b", "i", "strong", "a", "em"]
>>> replacement = "(tag not allowed)"
>>> b = TagWrap('b', whitelist=whitelist, replacement=replacement)
>>> i = b.copy('i')
>>> print(i.whitelist)
['b', 'i', 'strong', 'a', 'em']
Here's how you can create a number of tags with your own custom settings all
at once::
>>> import sys
>>> from htmltag import TagWrap
>>> whitelist = ["b", "i", "strong", "a", "em"] # Whitelist ourselves
>>> replacement = "(tag not allowed)"
>>> for tag in whitelist:
... setattr(sys.modules[__name__], tag,
... TagWrap(tag, whitelist=whitelist, replacement=replacement))
>>> strong.replacement # doctest: +SKIP
'(tag not allowed)' # doctest: +SKIP
.. note:: ``sys.modules[__name__]`` is the current module; the global 'self'.
"""
# NOTE: The above doctest is skipped because it only works in reality :)
def __init__(self, tagname, **kwargs):
self.tagname = tagname
self.safe_mode = kwargs.get('safe_mode', True)
self.whitelist = kwargs.get('whitelist', "off")
self.replacement = kwargs.get('replacement', '(removed)')
self.log_rejects = kwargs.get('log_rejects', False)
# This only applies to self-closing tags:
self.ending_slash = kwargs.get('ending_slash', False)
def escape(self, string):
"""
Returns *string* with all instances of '<', '>', and '&' converted into
HTML entities.
"""
html_entities = {"&": "&", '<': '<', '>': '>'}
return HTML("".join(html_entities.get(c, c) for c in string))
def wrap(self, tag, *args, **kwargs):
"""
Returns all *args* (strings) wrapped in HTML tags like so::
>>> b = TagWrap('b')
>>> print(b('bold text'))
<b>bold text</b>
To add attributes to the tag you can pass them as keyword arguments::
>>> a = TagWrap('a')
>>> print(a('awesome software', href='http://liftoffsoftware.com/'))
<a href="http://liftoffsoftware.com/">awesome software</a>
.. note:: :meth:`~TagWrap.wrap` will automatically convert '<', '>', \
and '&' into HTML entities unless the wrapped string has an `__html__` \
method
"""
template = "<{tagstart}>{content}</{tag}>"
if tag in self_closing_tags:
template = "<{tagstart}>" # self-closing tags don't have content
if self.ending_slash:
template = "<{tagstart} />"
content = ""
for string in args:
if not hasattr(string, '__html__'): # Indicates already escaped
string = self.escape(string)
content += string.__html__()
tagstart = tag
if kwargs:
tagstart += ' '
for key, value in kwargs.items():
key = key.lstrip('_')
if value == True:
tagstart = tagstart + key + ' '
elif value == False:
continue # skip it altogether
else:
tagstart = tagstart + '{key}="{value}" '.format(
key=key, value=value)
tagstart = tagstart.rstrip()
html = template.format(tagstart=tagstart, content=content, tag=tag)
if self.safe_mode:
html, rejected = strip_xss(
html, whitelist=self.whitelist, replacement=self.replacement)
if self.log_rejects:
logging.error(
"{name} rejected unsafe HTML: '{rejected}'".format(
name=self.__class__.__name__, rejected=rejected))
html = HTML(html)
html.tagname = tag # So we can easily append()
return html
def copy(self, tagname, **kwargs):
"""
Returns a new instance of `TagWrap` using the given *tagname* that has
all the same attributes as this instance. If *kwargs* is given they
will override the attributes of the created instance.
"""
new_kwargs = {
'replacement': self.replacement,
'whitelist': self.whitelist,
'safe_mode': self.safe_mode,
'log_rejects': self.log_rejects,
'ending_slash': self.ending_slash
}
new_kwargs.update(**kwargs)
return TagWrap(tagname, **new_kwargs)
def __call__(self, *args, **kwargs):
return self.wrap(self.tagname, *args, **kwargs)
def __getitem__(self, k):
if k == "__all__":
raise ImportError("Cannot 'import *' with htmltag.")
if isinstance(k, str):
if k.startswith('__') and k.endswith("__"):
raise AttributeError
elif k in self.__dict__:
return self.__dict__[k]
raise ImportError(
"Using IPython? Ignore that ^ traceback stuff and try again "
"(second time usually works to get your traceback)")
|
class TagWrap(object):
'''
Lets you wrap whatever string you want in whatever HTML tag (*tagname*) you
want.
**Optional Keyword Arguments:**
:keyword safe_mode: If `True` dangerous (XSS) content will be removed
from all HTML. Defaults to `True`
:keyword whitelist: If given only tags that exist in the whitelist will be
allowed. All else will be escaped into HTML entities.
:keyword replacement: A string to replace unsafe HTML with. If set to
"entities", will convert unsafe tags to HTML entities so they
display as-is but won't be evaluated by renderers/browsers. The
default is "(removed)".
:keyword log_rejects: If `True` rejected unsafe (XSS) HTML will be
logged using :meth:`logging.error`. Defaults to `False`
:keyword ending_slash: If `True` self-closing HTML tags like '<img>'
will not have a '/' placed before the '>'. Usually only necessary
with XML and XHTML documents (as opposed to regular HTML). Defaults
to `False`.
:type safe_mode: boolean
:type whitelist: iterable
:type replacement: string, "entities", or "off"
:type log_rejects: boolean
:type ending_slash: boolean
The `TagWrap` class may be used in a direct fashion (as opposed to the
metaprogramming magic way: ``from htmltag import sometag``)::
>>> from htmltag import TagWrap
>>> img = TagWrap('img', ending_slash=True)
>>> print(img(src="http://company.com/someimage.png"))
<img src="http://company.com/someimage.png" />
The `TagWrap` class also has a :meth:`~TagWrap.copy` method which can be
useful when you want a new tag to have the same attributes as another::
>>> from htmltag import TagWrap
>>> whitelist = ["b", "i", "strong", "a", "em"]
>>> replacement = "(tag not allowed)"
>>> b = TagWrap('b', whitelist=whitelist, replacement=replacement)
>>> i = b.copy('i')
>>> print(i.whitelist)
['b', 'i', 'strong', 'a', 'em']
Here's how you can create a number of tags with your own custom settings all
at once::
>>> import sys
>>> from htmltag import TagWrap
>>> whitelist = ["b", "i", "strong", "a", "em"] # Whitelist ourselves
>>> replacement = "(tag not allowed)"
>>> for tag in whitelist:
... setattr(sys.modules[__name__], tag,
... TagWrap(tag, whitelist=whitelist, replacement=replacement))
>>> strong.replacement # doctest: +SKIP
'(tag not allowed)' # doctest: +SKIP
.. note:: ``sys.modules[__name__]`` is the current module; the global 'self'.
'''
def __init__(self, tagname, **kwargs):
pass
def escape(self, string):
'''
Returns *string* with all instances of '<', '>', and '&' converted into
HTML entities.
'''
pass
def wrap(self, tag, *args, **kwargs):
'''
Returns all *args* (strings) wrapped in HTML tags like so::
>>> b = TagWrap('b')
>>> print(b('bold text'))
<b>bold text</b>
To add attributes to the tag you can pass them as keyword arguments::
>>> a = TagWrap('a')
>>> print(a('awesome software', href='http://liftoffsoftware.com/'))
<a href="http://liftoffsoftware.com/">awesome software</a>
.. note:: :meth:`~TagWrap.wrap` will automatically convert '<', '>', and '&' into HTML entities unless the wrapped string has an `__html__` method
'''
pass
def copy(self, tagname, **kwargs):
'''
Returns a new instance of `TagWrap` using the given *tagname* that has
all the same attributes as this instance. If *kwargs* is given they
will override the attributes of the created instance.
'''
pass
def __call__(self, *args, **kwargs):
pass
def __getitem__(self, k):
pass
| 7 | 4 | 16 | 1 | 11 | 5 | 3 | 1.14 | 1 | 4 | 1 | 0 | 6 | 6 | 6 | 6 | 162 | 18 | 69 | 22 | 62 | 79 | 54 | 22 | 47 | 11 | 1 | 3 | 20 |
145,393 |
Lilykos/pyphonetics
|
pyphonetics/phonetics/mra.py
|
pyphonetics.phonetics.mra.MatchingRatingApproach
|
class MatchingRatingApproach(PhoneticAlgorithm):
"""
Functions related to the computation of the Match Rating Approach codex.
[Reference]: https://en.wikipedia.org/wiki/Match_rating_approach
[Article]: Moore, G B.; Kuhns, J L.; Treffzs, J L.; Montgomery, C A. (Feb 1, 1977).
Accessing Individual Records from Personal Data Files Using Nonunique Identifiers.
US National Institute of Standards and Technology. p. 17. NIST SP - 500-2.
"""
def __init__(self):
super().__init__()
def phonetics(self, word):
check_str(word)
check_empty(word)
codex = unidecode(word).upper()
codex = re.sub(r'[^A-Z]', r'', codex)
# Dropping non-leading vowels
codex = codex[0] + re.sub(r'[AEIOU]', r'', codex[1:])
# Dropping consecutive consonants
codex = squeeze(codex)
# Returning the codex
offset = min(3, len(codex) - 3)
return codex[:3] + codex[len(codex) - offset:offset + len(codex)]
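A short worked sketch of the codex above: non-leading vowels are dropped, consecutive duplicate consonants are squeezed, and the result is clipped to at most the first three plus last three characters. The import path is taken from the file path listed for this record, and the two outputs follow directly from those steps.
# Illustrative usage of the codex construction above.
from pyphonetics.phonetics.mra import MatchingRatingApproach

mra = MatchingRatingApproach()
print(mra.phonetics('Byrne'))      # 'BYRN'
print(mra.phonetics('Catherine'))  # 'CTHRN'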
|
class MatchingRatingApproach(PhoneticAlgorithm):
'''
Functions related to the computation of the Match Rating Approach codex.
[Reference]: https://en.wikipedia.org/wiki/Match_rating_approach
[Article]: Moore, G B.; Kuhns, J L.; Treffzs, J L.; Montgomery, C A. (Feb 1, 1977).
Accessing Individual Records from Personal Data Files Using Nonunique Identifiers.
US National Institute of Standards and Technology. p. 17. NIST SP - 500-2.
'''
def __init__(self):
pass
def phonetics(self, word):
pass
| 3 | 1 | 9 | 2 | 6 | 2 | 1 | 0.83 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 6 | 28 | 6 | 12 | 5 | 9 | 10 | 12 | 5 | 9 | 1 | 1 | 0 | 2 |
145,394 |
Lilykos/pyphonetics
|
pyphonetics/exceptions.py
|
pyphonetics.exceptions.DistanceMetricError
|
class DistanceMetricError(Exception):
pass
|
class DistanceMetricError(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
145,395 |
Lilykos/pyphonetics
|
pyphonetics/phonetics/phonetic_algorithm.py
|
pyphonetics.phonetics.phonetic_algorithm.PhoneticAlgorithm
|
class PhoneticAlgorithm:
"""
The main Phonetic Algorithm class, to ensure a unified API
for all the included algorithms.
"""
def __init__(self):
self.distances = {
'levenshtein': levenshtein_distance,
'hamming': hamming_distance,
}
def phonetics(self, word):
"""Get the phonetic representation of the word."""
pass
def sounds_like(self, word1, word2):
"""Compare the phonetic representations of 2 words, and return a boolean value."""
return self.phonetics(word1) == self.phonetics(word2)
def distance(self, word1, word2, metric='levenshtein'):
"""Get the similarity of the words, using the supported distance metrics."""
if metric in self.distances:
distance_func = self.distances[metric]
return distance_func(self.phonetics(word1), self.phonetics(word2))
else:
raise DistanceMetricError('Distance metric not supported! Choose from levenshtein, hamming.')
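A sketch of how the unified API above is meant to be consumed from a subclass; FirstLetterCode is a hypothetical toy encoder, not part of pyphonetics, and exists only to show that phonetics() is the single method a concrete algorithm has to supply, with sounds_like() and distance() inherited.
# Hypothetical toy subclass, shown only to illustrate the shared interface.
from pyphonetics.phonetics.phonetic_algorithm import PhoneticAlgorithm

class FirstLetterCode(PhoneticAlgorithm):
    def phonetics(self, word):
        return word[0].upper()

algo = FirstLetterCode()
print(algo.sounds_like('Robert', 'rupert'))               # True: both encode to 'R'
print(algo.distance('Robert', 'Kate', metric='hamming'))  # 1 ('R' vs 'K')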
|
class PhoneticAlgorithm:
'''
The main Phonetic Algorithm class, to ensure a unified API
for all the included algorithms.
'''
def __init__(self):
pass
def phonetics(self, word):
'''Get the phonetic representation of the word.'''
pass
def sounds_like(self, word1, word2):
'''Compare the phonetic representations of 2 words, and return a boolean value.'''
pass
def distance(self, word1, word2, metric='levenshtein'):
'''Get the similarity of the words, using the supported distance metrics.'''
pass
| 5 | 4 | 5 | 0 | 4 | 1 | 1 | 0.44 | 0 | 1 | 1 | 8 | 4 | 1 | 4 | 4 | 26 | 3 | 16 | 7 | 11 | 7 | 12 | 7 | 7 | 2 | 0 | 1 | 5 |
145,396 |
Lilykos/pyphonetics
|
pyphonetics/phonetics/metaphone.py
|
pyphonetics.phonetics.metaphone.Metaphone
|
class Metaphone(PhoneticAlgorithm):
"""
The metaphone algorithm.
[Reference]: https://en.wikipedia.org/wiki/Metaphone
[Author]: Lawrence Philips, 1990
"""
def __init__(self):
super().__init__()
self.rules = [
(r'[^a-z]', r''),
(r'([bcdfhjklmnpqrstvwxyz])\1+', r'\1'),
(r'^ae', r'E'),
(r'^[gkp]n', r'N'),
(r'^wr', r'R'),
(r'^x', r'S'),
(r'^wh', r'W'),
(r'mb$', r'M'),
(r'(?!^)sch', r'SK'),
(r'th', r'0'),
(r't?ch|sh', r'X'),
(r'c(?=ia)', r'X'),
(r'[st](?=i[ao])', r'X'),
(r's?c(?=[iey])', r'S'),
(r'[cq]', r'K'),
(r'dg(?=[iey])', r'J'),
(r'd', r'T'),
(r'g(?=h[^aeiou])', r''),
(r'gn(ed)?', r'N'),
(r'([^g]|^)g(?=[iey])', r'\1J'),
(r'g+', r'K'),
(r'ph', r'F'),
(r'([aeiou])h(?=\b|[^aeiou])', r'\1'),
(r'[wy](?![aeiou])', r''),
(r'z', r'S'),
(r'v', r'F'),
(r'(?!^)[aeiou]+', r'')
]
def phonetics(self, word):
check_str(word)
check_empty(word)
code = unidecode(word).lower()
for item in self.rules:
code = re.sub(item[0], item[1], code)
return code.upper()
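A usage sketch for the rule table above; the expected code was traced by hand against the ordered substitutions (leading 'kn' becomes 'N', the silent 'gh' is dropped, non-leading vowels removed), and the import path is taken from the file path listed for this record.
# Illustrative usage of the rule-driven Metaphone encoder above.
from pyphonetics.phonetics.metaphone import Metaphone

m = Metaphone()
print(m.phonetics('Knight'))             # 'NT'
print(m.sounds_like('Knight', 'night'))  # True: both reduce to 'NT'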
|
class Metaphone(PhoneticAlgorithm):
'''
The metaphone algorithm.
[Reference]: https://en.wikipedia.org/wiki/Metaphone
[Author]: Lawrence Philips, 1990
'''
def __init__(self):
pass
def phonetics(self, word):
pass
| 3 | 1 | 20 | 1 | 19 | 0 | 2 | 0.13 | 1 | 1 | 0 | 0 | 2 | 1 | 2 | 6 | 48 | 4 | 39 | 6 | 36 | 5 | 11 | 6 | 8 | 2 | 1 | 1 | 3 |
145,397 |
Lilykos/pyphonetics
|
pyphonetics/phonetics/soundex.py
|
pyphonetics.phonetics.soundex.Soundex
|
class Soundex(PhoneticAlgorithm):
"""
The Soundex algorithm.
[Reference]: https://en.wikipedia.org/wiki/Soundex
[Authors]: Robert C. Russel, Margaret King Odell
"""
def __init__(self):
super().__init__()
self.translations = translation(
'AEIOUYWHBPFVCSKGJQXZDTLMNR',
'000000DD111122222222334556'
)
self.pad = lambda code: '{}0000'.format(code)[:4]
def phonetics(self, word):
check_str(word)
check_empty(word)
word = unidecode(word).upper()
word = re.sub(r'[^A-Z]', r'', word)
first_letter = word[0]
tail = ''.join(self.translations[char] for char in word
if self.translations[char] != 'D')
# Dropping first code's letter if duplicate
if len(tail):
if tail[0] == self.translations[first_letter]:
tail = tail[1:]
code = squeeze(tail).replace('0', '')
return self.pad(first_letter + code)
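A usage sketch for the Soundex encoder above, reusing the helpers inherited from PhoneticAlgorithm; 'R163' follows from the translation table (B->1, R->6, T->3, vowels dropped, padded to four characters), and the import path is taken from the file path listed for this record.
# Illustrative usage of the Soundex encoder above.
from pyphonetics.phonetics.soundex import Soundex

soundex = Soundex()
print(soundex.phonetics('Robert'))              # 'R163'
print(soundex.sounds_like('Robert', 'Rupert'))  # True: both map to 'R163'
print(soundex.distance('Robert', 'Ashcraft'))   # Levenshtein distance between the codes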
|
class Soundex(PhoneticAlgorithm):
'''
The Soundex algorithm.
[Reference]: https://en.wikipedia.org/wiki/Soundex
[Authors]: Robert C. Russel, Margaret King Odell
'''
def __init__(self):
pass
def phonetics(self, word):
pass
| 3 | 1 | 13 | 3 | 10 | 1 | 2 | 0.29 | 1 | 1 | 0 | 0 | 2 | 2 | 2 | 6 | 34 | 7 | 21 | 8 | 18 | 6 | 17 | 8 | 14 | 3 | 1 | 2 | 4 |
145,398 |
Lilykos/pyphonetics
|
pyphonetics/phonetics/lein.py
|
pyphonetics.phonetics.lein.Lein
|
class Lein(PhoneticAlgorithm):
"""
The Lein name coding procedure.
[Reference]: http://naldc.nal.usda.gov/download/27833/PDF
"""
def __init__(self):
super().__init__()
self.translations = translation(
'DTMNLRBFPVCJKGQSXZ',
'112233444455555555'
)
self.pad = lambda code: '{}0000'.format(code)[:4]
def phonetics(self, word):
check_str(word)
check_empty(word)
word = unidecode(word).upper()
word = re.sub(r'[^A-Z]\s', r'', word)
# Keep the 1st letter
first, code = word[0], word[1:]
# Drop vowels and Y, W & H
code = re.sub(r'[AEIOUYWH]', r'', code)
# Drop consecutive duplicates and truncate to 4 chars
code = squeeze(code)[0: 4]
# Translations
code = ''.join(self.translations.get(char, char) for char in code)
return self.pad(first + code)
|
class Lein(PhoneticAlgorithm):
'''
The Lein name coding procedure.
[Reference]: http://naldc.nal.usda.gov/download/27833/PDF
'''
def __init__(self):
pass
def phonetics(self, word):
pass
| 3 | 1 | 15 | 4 | 9 | 2 | 1 | 0.44 | 1 | 1 | 0 | 0 | 2 | 2 | 2 | 6 | 36 | 10 | 18 | 6 | 15 | 8 | 15 | 6 | 12 | 1 | 1 | 0 | 2 |
145,399 |
Lilykos/pyphonetics
|
pyphonetics/exceptions.py
|
pyphonetics.exceptions.UnicodeException
|
class UnicodeException(Exception):
pass
|
class UnicodeException(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
145,400 |
Lilykos/pyphonetics
|
pyphonetics/exceptions.py
|
pyphonetics.exceptions.WrongLengthException
|
class WrongLengthException(Exception):
pass
|
class WrongLengthException(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
145,401 |
Lilykos/pyphonetics
|
pyphonetics/phonetics/fuzzy_soundex.py
|
pyphonetics.phonetics.fuzzy_soundex.FuzzySoundex
|
class FuzzySoundex(PhoneticAlgorithm):
"""
Implementation of the "Fuzzy Soundex" algorithm.
[Reference]: http://wayback.archive.org/web/20100629121128/http://www.ir.iit.edu/publications/downloads/IEEESoundexV5.pdf
[Article]: Holmes, David and M. Catherine McCabe. "Improving Precision and Recall for Soundex Retrieval."
"""
def __init__(self):
super().__init__()
self.translations = translation(
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'0193017-07745501769301-7-9'
)
self.rules = [
(r'CA', r'KA'),
(r'CC', r'KK'),
(r'CK', r'KK'),
(r'CE', r'SE'),
(r'CHL', r'KL'),
(r'CL', r'KL'),
(r'CHR', r'KR'),
(r'CR', r'KR'),
(r'CI', r'SI'),
(r'CO', r'KO'),
(r'CU', r'KU'),
(r'CY', r'SY'),
(r'DG', r'GG'),
(r'GH', r'HH'),
(r'MAC', r'MK'),
(r'MC', r'MK'),
(r'NST', r'NSS'),
(r'PF', r'FF'),
(r'PH', r'FF'),
(r'SCH', r'SSS'),
(r'TIO', r'SIO'),
(r'TIA', r'SIO'),
(r'TCH', r'CHH'),
]
self.set1 = ['CS', 'CZ', 'TS', 'TZ']
self.set2 = ['HR', 'WR']
self.set3 = ['KN', 'NG']
self.set4 = 'HWY'
def phonetics(self, word):
check_str(word)
check_empty(word)
word = unidecode(word).upper()
# Substitutions for beginnings
first_two, rest = word[:2], word[2:]
if first_two in self.set1:
word = 'SS' + rest
elif first_two == 'GN':
word = 'NN' + rest
elif first_two in self.set2:
word = 'RR' + rest
elif first_two == 'HW':
word = 'WW' + rest
elif first_two in self.set3:
word = 'NN' + rest
# Substitutions for endings
last_two, initial = word[-2:], word[0:-2]
if last_two == 'CH':
word = initial + 'KK'
elif last_two == 'NT':
word = initial + 'TT'
elif last_two == 'RT':
word = initial + 'RR'
elif word[-3:] == 'RDT':
word = word[0:-3] + 'RR'
# Applying the rules
for rule in self.rules:
word = re.sub(rule[0], rule[1], word)
# Catch the first letter
first_letter = word[0]
# Translating
code = ''.join(self.translations.get(char, char) for char in word)
# Removing hyphens
code = code.replace('-', '')
# Squeezing the code
code = squeeze(code)
# Dealing with initials
code = first_letter if code[0] in self.set4 \
else first_letter + code[1:]
# Dropping vowels
code = code.replace('0', '')
return code
|
class FuzzySoundex(PhoneticAlgorithm):
'''
Implementation of the "Fuzzy Soundex" algorithm.
[Reference]: http://wayback.archive.org/web/20100629121128/http://www.ir.iit.edu/publications/downloads/IEEESoundexV5.pdf
[Article]: Holmes, David and M. Catherine McCabe. "Improving Precision and Recall for Soundex Retrieval."
'''
def __init__(self):
pass
def phonetics(self, word):
pass
| 3 | 1 | 47 | 8 | 35 | 5 | 7 | 0.2 | 1 | 1 | 0 | 0 | 2 | 6 | 2 | 6 | 101 | 17 | 70 | 14 | 67 | 14 | 35 | 14 | 32 | 12 | 1 | 1 | 13 |
145,402 |
Lilykos/pyphonetics
|
pyphonetics/phonetics/refined_soundex.py
|
pyphonetics.phonetics.refined_soundex.RefinedSoundex
|
class RefinedSoundex(PhoneticAlgorithm):
"""
The Refined Soundex algorithm.
[Reference]: https://en.wikipedia.org/wiki/Soundex
[Authors]: Robert C. Russel, Margaret King Odell
"""
def __init__(self):
super().__init__()
self.translations = translation(
'AEIOUYWHBPFVCKSGJQXZDTLMNR',
'000000DD112233344555667889'
)
def phonetics(self, word):
check_str(word)
check_empty(word)
word = unidecode(word).upper()
word = re.sub(r'[^A-Z]', r'', word)
first_letter = word[0]
tail = ''.join(self.translations[char] for char in word
if self.translations[char] != 'D')
return first_letter + squeeze(tail)
|
class RefinedSoundex(PhoneticAlgorithm):
'''
The Refined Soundex algorithm.
[Reference]: https://en.wikipedia.org/wiki/Soundex
[Authors]: Robert C. Russel, Margaret King Odell
'''
def __init__(self):
pass
def phonetics(self, word):
pass
| 3 | 1 | 10 | 2 | 8 | 0 | 1 | 0.31 | 1 | 1 | 0 | 0 | 2 | 1 | 2 | 6 | 27 | 6 | 16 | 6 | 13 | 5 | 12 | 6 | 9 | 1 | 1 | 0 | 2 |
145,403 |
Lilykos/pyphonetics
|
pyphonetics/exceptions.py
|
pyphonetics.exceptions.EmptyStringError
|
class EmptyStringError(Exception):
pass
|
class EmptyStringError(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
145,404 |
Linaro/squad
|
Linaro_squad/test/core/test_utils.py
|
test.core.test_utils.TestSplitDict
|
class TestSplitDict(TestCase):
def test_split_dict(self):
_dict = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
chunks = split_dict(_dict)
self.assertEqual(5, len(chunks))
self.assertEqual({'a': 1}, chunks[0])
self.assertEqual({'e': 5}, chunks[4])
_dict = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
chunks = split_dict(_dict, chunk_size=2)
self.assertEqual(3, len(chunks))
self.assertEqual({'a': 1, 'b': 2}, chunks[0])
self.assertEqual({'c': 3, 'd': 4}, chunks[1])
self.assertEqual({'e': 5}, chunks[2])
|
class TestSplitDict(TestCase):
def test_split_dict(self):
pass
| 2 | 0 | 16 | 4 | 12 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 18 | 5 | 13 | 4 | 11 | 0 | 13 | 4 | 11 | 1 | 1 | 0 | 1 |
145,405 |
Linaro/squad
|
Linaro_squad/test/core/test_metric_comparison.py
|
test.core.test_metric_comparison.MetricComparisonTest
|
class MetricComparisonTest(TestCase):
def receive_test_run(self, project, version, env, metrics):
receive = ReceiveTestRun(project, update_project_status=False)
receive(version, env, metrics_file=json.dumps(metrics))
def setUp(self):
self.group = models.Group.objects.create(slug='mygruop')
self.project1 = self.group.projects.create(slug='project1')
self.project2 = self.group.projects.create(slug='project2')
self.receive_test_run(self.project1, '0', 'myenv', {
'z': {"value": 0.1, "unit": ""}
})
self.receive_test_run(self.project1, '0', 'myenv', {
'z': {"value": 0.2, "unit": "bikes"}
})
self.receive_test_run(self.project2, '0', 'otherenv', {
'z': {"value": 0.1, "unit": "seconds"}
})
self.receive_test_run(self.project1, '1', 'myenv', {
'a': {"value": 0.2, "unit": "seconds"},
'b': {"value": 0.3, "unit": "seconds"}
})
self.receive_test_run(self.project1, '1', 'myenv', {
'c': {"value": 0.4, "unit": "seconds"},
'd/e': {"value": 0.5, "unit": "seconds"}
})
self.receive_test_run(self.project2, '1', 'myenv', {
'a': {"value": 0.2, "unit": "seconds"},
'b': {"value": 0.3, "unit": "seconds"}
})
self.receive_test_run(self.project2, '1', 'myenv', {
'c': {"value": 2.5, "unit": "seconds"},
'd/e': {"value": 2.5, "unit": "seconds"}
})
self.receive_test_run(self.project1, '1', 'otherenv', {
'a': {"value": 0.2, "unit": "seconds"},
'b': {"value": 0.4, "unit": "seconds"}
})
self.receive_test_run(self.project1, '1', 'otherenv', {
'c': {"value": 0.5, "unit": "seconds"},
'd/e': {"value": 0.6, "unit": "seconds"}
})
self.receive_test_run(self.project2, '1', 'otherenv', {
'a': {"value": 0.2, "unit": "seconds"},
'b': {"value": 0.4, "unit": "seconds"}
})
self.receive_test_run(self.project2, '1', 'otherenv', {
'c': {"value": 2.5, "unit": "seconds"},
'd/e': {"value": 2.4, "unit": "seconds"}
})
self.build0 = self.project1.builds.first()
self.build1 = self.project1.builds.last()
self.build2 = self.project2.builds.first()
self.build3 = self.project2.builds.last()
# Data for testing regressions and fixes
self.project = self.group.projects.create(slug='project')
self.environment_a = self.project.environments.create(slug='env_a')
self.environment_b = self.project.environments.create(slug='env_b')
self.build_a = self.project.builds.create(version='build_a')
self.build_b = self.project.builds.create(version='build_b')
# Create a few thresholds to trigger comparison
self.project.thresholds.create(name='suite_a/regressing-metric-higher-better', is_higher_better=True)
self.project.thresholds.create(name='suite_a/regressing-metric-lower-better', is_higher_better=False)
self.project.thresholds.create(name='suite_a/improved-metric-higher-better', is_higher_better=True)
self.project.thresholds.create(name='suite_a/improved-metric-lower-better', is_higher_better=False)
self.project.thresholds.create(name='suite_a/stable-metric')
# Thresholds with value WILL NOT trigger regressions/fixes
self.project.thresholds.create(name='suite_a/valueness-threshold-metric', value=1)
# Thresholds from different environments SHOULD NOT interact
self.project.thresholds.create(name='suite_a/different-env-metric', environment=self.environment_a)
self.project.thresholds.create(name='suite_a/different-env-metric', environment=self.environment_b)
# Thresholds from different suites SHOULD NOT interact
self.project.thresholds.create(name='suite_a/different-suite-metric')
self.project.thresholds.create(name='suite_b/different-suite-metric')
def test_builds(self):
comp = compare(self.build1, self.build3)
self.assertEqual([self.build1, self.build3], comp.builds)
def test_test_runs(self):
comp = compare(self.build1, self.build3)
self.assertEqual(['myenv', 'otherenv'], comp.environments[self.build1])
self.assertEqual(['myenv', 'otherenv'], comp.environments[self.build3])
def test_metrics_are_sorted(self):
comp = compare(self.build0, self.build1)
self.assertEqual(['a', 'b', 'c', 'd/e', 'z'], list(comp.results.keys()))
def test_metric_results(self):
comp = compare(self.build1, self.build3)
self.assertEqual((0.2, 0.0, 1), comp.results['a'][self.build1, 'otherenv'])
self.assertEqual((0.5, 0.0, 1), comp.results['c'][self.build1, 'otherenv'])
self.assertEqual((0.2, 0.0, 1), comp.results['a'][self.build3, 'otherenv'])
self.assertEqual((0.4, 0.0, 1), comp.results['b'][self.build3, 'otherenv'])
def test_compare_projects(self):
comp = MetricComparison.compare_projects(self.project1, self.project2)
self.assertEqual([self.build1, self.build3], comp.builds)
def test_no_data(self):
new_project = self.group.projects.create(slug='new')
comp = MetricComparison.compare_projects(new_project)
self.assertFalse(comp.diff)
self.assertEqual([], comp.builds)
def test_diff(self):
comparison = compare(self.build1, self.build3)
diff = comparison.diff
self.assertEqual(['c', 'd/e'], sorted(diff.keys()))
def test_empty_diff(self):
comparison = compare(self.build1, self.build1) # same build → no diff
self.assertFalse(comparison.diff)
def test_empty_with_no_builds(self):
new_project = self.group.projects.create(slug='new')
comparison = MetricComparison.compare_projects(new_project)
self.assertFalse(comparison.diff)
def test_multiple_values_same_metric(self):
comparison = compare(self.build0, self.build2)
diff = comparison.diff
self.assertEqual(['z'], sorted(diff.keys()))
true_mean = (0.1 + 0.2) / 2.0 # mean
true_stddev = sqrt((pow(0.1 - true_mean, 2.0) + pow(0.2 - true_mean, 2.0)) / 2.0) # standard deviation
mean, stddev, count = diff['z'][self.build0, 'myenv']
self.assertAlmostEqual(true_mean, mean)
self.assertAlmostEqual(true_stddev, stddev)
self.assertEqual(2, count)
def test_basic(self):
# metric full name | build1 result | build2 result | expected result
# False -> the metric has regressed, True -> the metric has been fixed, None -> not a regression nor fix
test_cases = {
'suite_a/improved-metric-higher-better': (1, 2, True),
'suite_a/improved-metric-lower-better': (2, 1, True),
'suite_a/regressing-metric-higher-better': (2, 1, False),
'suite_a/regressing-metric-lower-better': (1, 2, False),
'suite_a/stable-metric': (1, 1, None),
'suite_a/thresholdless-metric': (1, 2, None),
'suite_a/valueness-threshold-metric': (1, 2, None),
}
for metric_name in test_cases.keys():
build_a_result = test_cases[metric_name][0]
build_b_result = test_cases[metric_name][1]
expected = test_cases[metric_name][2]
# Post build 1 results
self.receive_test_run(self.project, self.build_a.version, self.environment_a.slug, {
metric_name: build_a_result,
})
# Post build 2 results
self.receive_test_run(self.project, self.build_b.version, self.environment_a.slug, {
metric_name: build_b_result,
})
comparison = MetricComparison(self.build_a, self.build_b, regressions_and_fixes_only=True)
if expected is True:
self.assertIn(metric_name, comparison.fixes[self.environment_a.slug])
elif expected is False:
self.assertIn(metric_name, comparison.regressions[self.environment_a.slug])
else:
self.assertNotIn(metric_name, comparison.regressions[self.environment_a.slug])
self.assertNotIn(metric_name, comparison.fixes[self.environment_a.slug])
def different_environments(self):
metric_name = 'suite_a/different-env-metric'
build_a_result = 1
build_b_result = 2
# Post build 1 results
self.receive_test_run(self.project, self.build_a.version, self.environment_a.slug, {
metric_name: build_a_result,
})
# Post build 2 results
self.receive_test_run(self.project, self.build_b.version, self.environment_b.slug, {
metric_name: build_b_result,
})
comparison = MetricComparison(self.build_a, self.build_b, regressions_and_fixes_only=True)
self.assertEqual(0, len(comparison.regressions))
self.assertEqual(0, len(comparison.fixes))
def different_suites(self):
metric_name = 'different-suite-metric'
build_a_result = 1
build_b_result = 2
# Post build 1 results
self.receive_test_run(self.project, self.build_a.version, self.environment_a.slug, {
'suite_a/' + metric_name: build_a_result,
})
# Post build 2 results
self.receive_test_run(self.project, self.build_b.version, self.environment_a.slug, {
'suite_b/' + metric_name: build_b_result,
})
comparison = MetricComparison(self.build_a, self.build_b, regressions_and_fixes_only=True)
self.assertEqual(0, len(comparison.regressions))
self.assertEqual(0, len(comparison.fixes))
|
class MetricComparisonTest(TestCase):
def receive_test_run(self, project, version, env, metrics):
pass
def setUp(self):
pass
def test_builds(self):
pass
def test_test_runs(self):
pass
def test_metrics_are_sorted(self):
pass
def test_metric_results(self):
pass
def test_compare_projects(self):
pass
def test_no_data(self):
pass
def test_diff(self):
pass
def test_empty_diff(self):
pass
def test_empty_with_no_builds(self):
pass
def test_multiple_values_same_metric(self):
pass
def test_basic(self):
pass
def different_environments(self):
pass
def different_suites(self):
pass
| 16 | 0 | 14 | 2 | 11 | 1 | 1 | 0.1 | 1 | 3 | 2 | 0 | 15 | 12 | 15 | 15 | 219 | 39 | 167 | 60 | 151 | 16 | 115 | 60 | 99 | 4 | 1 | 2 | 18 |
145,406 |
Linaro/squad
|
Linaro_squad/test/core/test_update_project_statuses.py
|
test.core.test_update_project_statuses.UpdateStatusesTest
|
class UpdateStatusesTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.environment = self.project.environments.create(slug='theenvironment')
self.suite = self.project.suites.create(slug='/')
def create_build(self, v, datetime=None, create_test_run=True):
build = self.project.builds.create(version=v, datetime=datetime)
if create_test_run:
build.test_runs.create(environment=self.environment)
return build
def test_update_status(self):
build1 = self.create_build('1')
build1.datetime = timezone.make_aware(datetime(2018, 6, 1))
build1.save()
status1 = ProjectStatus.objects.first()
status1.fixes = "fixes:\n- fix1"
status1.finished = True
status1.save()
self.create_build('2')
status2 = ProjectStatus.objects.last()
status2.fixes = "fixes:\n- fix2"
status2.finished = True
status2.save()
call_command('update_project_statuses', "--date-start", "2018-07-01")
status1.refresh_from_db()
status2.refresh_from_db()
self.assertEqual(status1.get_fixes(), {"fixes": ["fix1"]})
self.assertEqual(status2.get_fixes(), {})
|
class UpdateStatusesTest(TestCase):
def setUp(self):
pass
def create_build(self, v, datetime=None, create_test_run=True):
pass
def test_update_status(self):
pass
| 4 | 0 | 11 | 2 | 9 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 3 | 4 | 3 | 3 | 37 | 8 | 29 | 12 | 25 | 0 | 29 | 12 | 25 | 2 | 1 | 1 | 4 |
145,407 |
Linaro/squad
|
Linaro_squad/test/core/test_user_namespace.py
|
test.core.test_user_namespace.UserNamespaceTest
|
class UserNamespaceTest(TestCase):
def test_slug_basic(self):
userns = UserNamespace.objects.create(slug='~foo')
self.assertIsInstance(userns, Group)
def test_create_for(self):
user = User.objects.create(username='foo')
userns = UserNamespace.objects.create_for(user)
self.assertEqual('~foo', userns.slug)
self.assertTrue(userns.writable_by(user))
def test_get_or_create_for(self):
user = User.objects.create(username='foo')
userns1 = UserNamespace.objects.get_or_create_for(user)
userns2 = UserNamespace.objects.get_or_create_for(user)
self.assertEqual(userns1, userns2)
def test_get_only_user_namespaces(self):
Group.objects.create(slug='thegroup')
userns = UserNamespace.objects.create(slug='~foo')
self.assertEqual([userns], list(UserNamespace.objects.all()))
|
class UserNamespaceTest(TestCase):
def test_slug_basic(self):
pass
def test_create_for(self):
pass
def test_get_or_create_for(self):
pass
def test_get_only_user_namespaces(self):
pass
| 5 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 4 | 0 | 4 | 4 | 22 | 4 | 18 | 12 | 13 | 0 | 18 | 12 | 13 | 1 | 1 | 0 | 4 |
145,408 |
Linaro/squad
|
Linaro_squad/test/core/test_utils.py
|
test.core.test_utils.TestCrypto
|
class TestCrypto(TestCase):
def test_encryption(self):
msg = 'confidential message'
encrypted = encrypt(msg)
decrypted = decrypt(encrypted)
self.assertEqual(msg, decrypted)
self.assertEqual(msg, decrypt(encrypted))
|
class TestCrypto(TestCase):
def test_encryption(self):
pass
| 2 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 8 | 1 | 7 | 5 | 5 | 0 | 7 | 5 | 5 | 1 | 1 | 0 | 1 |
145,409 |
Linaro/squad
|
Linaro_squad/test/core/test_utils.py
|
test.core.test_utils.TestJoinName
|
class TestJoinName(TestCase):
def test_join_ungrouped(self):
self.assertEqual('foo', join_name('/', 'foo'))
def test_join_group(self):
self.assertEqual('foo/bar', join_name('foo', 'bar'))
|
class TestJoinName(TestCase):
def test_join_ungrouped(self):
pass
def test_join_group(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 7 | 2 | 5 | 3 | 2 | 0 | 5 | 3 | 2 | 1 | 1 | 0 | 2 |
145,410 |
Linaro/squad
|
Linaro_squad/test/plugins/test_plugin.py
|
test.plugins.test_plugin.TestGetPluginsByFeature
|
class TestGetPluginsByFeature(TestCase):
def test_basics(self):
testrun_plugins = get_plugins_by_feature([Plugin.postprocess_testrun])
testjob_plugins = get_plugins_by_feature([Plugin.postprocess_testjob])
self.assertNotIn('example', testrun_plugins)
self.assertNotIn('linux_log_parser', testjob_plugins)
self.assertIn('linux_log_parser', testrun_plugins)
def test_feature_list_is_none(self):
plugins = get_plugins_by_feature(None)
self.assertIn('example', plugins)
self.assertIn('linux_log_parser', plugins)
def test_empty_feature_list(self):
plugins = get_plugins_by_feature([])
self.assertIn('example', plugins)
self.assertIn('linux_log_parser', plugins)
|
class TestGetPluginsByFeature(TestCase):
def test_basics(self):
pass
def test_feature_list_is_none(self):
pass
def test_empty_feature_list(self):
pass
| 4 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 3 | 0 | 3 | 3 | 18 | 3 | 15 | 8 | 11 | 0 | 15 | 8 | 11 | 1 | 1 | 0 | 3 |
145,411 |
Linaro/squad
|
Linaro_squad/test/plugins/test_plugin.py
|
test.plugins.test_plugin.TestApplyPlugins
|
class TestApplyPlugins(TestCase):
def test_skips_nonexisting_plugins(self):
plugins = []
for plugin in apply_plugins(['example', 'nonexisting']):
plugins.append(plugin)
self.assertEqual(1, len(plugins))
self.assertIsInstance(plugins[0], Plugin)
|
class TestApplyPlugins(TestCase):
def test_skips_nonexisting_plugins(self):
pass
| 2 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 8 | 1 | 7 | 4 | 5 | 0 | 7 | 4 | 5 | 2 | 1 | 1 | 2 |
145,412 |
Linaro/squad
|
Linaro_squad/test/plugins/test_linux_log_parser.py
|
test.plugins.test_linux_log_parser.TestLinuxLogParser
|
class TestLinuxLogParser(TestCase):
def setUp(self):
group = Group.objects.create(slug='mygroup')
self.project = group.projects.create(slug='myproject', enabled_plugins_list='example')
self.build = self.project.builds.create(version='1')
self.env = self.project.environments.create(slug='myenv')
self.plugin = Plugin()
def new_testrun(self, logfile, job_id='999'):
log = read_sample_file(logfile)
testrun = self.build.test_runs.create(environment=self.env, job_id=job_id)
testrun.save_log_file(log)
return testrun
def test_detects_oops(self):
testrun = self.new_testrun('oops.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-oops-oops-bug-preempt-smp')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Linux version 4.4.89-01529-gb29bace', test.log)
self.assertIn('Internal error: Oops - BUG: 0 [#1] PREEMPT SMP', test.log)
self.assertNotIn('Kernel panic', test.log)
def test_detects_kernel_panic(self):
testrun = self.new_testrun('kernelpanic.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-panic-kernel-panic-not-syncing-attempted-to-kill-the-idle-task')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('Kernel panic - not syncing', test.log)
self.assertNotIn('Attempted to kill init! exitcode=0x00000009', test.log)
self.assertNotIn('Internal error: Oops', test.log)
def test_detects_kernel_exception(self):
testrun = self.new_testrun('kernelexceptiontrace.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-exception-warning-cpu-pid-at-kernelsmpc-smp_call_function_many_cond')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('WARNING: CPU: 0 PID: 1 at kernel/smp.c:912 smp_call_function_many_cond+0x3c4/0x3c8', test.log)
self.assertIn('5fe0: 0000000b be963e80 b6f142d9 b6f0e648 60000030 ffffffff"}', test.log)
self.assertNotIn('Internal error: Oops', test.log)
def test_detects_kernel_exception_without_square_braces(self):
testrun = self.new_testrun('kernelexceptiontrace_without_squarebraces.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-exception-warning-cpu-pid-at-kernelsmpc-smp_call_function_many_cond')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('WARNING: CPU: 0 PID: 1 at kernel/smp.c:912 smp_call_function_many_cond+0x3c4/0x3c8', test.log)
self.assertIn('5fe0: 0000000b be963e80 b6f142d9 b6f0e648 60000030 ffffffff"}', test.log)
self.assertNotIn('Internal error: Oops', test.log)
def test_detects_kernel_kasan(self):
testrun = self.new_testrun('kasan.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-kasan-bug-kasan-slab-out-of-bounds-in-kmalloc_oob_right')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('==================================================================', test.log)
self.assertIn('BUG: KASAN: slab-out-of-bounds in kmalloc_oob_right+0x190/0x3b8', test.log)
self.assertIn('Write of size 1 at addr c6aaf473 by task kunit_try_catch/191', test.log)
self.assertNotIn('Internal error: Oops', test.log)
def test_detects_kernel_kfence(self):
testrun = self.new_testrun('kfence.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-kfence-bug-kfence-memory-corruption-in-kfree')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('==================================================================', test.log)
self.assertIn('BUG: KFENCE: memory corruption in kfree+0x8c/0x174', test.log)
self.assertIn('Corrupted memory at 0x00000000c5d55ff8 [ ! ! ! . . . . . . . . . . . . . ] (in kfence-#214):', test.log)
self.assertNotIn('Internal error: Oops', test.log)
def test_detects_kernel_bug(self):
testrun = self.new_testrun('oops.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-bug-bug-spinlock-lockup-suspected-on-cpu-gdbus')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('] BUG:', test.log)
self.assertNotIn('Internal error: Oops', test.log)
testrun = self.new_testrun('kernel_bug_and_invalid_opcode.log', job_id='1000')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-exception-kernel-bug-at-usrsrckernelarchxkvmmmummuc')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('] kernel BUG at', test.log)
self.assertNotIn('] BUG:', test.log)
self.assertNotIn('Internal error: Oops', test.log)
def test_detects_kernel_invalid_opcode(self):
testrun = self.new_testrun('kernel_bug_and_invalid_opcode.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-invalid-opcode-invalid-opcode-smp-pti')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Booting Linux', test.log)
self.assertIn('] invalid opcode:', test.log)
self.assertNotIn('] BUG:', test.log)
self.assertNotIn('Internal error: Oops', test.log)
def test_detects_multiple(self):
testrun = self.new_testrun('multiple_issues_dmesg.log')
self.plugin.postprocess_testrun(testrun)
tests = testrun.tests
test_panic = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-panic-kernel-panic-not-syncing-stack-protector-kernel-stack-is-corrupted-in-ffffffffcc')
test_exception = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-exception-warning-cpu-pid-at-driversgpudrmradeonradeon_objectc-radeon_ttm_bo_destroy')
test_warning = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-warning-warning-cpu-pid-at-driversregulatorcorec-_regulator_putpart')
test_oops = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-oops-oops-preempt-smp')
test_fault = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-fault-unhandled-fault-external-abort-on-non-linefetch-at')
self.assertFalse(test_panic.result)
self.assertNotIn('Boot CPU', test_panic.log)
self.assertIn('Kernel panic - not syncing', test_panic.log)
self.assertFalse(test_exception.result)
self.assertNotIn('Boot CPU', test_exception.log)
self.assertIn('------------[ cut here ]------------', test_exception.log)
self.assertFalse(test_warning.result)
self.assertNotIn('Boot CPU', test_warning.log)
self.assertNotIn('Kernel panic - not syncing', test_warning.log)
self.assertNotIn('------------[ cut here ]------------', test_warning.log)
self.assertNotIn('Unhandled fault:', test_warning.log)
self.assertNotIn('Oops', test_warning.log)
self.assertIn('WARNING: CPU', test_warning.log)
self.assertFalse(test_oops.result)
self.assertNotIn('Boot CPU', test_oops.log)
self.assertNotIn('Kernel panic - not syncing', test_oops.log)
self.assertNotIn('------------[ cut here ]------------', test_oops.log)
self.assertNotIn('WARNING: CPU', test_oops.log)
self.assertNotIn('Unhandled fault:', test_oops.log)
self.assertIn('Oops', test_oops.log)
self.assertFalse(test_fault.result)
self.assertNotIn('Boot CPU', test_fault.log)
self.assertNotIn('Kernel panic - not syncing', test_fault.log)
self.assertNotIn('------------[ cut here ]------------', test_fault.log)
self.assertNotIn('WARNING: CPU', test_fault.log)
self.assertNotIn('Oops', test_fault.log)
self.assertIn('Unhandled fault:', test_fault.log)
def test_pass_if_nothing_is_found(self):
testrun = self.new_testrun('/dev/null')
self.plugin.postprocess_testrun(testrun)
tests = testrun.tests
test_panic = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-panic')
test_exception = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-exception')
test_warning = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-warning')
test_oops = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-oops')
test_fault = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-fault')
self.assertTrue(test_panic.result)
self.assertTrue(test_exception.result)
self.assertTrue(test_warning.result)
self.assertTrue(test_oops.result)
self.assertTrue(test_fault.result)
def test_two_testruns_distinct_test_names(self):
testrun1 = self.new_testrun('/dev/null', 'job1')
testrun2 = self.new_testrun('/dev/null', 'job2')
self.plugin.postprocess_testrun(testrun1)
self.plugin.postprocess_testrun(testrun2)
self.assertNotEqual(testrun1.tests.all(), testrun2.tests.all())
def test_rcu_warning(self):
testrun = self.new_testrun('rcu_warning.log')
self.plugin.postprocess_testrun(testrun)
tests = testrun.tests
test_panic = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-panic')
test_exception = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-exception')
test_warning = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-warning-warning-suspicious-rcu-usage')
test_oops = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-oops')
test_fault = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-fault')
self.assertTrue(test_panic.result)
self.assertTrue(test_exception.result)
self.assertTrue(test_oops.result)
self.assertTrue(test_fault.result)
self.assertFalse(test_warning.result)
self.assertIn('WARNING: suspicious RCU usage', test_warning.log)
def test_no_string(self):
testrun = self.build.test_runs.create(environment=self.env, job_id='1111')
self.plugin.postprocess_testrun(testrun)
tests = testrun.tests.filter(result=False)
self.assertEqual(0, tests.count())
def test_metadata_creation(self):
log = 'Kernel panic - not syncing'
testrun = self.build.test_runs.create(environment=self.env, job_id='999')
testrun.save_log_file(log)
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-panic')
self.assertIsNotNone(test.metadata)
def test_boot_log(self):
testrun = self.new_testrun('oops.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-boot', metadata__name='check-kernel-oops-oops-bug-preempt-smp')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Linux version 4.4.89-01529-gb29bace', test.log)
self.assertIn('Internal error: Oops - BUG: 0 [#0] PREEMPT SMP', test.log)
self.assertNotIn('Kernel panic', test.log)
def test_sha_name(self):
testrun = self.new_testrun('oops.log')
self.plugin.postprocess_testrun(testrun)
test = testrun.tests.get(suite__slug='log-parser-boot', metadata__name='check-kernel-oops-oops-bug-preempt-smp')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertNotIn('Linux version 4.4.89-01529-gb29bace', test.log)
self.assertIn('Internal error: Oops - BUG: 0 [#0] PREEMPT SMP', test.log)
self.assertNotIn('Kernel panic', test.log)
# Now check if a test with sha digest in the name
test = testrun.tests.get(suite__slug='log-parser-boot', metadata__name='check-kernel-oops-oops-bug-preempt-smp-a1acf2f0467782c9c2f6aeadb1d1d3cec136642b13d7231824a66ef63ee62220')
self.assertFalse(test.result)
self.assertIsNotNone(test.log)
self.assertIn('Internal error: Oops - BUG: 0 [#0] PREEMPT SMP', test.log)
self.assertNotIn('Internal error: Oops - BUG: 99 [#1] PREEMPT SMP', test.log)
def test_sha_name_multiple(self):
testrun = self.new_testrun('multiple_issues_dmesg.log')
self.plugin.postprocess_testrun(testrun)
tests = testrun.tests
test_panic = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-panic-kernel-panic-not-syncing-stack-protector-kernel-stack-is-corrupted-in-ffffffffcc-ab2f1708a36efc4f90943d58fb240d435fcb3d05f7fac9b00163483fe77209eb')
test_exception = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-exception-warning-cpu-pid-at-driversgpudrmradeonradeon_objectc-radeon_ttm_bo_destroy-77251099bfa081e5c942070a569fe31163336e61a80bda7304cd59f0f4b82080')
test_warning = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-warning-warning-cpu-pid-at-driversregulatorcorec-_regulator_putpart-d44949024d5373185a7381cb9dd291b13c117d6b93feb576a431e5376025004f')
test_oops = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-oops-oops-preempt-smp-4e1ddddb2c142178a8977e7d973c2a13db2bb978aa471c0049ee39fe3fe4d74c')
test_fault = tests.get(suite__slug='log-parser-test', metadata__name='check-kernel-fault-unhandled-fault-external-abort-on-non-linefetch-at-6f9e3ab8f97e35c1e9167fed1e01c6149986819c54451064322b7d4208528e07')
self.assertFalse(test_panic.result)
self.assertNotIn('Boot CPU', test_panic.log)
self.assertIn('Kernel panic - not syncing', test_panic.log)
self.assertFalse(test_exception.result)
self.assertNotIn('Boot CPU', test_exception.log)
self.assertIn('------------[ cut here ]------------', test_exception.log)
self.assertFalse(test_warning.result)
self.assertNotIn('Boot CPU', test_warning.log)
self.assertNotIn('Kernel panic - not syncing', test_warning.log)
self.assertNotIn('------------[ cut here ]------------', test_warning.log)
self.assertNotIn('Unhandled fault:', test_warning.log)
self.assertNotIn('Oops', test_warning.log)
self.assertIn('WARNING: CPU', test_warning.log)
self.assertFalse(test_oops.result)
self.assertNotIn('Boot CPU', test_oops.log)
self.assertNotIn('Kernel panic - not syncing', test_oops.log)
self.assertNotIn('------------[ cut here ]------------', test_oops.log)
self.assertNotIn('WARNING: CPU', test_oops.log)
self.assertNotIn('Unhandled fault:', test_oops.log)
self.assertIn('Oops', test_oops.log)
self.assertFalse(test_fault.result)
self.assertNotIn('Boot CPU', test_fault.log)
self.assertNotIn('Kernel panic - not syncing', test_fault.log)
self.assertNotIn('------------[ cut here ]------------', test_fault.log)
self.assertNotIn('WARNING: CPU', test_fault.log)
self.assertNotIn('Oops', test_fault.log)
self.assertIn('Unhandled fault:', test_fault.log)
|
class TestLinuxLogParser(TestCase):
def setUp(self):
pass
def new_testrun(self, logfile, job_id='999'):
pass
def test_detects_oops(self):
pass
def test_detects_kernel_panic(self):
pass
def test_detects_kernel_exception(self):
pass
def test_detects_kernel_exception_without_square_braces(self):
pass
def test_detects_kernel_kasan(self):
pass
def test_detects_kernel_kfence(self):
pass
def test_detects_kernel_bug(self):
pass
def test_detects_kernel_invalid_opcode(self):
pass
def test_detects_multiple(self):
pass
def test_pass_if_nothing_is_found(self):
pass
def test_two_testruns_distinct_test_names(self):
pass
def test_rcu_warning(self):
pass
def test_no_string(self):
pass
def test_metadata_creation(self):
pass
def test_boot_log(self):
pass
def test_sha_name(self):
pass
def test_sha_name_multiple(self):
pass
| 20 | 0 | 15 | 2 | 13 | 0 | 1 | 0.03 | 1 | 1 | 1 | 0 | 19 | 4 | 19 | 19 | 297 | 53 | 243 | 82 | 223 | 7 | 243 | 82 | 223 | 1 | 1 | 0 | 19 |
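The assertions in the record above pin down the behaviour of squad's log-parser plugin: each kernel error signature becomes a check-kernel-* test, and a sha256 digest of the matched snippet is appended to disambiguate repeated signatures. A minimal, self-contained sketch of that idea follows; the signature names and regexes here are illustrative assumptions, not the plugin's actual patterns.

import hashlib
import re

# Illustrative signatures only; the real plugin ships its own regex set.
SIGNATURES = {
    'check-kernel-panic': r'Kernel panic - not syncing.*',
    'check-kernel-oops': r'Oops.*',
    'check-kernel-fault': r'Unhandled fault:.*',
}

def scan(log):
    findings = {}
    for name, pattern in SIGNATURES.items():
        for snippet in re.findall(pattern, log):
            # Stable per-snippet identifier, mirroring the sha-suffixed test names above.
            sha = hashlib.sha256(snippet.encode()).hexdigest()
            findings.setdefault(name, []).append('%s-%s' % (name, sha))
    return findings

print(scan('Kernel panic - not syncing: stack corrupted'))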
145,413 |
Linaro/squad
|
Linaro_squad/test/plugins/test_gerrit.py
|
test.plugins.test_gerrit.FakeSubprocess
|
class FakeSubprocess():
__last_cmd__ = None
PIPE = 0
class CalledProcessError(BaseException):
def __str__(self):
return 'Could not establish connection to host'
@staticmethod
def run(cmd, stdout=0, stderr=0):
FakeSubprocess.__last_cmd__ = ' '.join(cmd)
gerrit_cmd = 'gerrit review'
options = ' '.join(gerrit.DEFAULT_SSH_OPTIONS)
port = gerrit.DEFAULT_SSH_PORT
if 'ssh %s -p %s theuser@the.host' % (options, port) != ' '.join(cmd[0:10]) \
or not cmd[10].startswith(gerrit_cmd):
raise FakeSubprocess.CalledProcessError()
obj = FakeObject()
obj.stdout = ""
obj.stderr = ""
return obj
@staticmethod
def given_cmd():
return FakeSubprocess.__last_cmd__
|
class FakeSubprocess():
class CalledProcessError(BaseException):
def __str__(self):
pass
@staticmethod
def run(cmd, stdout=0, stderr=0):
pass
@staticmethod
def given_cmd():
pass
| 7 | 0 | 6 | 0 | 5 | 0 | 1 | 0 | 0 | 2 | 2 | 0 | 0 | 0 | 2 | 2 | 26 | 4 | 22 | 13 | 15 | 0 | 19 | 11 | 14 | 2 | 0 | 1 | 4 |
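FakeSubprocess exists so the gerrit plugin tests can exercise the 'ssh ... gerrit review' path without opening a real connection. A hedged sketch of the injection pattern it enables, using a self-contained stand-in rather than the real plugin module (the notify_gerrit function and FakeRun class are hypothetical names introduced here for illustration):

from unittest import mock

class FakeRun:
    """Test double: records the command instead of executing it."""
    last_cmd = None

    @staticmethod
    def run(cmd, stdout=0, stderr=0):
        FakeRun.last_cmd = ' '.join(cmd)
        return mock.Mock(stdout='', stderr='')

def notify_gerrit(runner, host, change, message):
    # Stand-in for plugin code that would call subprocess.run() for real.
    return runner.run(['ssh', host, 'gerrit', 'review', change, '-m', message])

notify_gerrit(FakeRun, 'the.host', '12345,1', 'All tests passed')
assert FakeRun.last_cmd == 'ssh the.host gerrit review 12345,1 -m All tests passed'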
145,414 |
Linaro/squad
|
Linaro_squad/test/plugins/test_gerrit.py
|
test.plugins.test_gerrit.FakeRequests
|
class FakeRequests():
__last_json__ = None
class auth():
class HTTPBasicAuth():
def __init__(self, user, password):
self.user = user
self.password = password
@staticmethod
def post(url, auth=None, json=None):
FakeRequests.__last_json__ = json
result = FakeObject()
result.status_code = 200
user = auth.user
password = auth.password
if 'https://the.host' not in url or [user, password] != ['theuser', '1234'] or json['message'] is None:
result.status_code = 400
return result
@staticmethod
def get(url, auth=None, json=None):
FakeRequests.__last_json__ = json
result = FakeObject()
result.status_code = 200
user = auth.user
password = auth.password
if 'https://the.host' not in url or [user, password] != ['theuser', '1234']:
result.status_code = 400
return result
@staticmethod
def given_json():
return FakeRequests.__last_json__
|
class FakeRequests():
class auth():
class HTTPBasicAuth():
def __init__(self, user, password):
pass
@staticmethod
def post(url, auth=None, json=None):
pass
@staticmethod
def get(url, auth=None, json=None):
pass
@staticmethod
def given_json():
pass
| 10 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 3 | 3 | 34 | 4 | 30 | 19 | 20 | 0 | 27 | 16 | 20 | 2 | 0 | 1 | 6 |
145,415 |
Linaro/squad
|
Linaro_squad/test/plugins/test_gerrit.py
|
test.plugins.test_gerrit.FakeObject
|
class FakeObject():
text = response_json_text
pass
|
class FakeObject():
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 2 | 2 | 0 | 3 | 2 | 2 | 0 | 0 | 0 | 0 |
145,416 |
Linaro/squad
|
Linaro_squad/test/frontend/test_utils.py
|
test.frontend.test_utils.FileTypeTest
|
class FileTypeTest(TestCase):
def test_text(self):
self.assertEqual('text', file_type('foo.txt'))
def test_code(self):
self.assertEqual('code', file_type('foo.py'))
self.assertEqual('code', file_type('foo.sh'))
def test_image(self):
self.assertEqual('image', file_type('foo.png'))
self.assertEqual('image', file_type('foo.jpg'))
|
class FileTypeTest(TestCase):
def test_text(self):
pass
def test_code(self):
pass
def test_image(self):
pass
| 4 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 12 | 3 | 9 | 4 | 5 | 0 | 9 | 4 | 5 | 1 | 1 | 0 | 3 |
145,417 |
Linaro/squad
|
Linaro_squad/test/frontend/test_utils.py
|
test.frontend.test_utils.AlphaNumSortTest
|
class AlphaNumSortTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.p1 = self.group.projects.create(slug='project1', name='project-v1.1')
self.p2 = self.group.projects.create(slug='project2', name='project-v1.10')
self.p3 = self.group.projects.create(slug='project3', name='project-v1.2')
def test_asc_sort(self):
projects = Project.objects.all()
projects_sorted = alphanum_sort(projects, 'name', reverse=False)
# v1.1 -> v1.2 -> v1.10
self.assertEqual([self.p1, self.p3, self.p2], projects_sorted)
def test_desc_sort(self):
projects = Project.objects.all()
projects_sorted = alphanum_sort(projects, 'name')
# v1.10 -> v1.2 -> v1.1
self.assertEqual([self.p2, self.p3, self.p1], projects_sorted)
|
class AlphaNumSortTest(TestCase):
def setUp(self):
pass
def test_asc_sort(self):
pass
def test_desc_sort(self):
pass
| 4 | 0 | 5 | 0 | 4 | 1 | 1 | 0.14 | 1 | 1 | 1 | 0 | 3 | 4 | 3 | 3 | 19 | 3 | 14 | 12 | 10 | 2 | 14 | 12 | 10 | 1 | 1 | 0 | 3 |
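AlphaNumSortTest pins down the ordering contract of squad's alphanum_sort helper: version-like names compare numerically, so project-v1.10 sorts after project-v1.2. A minimal sketch of a natural-sort key that satisfies the same assertions; the real helper's implementation may differ.

import re

def natural_key(text):
    # Split 'project-v1.10' into ['project-v', 1, '.', 10, ''] so numeric runs
    # compare as integers rather than character by character.
    return [int(tok) if tok.isdigit() else tok for tok in re.split(r'(\d+)', text)]

names = ['project-v1.1', 'project-v1.10', 'project-v1.2']
assert sorted(names, key=natural_key) == ['project-v1.1', 'project-v1.2', 'project-v1.10']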
145,418 |
Linaro/squad
|
Linaro_squad/test/core/test_test_summary.py
|
test.core.test_test_summary.TestSummaryTest
|
class TestSummaryTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.receive_testrun = ReceiveTestRun(self.project, update_project_status=False)
def test_basics(self):
build = self.project.builds.create(version='1')
env = self.project.environments.create(slug='env')
known_issue = KnownIssue.objects.create(title='dummy_issue', test_name='tests/pla')
known_issue.environments.add(env)
known_issue.save()
tests_json = """
{
"tests/foo": "pass",
"tests/bar": "fail",
"tests/baz": "none",
"tests/qux": "fail",
"tests/pla": "fail"
}
"""
self.receive_testrun(build.version, env.slug, tests_file=tests_json)
summary = TestSummary(build)
self.assertEqual(5, summary.tests_total)
self.assertEqual(1, summary.tests_pass)
self.assertEqual(2, summary.tests_fail)
self.assertEqual(1, summary.tests_skip)
self.assertEqual(1, summary.tests_xfail)
def test_test_summary_retried_tests(self):
build = Build.objects.create(project=self.project, version='1.1')
env = self.project.environments.create(slug='env')
self.receive_testrun(build.version, env.slug, tests_file='{"tests/foo": "pass"}')
self.receive_testrun(build.version, env.slug, tests_file='{"tests/foo": "pass"}')
summary = build.test_summary
self.assertEqual(2, summary.tests_total)
self.assertEqual(2, summary.tests_pass)
def test_later_test_does_not_prevails(self):
build = Build.objects.create(project=self.project, version='1.1')
env = self.project.environments.create(slug='env')
self.receive_testrun(build.version, env.slug, tests_file='{"tests/foo": "pass"}')
self.receive_testrun(build.version, env.slug, tests_file='{"tests/foo": "fail"}')
summary = build.test_summary
self.assertEqual(2, summary.tests_total)
self.assertEqual(1, summary.tests_pass)
self.assertEqual(1, summary.tests_fail)
def test_count_separate_environments_separately(self):
build = Build.objects.create(project=self.project, version='1.1')
env1 = self.project.environments.create(slug='env1')
env2 = self.project.environments.create(slug='env2')
self.receive_testrun(build.version, env1.slug, tests_file='{"tests/foo": "pass"}')
self.receive_testrun(build.version, env2.slug, tests_file='{"tests/foo": "fail"}')
summary = build.test_summary
self.assertEqual(2, summary.tests_total)
self.assertEqual(1, summary.tests_pass)
self.assertEqual(1, summary.tests_fail)
def test_count_single_environment(self):
build = Build.objects.create(project=self.project, version='1.1')
env1 = self.project.environments.create(slug='env1')
env2 = self.project.environments.create(slug='env2')
self.receive_testrun(build.version, env1.slug, tests_file='{"tests/foo": "pass"}')
self.receive_testrun(build.version, env2.slug, tests_file='{"tests/foo": "fail"}')
summary = TestSummary(build, env1)
self.assertEqual(1, summary.tests_total)
self.assertEqual(1, summary.tests_pass)
self.assertEqual(0, summary.tests_fail)
|
class TestSummaryTest(TestCase):
def setUp(self):
pass
def test_basics(self):
pass
def test_test_summary_retried_tests(self):
pass
def test_later_test_does_not_prevails(self):
pass
def test_count_separate_environments_separately(self):
pass
def test_count_single_environment(self):
pass
| 7 | 0 | 12 | 2 | 11 | 0 | 1 | 0 | 1 | 4 | 4 | 0 | 6 | 3 | 6 | 6 | 81 | 17 | 64 | 29 | 57 | 0 | 56 | 29 | 49 | 1 | 1 | 0 | 6 |
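test_basics above implies the bucketing rule behind TestSummary: a result of 'none' counts as a skip, a failure whose test name matches an active KnownIssue for the environment counts as xfail, and everything else falls into pass or fail. A self-contained sketch of that rule, assuming a plain dict of results; the real summary is computed from the database, not in memory.

def summarize(results, known_issues):
    # results: mapping of test name -> 'pass' / 'fail' / 'none'
    summary = {'pass': 0, 'fail': 0, 'skip': 0, 'xfail': 0}
    for name, status in results.items():
        if status == 'pass':
            summary['pass'] += 1
        elif status == 'none':
            summary['skip'] += 1
        elif name in known_issues:
            summary['xfail'] += 1
        else:
            summary['fail'] += 1
    return summary

results = {'tests/foo': 'pass', 'tests/bar': 'fail', 'tests/baz': 'none',
           'tests/qux': 'fail', 'tests/pla': 'fail'}
# Matches the counts asserted in test_basics: 1 pass, 2 fail, 1 skip, 1 xfail.
print(summarize(results, known_issues={'tests/pla'}))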
145,419 |
Linaro/squad
|
Linaro_squad/test/frontend/test_tests.py
|
test.frontend.test_tests.TestRunTestsTest
|
class TestRunTestsTest(TestCase):
def setUp(self):
self.client = Client()
group = models.Group.objects.create(slug='mygroup')
project = group.projects.create(slug='myproject')
env = project.environments.create(slug='myenv')
for test, _ in tests_file.items():
if test.startswith('suite2/'):
issue = models.KnownIssue.objects.create(
title='foo fails',
test_name=test
)
issue.environments.add(env)
ReceiveTestRun(project)(
version='1',
environment_slug='myenv',
log_file='log file contents ...',
tests_file=json.dumps(tests_file),
metrics_file='{}',
metadata_file='{ "job_id" : "1" }',
)
self.test_run = models.TestRun.objects.last()
def test_table_layout(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=table')
self.assertIn("onclick=\"window.location = \'?results_layout=table&failures_only=false#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_table_layout_failures_only_false(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=table&failures_only=false')
self.assertIn("onclick=\"window.location = \'?results_layout=table&failures_only=true#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_table_layout_failures_only_true(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=table&failures_only=true')
self.assertIn("onclick=\"window.location = \'?results_layout=table&failures_only=false#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_envbox_layout(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=envbox')
self.assertIn("onclick=\"window.location = \'?results_layout=envbox&failures_only=false#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_envbox_layout_failures_only_false(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=envbox&failures_only=false')
self.assertIn("onclick=\"window.location = \'?results_layout=envbox&failures_only=true#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_envbox_layout_failures_only_true(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=envbox&failures_only=true')
self.assertIn("onclick=\"window.location = \'?results_layout=envbox&failures_only=false#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_failures_only_user_preference_false(self):
self.user = models.User.objects.create(username='theuser')
self.client = Client()
self.client.force_login(self.user)
self.user_preferences = get_user_preferences(user=self.user)
self.user_preferences.display_failures_only = False
self.user_preferences.save()
response = self.client.get('/mygroup/myproject/build/1/')
self.assertIn("onclick=\"window.location = \'?failures_only=true#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_failures_only_user_preference_true(self):
self.user = models.User.objects.create(username='theuser')
self.client = Client()
self.client.force_login(self.user)
self.user_preferences = get_user_preferences(user=self.user)
self.user_preferences.display_failures_only = True
self.user_preferences.save()
response = self.client.get('/mygroup/myproject/build/1/')
self.assertIn("onclick=\"window.location = \'?failures_only=false#test-results'\"", response.content.decode("utf-8"))
self.assertEqual(200, response.status_code)
def test_suitebox_layout(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=suitebox')
self.assertEqual(200, response.status_code)
def test_suitebox_layout_failures_only(self):
response = self.client.get('/mygroup/myproject/build/1/?results_layout=suitebox&failures_only=false')
self.assertEqual(200, response.status_code)
def test_testrun_tests(self):
response = self.client.get('/mygroup/myproject/build/1/testrun/%s/suite/suite1/tests/' % self.test_run.id)
self.assertEqual(200, response.status_code)
def test_testrun_test_details(self):
response = self.client.get('/mygroup/myproject/build/1/testrun/%s/suite/suite1/test/test1/details/' % self.test_run.id)
self.assertEqual(200, response.status_code)
|
class TestRunTestsTest(TestCase):
def setUp(self):
pass
def test_table_layout(self):
pass
def test_table_layout_failures_only_false(self):
pass
def test_table_layout_failures_only_true(self):
pass
def test_envbox_layout(self):
pass
def test_envbox_layout_failures_only_false(self):
pass
def test_envbox_layout_failures_only_true(self):
pass
def test_failures_only_user_preference_false(self):
pass
def test_failures_only_user_preference_true(self):
pass
def test_suitebox_layout(self):
pass
def test_suitebox_layout_failures_only(self):
pass
def test_testrun_tests(self):
pass
def test_testrun_test_details(self):
pass
| 14 | 0 | 6 | 0 | 6 | 1 | 1 | 0.1 | 1 | 3 | 3 | 0 | 13 | 4 | 13 | 13 | 93 | 15 | 78 | 35 | 64 | 8 | 68 | 35 | 54 | 3 | 1 | 2 | 15 |
145,420 |
Linaro/squad
|
Linaro_squad/test/frontend/test_template_tags.py
|
test.frontend.test_template_tags.TemplateTagsTest
|
class TemplateTagsTest(TestCase):
def test_strip_get_parameters(self):
fake_request = FakeRequest()
fake_request.GET = FakeGet({'page': 2, 'existing_arg': 'val'})
context = {'request': fake_request}
result = strip_get_parameters(context, ['page'])
self.assertIn('existing_arg', result)
self.assertNotIn('page', result)
def test_update_get_parameters(self):
fake_request = FakeRequest()
fake_request.GET = FakeGet({'page': 2, 'existing_arg': 'val'})
context = {'request': fake_request}
result = update_get_parameters(context, {'page': 42})
self.assertIn('existing_arg', result)
self.assertIn('page=42', result)
def test_get_page_url(self):
fake_request = FakeRequest()
fake_request.GET = FakeGet({'page': 2, 'existing_arg': 'val'})
context = {'request': fake_request, 'get_page_url': get_page_url}
template_to_render = Template('{{get_page_url(42)}}')
rendered_template = template_to_render.render(context)
self.assertNotIn('page=2', rendered_template)
self.assertIn('page=42', rendered_template)
self.assertIn('existing_arg=val', rendered_template)
def test_to_json(self):
self.assertEqual('1', to_json(1))
self.assertEqual('"a string"', to_json('a string'))
self.assertEqual('[1, 2, 3]', to_json([1, 2, 3]))
self.assertEqual('{"key": 42}', to_json({'key': 42}))
self.assertEqual('', to_json(FakeGet())) # non-parsable types return empty string
def test_socialaccount_providers_google(self):
s = SocialApp.objects.create(
name="foo",
client_id="ID_123456789",
secret="secret_987654321",
provider=GoogleProvider.id
)
s.save()
site = Site.objects.first()
s.sites.add(site)
s.save()
factory = RequestFactory()
context = {"request": factory.get("/login")}
social_providers = socialaccount_providers(context)
self.assertEqual(1, len(social_providers.keys()))
self.assertEqual(GoogleProvider, list(social_providers)[0].__class__)
def test_socialaccount_providers_github(self):
s = SocialApp.objects.create(
name="foo",
client_id="ID_123456789",
secret="secret_987654321",
provider=GitHubProvider.id
)
s.save()
site = Site.objects.first()
s.sites.add(site)
s.save()
factory = RequestFactory()
context = {"request": factory.get("/login")}
social_providers = socialaccount_providers(context)
self.assertEqual(1, len(social_providers.keys()))
self.assertEqual(GitHubProvider, list(social_providers)[0].__class__)
def test_socialaccount_providers_gitlab(self):
s = SocialApp.objects.create(
name="foo",
client_id="ID_123456789",
secret="secret_987654321",
provider=GitLabProvider.id
)
s.save()
site = Site.objects.first()
s.sites.add(site)
s.save()
factory = RequestFactory()
context = {"request": factory.get("/login")}
social_providers = socialaccount_providers(context)
self.assertEqual(1, len(social_providers.keys()))
self.assertEqual(GitLabProvider, list(social_providers)[0].__class__)
def test_catch_error_when_status_missing(self):
# Test that if the status for a build gets deleted, this is
# handled appropriately rather than causing a crash.
# create the group, project and build
self.group = models.Group.objects.create(slug="mygroup")
self.project = self.group.projects.create(slug="myproject")
self.build = self.project.builds.create(version="mybuild")
self.project.latest_build = self.build
# Set build status to None
self.build.status = None
# Try to call project_status when status is None
missing_project_status_error = False
try:
status = project_status(self.project)
except models.Build.status.RelatedObjectDoesNotExist:
missing_project_status_error = True
# Check call to project_status doesn't crash
self.assertFalse(missing_project_status_error)
# Check status returns None as expected
self.assertEqual(status, None)
|
class TemplateTagsTest(TestCase):
def test_strip_get_parameters(self):
pass
def test_update_get_parameters(self):
pass
def test_get_page_url(self):
pass
def test_to_json(self):
pass
def test_socialaccount_providers_google(self):
pass
def test_socialaccount_providers_github(self):
pass
def test_socialaccount_providers_gitlab(self):
pass
def test_catch_error_when_status_missing(self):
pass
| 9 | 0 | 13 | 1 | 11 | 1 | 1 | 0.09 | 1 | 4 | 3 | 0 | 8 | 3 | 8 | 8 | 114 | 16 | 91 | 39 | 82 | 8 | 76 | 39 | 67 | 2 | 1 | 1 | 9 |
145,421 |
Linaro/squad
|
Linaro_squad/test/frontend/test_template_tags.py
|
test.frontend.test_template_tags.FakeRequest
|
class FakeRequest():
pass
|
class FakeRequest():
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 0 | 0 |
145,422 |
Linaro/squad
|
Linaro_squad/test/frontend/test_template_tags.py
|
test.frontend.test_template_tags.FakeGet
|
class FakeGet():
def __init__(self, params=None):
self.params = params or {}
def __setitem__(self, key, value):
self.params[key] = value
def __getitem__(self, key):
return self.params.get(key)
def __delitem__(self, key):
del self.params[key]
def get(self, key):
return self.__getitem__(key)
def keys(self):
return self.params.keys()
def copy(self):
return self
def urlencode(self):
return urllib.parse.urlencode(self.params)
|
class FakeGet():
def __init__(self, params=None):
pass
def __setitem__(self, key, value):
pass
def __getitem__(self, key):
pass
def __delitem__(self, key):
pass
def get(self, key):
pass
def keys(self):
pass
def copy(self):
pass
def urlencode(self):
pass
| 9 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 | 1 | 8 | 8 | 25 | 8 | 17 | 10 | 8 | 0 | 17 | 10 | 8 | 1 | 0 | 0 | 8 |
145,423 |
Linaro/squad
|
Linaro_squad/test/frontend/test_history.py
|
test.frontend.test_history.TestHistoryTest
|
class TestHistoryTest(TestCase):
def setUp(self):
self.client = Client()
group = Group.objects.create(slug='mygroup')
project = group.projects.create(slug='myproject')
env = project.environments.create(slug='myenv')
suite = project.suites.create(slug='mysuite')
build = project.builds.create(version='mybuild')
test_name = 'mytest'
metadata = SuiteMetadata.objects.create(kind='test', suite=suite.slug, name=test_name)
self.testrun = build.test_runs.create(job_id='123', environment=env)
self.testrun.tests.create(suite=suite, metadata=metadata, build=build, environment=env)
self.testrun.status.create(test_run=self.testrun, suite=suite)
def test_tests_history_with_empty_suite_metadata(self):
response = self.client.get('/mygroup/myproject/build/mybuild/testrun/%s/suite/mysuite/test/mytest/history/' % self.testrun.id)
self.assertEqual(200, response.status_code)
def test_tests_history_suite_not_found(self):
response = self.client.get('/mygroup/myproject/build/mybuild/testrun/%s/suite/mynonexistingsuite/test/mytest/history/' % self.testrun.id)
self.assertEqual(404, response.status_code)
def test_tests_history_test_not_found(self):
response = self.client.get('/mygroup/myproject/mysuite/mynonexistanttest/')
self.assertEqual(404, response.status_code)
|
class TestHistoryTest(TestCase):
def setUp(self):
pass
def test_tests_history_with_empty_suite_metadata(self):
pass
def test_tests_history_suite_not_found(self):
pass
def test_tests_history_test_not_found(self):
pass
| 5 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 4 | 2 | 4 | 4 | 26 | 4 | 22 | 17 | 17 | 0 | 22 | 17 | 17 | 1 | 1 | 0 | 4 |
145,424 |
Linaro/squad
|
Linaro_squad/test/frontend/test_group_settings.py
|
test.frontend.test_group_settings.TestNewProject
|
class TestNewProject(TestCase):
def setUp(self):
self.group = models.Group.objects.create(slug='mygroup')
self.user = models.User.objects.create(username='theuser')
self.group.add_admin(self.user)
self.client = Client()
self.client.force_login(self.user)
def test_create_project(self):
response = self.client.post(
'/_/group-settings/mygroup/new-project/',
{
'slug': 'myproject'
}
)
self.assertEqual(302, response.status_code)
self.assertTrue(self.group.projects.filter(slug='myproject').exists())
def test_create_group_validates_uniqueness(self):
self.group.projects.create(slug='myproject')
response = self.client.post(
'/_/group-settings/mygroup/new-project/',
{
'slug': 'myproject'
}
)
self.assertEqual(200, response.status_code)
self.assertIn('already exists', str(response.content))
|
class TestNewProject(TestCase):
def setUp(self):
pass
def test_create_project(self):
pass
def test_create_group_validates_uniqueness(self):
pass
| 4 | 0 | 8 | 0 | 8 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 3 | 3 | 3 | 3 | 29 | 3 | 26 | 9 | 22 | 0 | 16 | 9 | 12 | 1 | 1 | 0 | 3 |
145,425 |
Linaro/squad
|
Linaro_squad/test/frontend/test_group_settings.py
|
test.frontend.test_group_settings.TestNewGroup
|
class TestNewGroup(TestCase):
def setUp(self):
self.user = models.User.objects.create(username='theuser')
self.client = Client()
self.client.force_login(self.user)
def test_create_group(self):
response = self.client.post('/_/new-group/', {'slug': 'mygroup'})
self.assertEqual(302, response.status_code)
self.assertTrue(models.Group.objects.filter(slug='mygroup').exists())
def test_create_group_validates_uniqueness(self):
models.Group.objects.create(slug='mygroup')
response = self.client.post('/_/new-group/', {'slug': 'mygroup'})
self.assertEqual(200, response.status_code)
self.assertIn('already exists', str(response.content))
|
class TestNewGroup(TestCase):
def setUp(self):
pass
def test_create_group(self):
pass
def test_create_group_validates_uniqueness(self):
pass
| 4 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 3 | 2 | 3 | 3 | 17 | 3 | 14 | 8 | 10 | 0 | 14 | 8 | 10 | 1 | 1 | 0 | 3 |
145,426 |
Linaro/squad
|
Linaro_squad/test/frontend/test_comparison.py
|
test.frontend.test_comparison.ProjectComparisonTest
|
class ProjectComparisonTest(TestCase):
def receive_test_run(self, project, version, env, tests):
receive = ReceiveTestRun(project, update_project_status=False)
receive(version, env, tests_file=json.dumps(tests))
def setUp(self):
self.client = Client()
self.group = models.Group.objects.create(slug='mygroup')
self.project1 = self.group.projects.create(slug='project1')
self.project2 = self.group.projects.create(slug='project2')
self.receive_test_run(self.project1, '1', 'myenv', {
'a': 'pass',
'b': 'pass',
})
self.receive_test_run(self.project1, '1', 'myenv', {
'c': 'fail',
'd/e': 'pass',
})
self.receive_test_run(self.project2, '1', 'myenv', {
'a': 'fail',
'b': 'pass',
})
self.receive_test_run(self.project2, '1', 'myenv', {
'c': 'pass',
'd/e': 'pass',
})
self.receive_test_run(self.project1, '1', 'otherenv', {
'a': 'pass',
'b': 'pass',
})
self.receive_test_run(self.project1, '1', 'otherenv', {
'c': 'fail',
'd/e': 'pass',
})
self.receive_test_run(self.project2, '1', 'otherenv', {
'a': 'fail',
'b': 'pass',
})
self.receive_test_run(self.project2, '1', 'otherenv', {
'c': 'pass',
'd/e': 'pass',
})
self.build1 = self.project1.builds.last()
self.build2 = self.project2.builds.last()
def test_comparison_project_sanity_check(self):
url = '/_/compare/?group=mygroup&project_%d=1&project_%d=1&transitions=ignore' % (self.project1.id, self.project2.id)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertIn('d/e', str(response.content))
self.assertIn('myenv', str(response.content))
self.assertIn('otherenv', str(response.content))
self.assertIn('pass', str(response.content))
self.assertIn('fail', str(response.content))
def test_comparison_project_with_default_transition(self):
# default transitions: pass to fail and fail to pass
url = '/_/compare/?group=mygroup&project_%d=1&project_%d=1' % (self.project1.id, self.project2.id)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertNotIn('d/e', str(response.content))
self.assertIn('<th>a</th>', str(response.content))
self.assertIn('<th>c</th>', str(response.content))
self.assertIn('myenv', str(response.content))
self.assertIn('otherenv', str(response.content))
self.assertIn('pass', str(response.content))
self.assertIn('fail', str(response.content))
|
class ProjectComparisonTest(TestCase):
def receive_test_run(self, project, version, env, tests):
pass
def setUp(self):
pass
def test_comparison_project_sanity_check(self):
pass
def test_comparison_project_with_default_transition(self):
pass
| 5 | 0 | 17 | 1 | 16 | 0 | 1 | 0.02 | 1 | 2 | 1 | 0 | 4 | 6 | 4 | 4 | 71 | 7 | 63 | 16 | 58 | 1 | 39 | 16 | 34 | 1 | 1 | 0 | 4 |
145,427 |
Linaro/squad
|
Linaro_squad/test/frontend/test_comparison.py
|
test.frontend.test_comparison.BuildComparisonTest
|
class BuildComparisonTest(TestCase):
def test_comparison_malformed_project_slug(self):
url = '/_/comparebuilds/?project=something-bad'
response = self.client.get(url)
self.assertEqual(404, response.status_code)
|
class BuildComparisonTest(TestCase):
def test_comparison_malformed_project_slug(self):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 6 | 1 | 5 | 4 | 3 | 0 | 5 | 4 | 3 | 1 | 1 | 0 | 1 |
145,428 |
Linaro/squad
|
Linaro_squad/test/frontend/test_basics.py
|
test.frontend.test_basics.FrontendTestProjectList
|
class FrontendTestProjectList(TestCase):
def setUp(self):
self.group = models.Group.objects.create(slug='mygroup')
self.two_day_old_project = self.group.projects.create(slug='two_day_old_project', datetime=timezone.now() - relativedelta(days=2))
self.ten_day_old_project = self.group.projects.create(slug='ten_day_old_project', datetime=timezone.now() - relativedelta(days=10))
self.thirty_day_old_project = self.group.projects.create(slug='thirty_day_old_project', datetime=timezone.now() - relativedelta(days=30))
self.user = User.objects.create(username='theuser')
self.group.add_admin(self.user)
self.client = Client()
self.client.force_login(self.user)
def test_get_project_list_show_projects_active_n_days_ago_some_projects(self):
# test that only the projects that are new enough are returned when
# SHOW_PROJECTS_ACTIVE_N_DAYS_AGO is provided
self.group.settings = "SHOW_PROJECTS_ACTIVE_N_DAYS_AGO: 3\nDEFAULT_PROJECT_COUNT: 2"
order_by = 'last_updated'
display_all_projects = False
projects = get_project_list(self.group, self.user, order_by, display_all_projects)
self.assertEqual(len(projects), 1)
def test_get_project_list_show_projects_active_n_days_ago_no_projects(self):
# test if there are no projects new enough when
# SHOW_PROJECTS_ACTIVE_N_DAYS_AGO is provided that
# DEFAULT_PROJECT_COUNT is used instead
self.group.settings = "SHOW_PROJECTS_ACTIVE_N_DAYS_AGO: 1\nDEFAULT_PROJECT_COUNT: 2"
order_by = 'last_updated'
display_all_projects = False
projects = get_project_list(self.group, self.user, order_by, display_all_projects)
self.assertEqual(len(projects), 2)
def test_get_project_list_show_projects_active_n_days_ago_only_no_projects(self):
# test case where SHOW_PROJECTS_ACTIVE_N_DAYS_AGO is set but no projects are
# new enough and DEFAULT_PROJECT_COUNT is not provided that all
# projects will be shown
self.group.settings = "SHOW_PROJECTS_ACTIVE_N_DAYS_AGO: 1"
order_by = 'last_updated'
display_all_projects = False
projects = get_project_list(self.group, self.user, order_by, display_all_projects)
self.assertEqual(len(projects), 3)
def test_get_project_list_no_parameters(self):
# test case where SHOW_PROJECTS_ACTIVE_N_DAYS_AGO and
# DEFAULT_PROJECT_COUNT are not provided - all projects should be shown
order_by = 'last_updated'
display_all_projects = False
projects = get_project_list(self.group, self.user, order_by, display_all_projects)
self.assertEqual(len(projects), 3)
|
class FrontendTestProjectList(TestCase):
def setUp(self):
pass
def test_get_project_list_show_projects_active_n_days_ago_some_projects(self):
pass
def test_get_project_list_show_projects_active_n_days_ago_no_projects(self):
pass
def test_get_project_list_show_projects_active_n_days_ago_only_no_projects(self):
pass
def test_get_project_list_no_parameters(self):
pass
| 6 | 0 | 10 | 2 | 6 | 2 | 1 | 0.3 | 1 | 1 | 0 | 0 | 5 | 6 | 5 | 5 | 58 | 15 | 33 | 24 | 27 | 10 | 33 | 24 | 27 | 1 | 1 | 0 | 5 |
145,429 |
Linaro/squad
|
Linaro_squad/test/frontend/test_basics.py
|
test.frontend.test_basics.FrontendTestAnonymousUser
|
class FrontendTestAnonymousUser(TestCase):
def setUp(self):
self.group = models.Group.objects.create(slug='mygroup')
self.group_without_project = models.Group.objects.create(slug='mygroup2')
self.group_with_private_projects = models.Group.objects.create(slug='myprivategroup')
self.project = self.group.projects.create(slug='myproject')
self.private_project = self.group_with_private_projects.projects.create(slug='myprivateproject', is_public=False)
self.other_project = self.group.projects.create(slug='yourproject')
self.client = Client()
def hit(self, url, expected_status=200):
with count_queries('url:' + url):
response = self.client.get(url)
self.assertEqual(expected_status, response.status_code)
return response
def test_project(self):
self.hit('/mygroup/myproject/')
def test_group_without_projects(self):
self.hit('/mygroup2/', 404)
def test_group_with_private_projects(self):
self.hit('/myprivategroup/', 404)
|
class FrontendTestAnonymousUser(TestCase):
def setUp(self):
pass
def hit(self, url, expected_status=200):
pass
def test_project(self):
pass
def test_group_without_projects(self):
pass
def test_group_with_private_projects(self):
pass
| 6 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 5 | 7 | 5 | 5 | 26 | 6 | 20 | 14 | 14 | 0 | 20 | 14 | 14 | 1 | 1 | 1 | 5 |
145,430 |
Linaro/squad
|
Linaro_squad/test/frontend/test_basics.py
|
test.frontend.test_basics.FrontendTest
|
class FrontendTest(TestCase):
def setUp(self):
self.group = models.Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.other_project = self.group.projects.create(slug='yourproject')
self.user = User.objects.create(username='theuser')
self.another_user = User.objects.create(username='anotheruser')
self.group.add_admin(self.user)
self.client = Client()
self.client.force_login(self.user)
ReceiveTestRun(self.project)(
version='1.0',
environment_slug='myenv',
log_file='log file contents ...',
tests_file='{}',
metrics_file='{"mysuite/mymetric": 1}',
metadata_file='{ "job_id" : "1" }',
)
self.test_run = models.TestRun.objects.last()
self.suite, _ = self.project.suites.get_or_create(slug='mysuite')
metadata, _ = models.SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='mytest', kind='test')
self.test_run.tests.create(suite=self.suite, result=True, metadata=metadata, build=self.test_run.build, environment=self.test_run.environment)
self.test = self.test_run.tests.first()
backend = Backend.objects.create(
url='http://example.com',
username='foobar',
token='mypassword',
)
self.build = self.test_run.build
self.build.test_jobs.create(
target=self.build.project,
environment='myenv',
backend=backend,
)
self.build.test_jobs.create(
target=self.build.project,
environment='myenv',
backend=backend,
job_status='Incomplete',
)
self.build.test_jobs.create(
target=self.build.project,
environment='myenv',
backend=backend,
job_status='Complete',
)
def hit(self, url, expected_status=200):
with count_queries('url:' + url):
response = self.client.get(url)
self.assertEqual(expected_status, response.status_code)
return response
def test_home(self):
response = self.hit('/')
self.assertContains(response, '<strong>mygroup</strong>', html=True, count=1)
self.assertIsNotNone(re.search(r'2</span>\s*projects', response.content.decode()))
def test_home_project_count(self):
# Test with a logged in user that is not part of the group
client = Client()
client.force_login(self.another_user)
self.project.is_public = False
self.project.save()
self.other_project.is_public = False
self.other_project.save()
response = client.get('/')
self.assertNotContains(response, '<strong>mygroup</strong>', html=True)
self.assertIsNone(re.search(r'2</span>\s*projects', response.content.decode()))
# Now only one should be visible
self.other_project.is_public = True
self.other_project.save()
response = client.get('/')
self.assertContains(response, '<strong>mygroup</strong>', html=True, count=1)
self.assertIsNotNone(re.search(r'1</span>\s*projects', response.content.decode()))
def test_compare(self):
self.hit('/_/compare/')
def test_comparetest(self):
self.hit('/_/comparetest/')
def test_settings(self):
# check if redirection to /_/settings/profile/ works
self.hit('/_/settings/', 302)
def test_group(self):
self.hit('/mygroup/')
def test_group_404(self):
self.hit('/unexistinggroup/', 404)
def test_project(self):
self.hit('/mygroup/myproject/')
def test_project_list_no_status(self):
self.build = self.project.builds.create(version="mybuild")
self.build.status.delete()
self.hit('/mygroup/?all_projects=1')
def test_project_badge(self):
self.hit('/mygroup/myproject/badge')
def test_project_metrics(self):
response = self.hit('/mygroup/myproject/metrics/')
self.assertNotIn('None', str(response.content))
def test_project_metrics_metric_summary(self):
self.hit('/mygroup/myproject/metrics/?environment=myenv&metric=:summary:')
def test_project_test_history_404(self):
self.hit('/mygroup/myproject/tests/foo', 404)
def test_project_404(self):
self.hit('/mygroup/unexistingproject/', 404)
def test_project_no_build(self):
self.project.builds.all().delete()
self.hit('/mygroup/myproject/')
def test_builds(self):
self.hit('/mygroup/myproject/builds/')
def test_builds_unexisting_page(self):
self.hit('/mygroup/myproject/builds/?page=99', 404)
def test_build(self):
self.hit('/mygroup/myproject/build/1.0/')
def test_build_testjobs_progress_per_environment(self):
self.hit('/mygroup/myproject/build/1.0/?testjobs_progress_per_environments=true')
def test_build_badge(self):
self.hit('/mygroup/myproject/build/1.0/badge')
def test_build_badge_title(self):
self.hit('/mygroup/myproject/build/1.0/badge?title=abc')
def test_build_badge_passrate(self):
self.hit('/mygroup/myproject/build/1.0/badge?passrate')
def test_build_badge_metrics(self):
self.hit('/mygroup/myproject/build/1.0/badge?metrics')
def test_build_badge_filter_by_environment(self):
self.hit('/mygroup/myproject/build/1.0/badge?environment=myenv')
def test_build_badge_filter_by_suite(self):
self.hit('/mygroup/myproject/build/1.0/badge?suite=mysuite')
def test_build_badge_filter_by_environment_and_suite(self):
self.hit('/mygroup/myproject/build/1.0/badge?suite=mysuite&environment=myenv')
def test_build_badge_hide_zeros(self):
self.hit('/mygroup/myproject/build/1.0/badge?hide_zeros=1')
def test_build_badge_invalid(self):
self.hit('/mygroup/myproject/build/1.0/badge?foo')
def test_build_404(self):
self.hit('/mygroup/myproject/build/999/', 404)
def test_build_after_cleanup(self):
self.project.data_retention_days = 180
self.project.save()
cleanup_build(self.project.builds.last().id)
response = self.hit('/mygroup/myproject/build/1.0/', 404)
# Django 3.2 introduced a regression that removed the exception message from
# the default 404 template, causing the check below to fail.
# Ref: https://code.djangoproject.com/ticket/32637
from django import get_version
if get_version().startswith('3.2'):
return
self.assertIn('after 180 days', str(response.content))
def test_build_tests_404(self):
self.hit('/mygroup/myproject/build/999/tests/', 404)
def test_build_testjobs_404(self):
self.hit('/mygroup/myproject/build/999/testjobs/', 404)
def test_build_testjobs_tab(self):
response = self.hit('/mygroup/myproject/build/1.0/testjobs/')
# assert that all 3 testjob badges are displayed and each shows a count of 1
self.assertTrue(re.match(r'.*?(?:badge-(Created|Complete|Incomplete)[^>]+title="\1">1.*?){3}.*?', str(response.content)))
def test_build_testjobs_change_per_page(self):
response = self.hit('/mygroup/myproject/build/1.0/testjobs/?per_page=1')
self.assertIn('<a href="?per_page=1&page=2"', str(response.content))
def test_build_latest_finished(self):
self.hit('/mygroup/myproject/build/latest-finished/')
def test_build_metadata(self):
self.hit('/mygroup/myproject/build/1.0/metadata/')
def test_build_callbakcs(self):
self.hit('/mygroup/myproject/build/1.0/callbacks/')
def test_build_latest_finished_404(self):
self.group.projects.create(slug='otherproject')
self.hit('/mygroup/otherproject/')
self.hit('/mygroup/otherproject/build/latest-finished/', 404)
def test_build_metrics(self):
self.hit('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/metrics/' % (self.test_run.id, self.suite.slug))
def test_build_api_link(self):
response = self.hit('/mygroup/myproject/build/1.0/api/', 302)
self.assertRedirects(response, '/api/builds/%d/' % self.build.id, status_code=302)
def test_test_run_build_404(self):
self.hit('/mygroup/myproject/build/2.0.missing/testrun/999/', 404)
def test_test_run_404(self):
self.hit('/mygroup/myproject/build/1.0/testrun/999/', 404)
def test_attachment(self):
data = bytes('text file', 'utf-8')
filename = 'foo.txt'
attachment = self.test_run.attachments.create(filename=filename, length=len(data), mimetype="text/plain")
attachment.save_file(filename, data)
response = self.hit('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/attachments/foo.txt' % (self.test_run.id, self.suite.slug, self.test.name))
self.assertEqual('text/plain', response['Content-Type'])
self.assertEqual(b'text file', response.content)
def test_attachment_download_url(self):
data = bytes('text file', 'utf-8')
filename = 'foo.txt'
attachment = self.test_run.attachments.create(filename=filename, length=len(data), mimetype="text/plain")
attachment.save_file(filename, data)
# NOTE: /api/testruns/%s/attachments?filename=foo.txt redirects to /api/testruns/%s/attachments/?filename=foo.txt
response = self.hit('/api/testruns/%s/attachments/?filename=foo.txt' % (self.test_run.id))
self.assertEqual('text/plain', response['Content-Type'])
self.assertEqual(b'text file', response.content)
def test_log(self):
response = self.hit('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/log' % (self.test_run.id, self.suite.slug, self.test.name))
self.assertEqual('text/plain', response['Content-Type'])
self.assertEqual(b'log file contents ...', response.content)
def test_no_log(self):
self.test_run.log_file_storage = None
self.test_run.save()
response = self.client.get('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/log' % (self.test_run.id, self.suite.slug, self.test.name))
self.assertEqual(404, response.status_code)
def test_tests(self):
response = self.hit('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/tests' % (self.test_run.id, self.suite.slug, self.test.name))
self.assertEqual('application/json', response['Content-Type'])
self.assertEqual(b'{}', response.content)
def test_tests_bad_testrun(self):
response = self.client.get('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/tests' % ('not-an-id', self.suite.slug, self.test.name))
self.assertEqual(404, response.status_code)
def test_metrics(self):
response = self.hit('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/metrics' % (self.test_run.id, self.suite.slug, self.test.name))
self.assertEqual('application/json', response['Content-Type'])
self.assertEqual(b'{"mysuite/mymetric": 1}', response.content)
def test_metrics_bad_testrun(self):
response = self.client.get('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/metrics' % ('not-an-id', self.suite.slug, self.test.name))
self.assertEqual(404, response.status_code)
def test_metadata(self):
response = self.hit('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/metadata' % (self.test_run.id, self.suite.slug, self.test.name))
self.assertEqual('application/json', response['Content-Type'])
self.assertEqual(b'{"job_id": "1"}', response.content)
def test_metadata_bad_testrun(self):
response = self.client.get('/mygroup/myproject/build/1.0/testrun/%s/suite/%s/test/%s/metadata' % ('not-an-id', self.suite.slug, self.test.name))
self.assertEqual(404, response.status_code)
|
class FrontendTest(TestCase):
def setUp(self):
pass
def hit(self, url, expected_status=200):
pass
def test_home(self):
pass
def test_home_project_count(self):
pass
def test_compare(self):
pass
def test_comparetest(self):
pass
def test_settings(self):
pass
def test_group(self):
pass
def test_group_404(self):
pass
def test_project(self):
pass
def test_project_list_no_status(self):
pass
def test_project_badge(self):
pass
def test_project_metrics(self):
pass
def test_project_metrics_metric_summary(self):
pass
def test_project_test_history_404(self):
pass
def test_project_404(self):
pass
def test_project_no_build(self):
pass
def test_builds(self):
pass
def test_builds_unexisting_page(self):
pass
def test_build(self):
pass
def test_build_testjobs_progress_per_environment(self):
pass
def test_build_badge(self):
pass
def test_build_badge_title(self):
pass
def test_build_badge_passrate(self):
pass
def test_build_badge_metrics(self):
pass
def test_build_badge_filter_by_environment(self):
pass
def test_build_badge_filter_by_suite(self):
pass
def test_build_badge_filter_by_environment_and_suite(self):
pass
def test_build_badge_hide_zeros(self):
pass
def test_build_badge_invalid(self):
pass
def test_build_404(self):
pass
def test_build_after_cleanup(self):
pass
def test_build_tests_404(self):
pass
def test_build_testjobs_404(self):
pass
def test_build_testjobs_tab(self):
pass
def test_build_testjobs_change_per_page(self):
pass
def test_build_latest_finished(self):
pass
def test_build_metadata(self):
pass
def test_build_callbakcs(self):
pass
def test_build_latest_finished_404(self):
pass
def test_build_metrics(self):
pass
def test_build_api_link(self):
pass
def test_test_run_build_404(self):
pass
def test_test_run_404(self):
pass
def test_attachment(self):
pass
def test_attachment_download_url(self):
pass
def test_log(self):
pass
def test_no_log(self):
pass
def test_tests(self):
pass
def test_tests_bad_testrun(self):
pass
def test_metrics(self):
pass
def test_metrics_bad_testrun(self):
pass
def test_metadata(self):
pass
def test_metadata_bad_testrun(self):
pass
| 55 | 0 | 4 | 0 | 4 | 0 | 1 | 0.04 | 1 | 6 | 4 | 0 | 54 | 10 | 54 | 54 | 283 | 63 | 212 | 93 | 156 | 8 | 187 | 93 | 131 | 2 | 1 | 1 | 55 |
145,431 |
Linaro/squad
|
Linaro_squad/test/core/test_utils.py
|
test.core.test_utils.TestSplitList
|
class TestSplitList(TestCase):
def test_split_list(self):
_list = [1, 2, 3, 4, 5, 6, 7]
chunks = split_list(_list)
self.assertEqual(7, len(chunks))
self.assertEqual([1], chunks[0])
self.assertEqual([7], chunks[6])
_list = [1, 2, 3, 4, 5, 6, 7]
chunks = split_list(_list, chunk_size=2)
self.assertEqual(4, len(chunks))
self.assertEqual([1, 2], chunks[0])
self.assertEqual([3, 4], chunks[1])
self.assertEqual([5, 6], chunks[2])
self.assertEqual([7], chunks[3])
|
class TestSplitList(TestCase):
def test_split_list(self):
pass
| 2 | 0 | 17 | 4 | 13 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 19 | 5 | 14 | 4 | 12 | 0 | 14 | 4 | 12 | 1 | 1 | 0 | 1 |
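The assertions in TestSplitList fully determine the helper's contract: chunk_size defaults to 1 and the final chunk may be shorter than the rest. A one-function sketch consistent with those assertions; the real split_list may be written differently.

def split_list(items, chunk_size=1):
    # Slice the list into consecutive chunks of at most chunk_size elements.
    return [items[i:i + chunk_size] for i in range(0, len(items), chunk_size)]

assert split_list([1, 2, 3, 4, 5, 6, 7]) == [[1], [2], [3], [4], [5], [6], [7]]
assert split_list([1, 2, 3, 4, 5, 6, 7], chunk_size=2) == [[1, 2], [3, 4], [5, 6], [7]]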
145,432 |
Linaro/squad
|
Linaro_squad/test/frontend/test_tests.py
|
test.frontend.test_tests.AllTestResultsTest
|
class AllTestResultsTest(TestCase):
def setUp(self):
self.client = Client()
group = models.Group.objects.create(slug='mygroup')
project = group.projects.create(slug='myproject')
env = project.environments.create(slug='myenv')
for test, _ in tests_file.items():
if test.startswith('suite2/'):
issue = models.KnownIssue.objects.create(
title='foo fails',
test_name=test
)
issue.environments.add(env)
ReceiveTestRun(project)(
version='1',
environment_slug='myenv2',
log_file='log file contents ...',
tests_file=json.dumps(tests_file),
metrics_file='{}',
metadata_file='{ "job_id" : "2" }',
)
ReceiveTestRun(project)(
version='1',
environment_slug='myenv',
log_file='log file contents ...',
tests_file=json.dumps(tests_file),
metrics_file='{}',
metadata_file='{ "job_id" : "1" }',
)
self.test_run = models.TestRun.objects.last()
def test_basics(self):
response = self.client.get('/mygroup/myproject/build/1/tests/')
self.assertEqual(200, response.status_code)
content = str(response.content)
self.assertTrue('test1' in content)
self.assertTrue('test2' in content)
self.assertTrue('myenv' in content)
self.assertTrue('myenv2' in content)
def test_pagination_page_1(self):
# page 1: only tests from suite1 - fail
response = self.client.get('/mygroup/myproject/build/1/tests/?page=1')
page1 = str(response.content)
self.assertTrue("suite1" in page1)
self.assertTrue("suite2" not in page1)
self.assertTrue("suite3" not in page1)
def test_pagination_page_2(self):
# page 2: only tests from suite2 - xfail
response = self.client.get('/mygroup/myproject/build/1/tests/?page=2')
page2 = str(response.content)
self.assertTrue("suite1" not in page2)
self.assertTrue("suite2" in page2)
self.assertTrue("suite3" not in page2)
def test_pagination_page_3(self):
# page 3: only tests from suite3 - pass
response = self.client.get('/mygroup/myproject/build/1/tests/?page=3')
page3 = str(response.content)
self.assertTrue("suite1" not in page3)
self.assertTrue("suite2" not in page3)
self.assertTrue("suite3" in page3)
def test_no_metadata(self):
suite, _ = self.test_run.build.project.suites.get_or_create(slug='a-suite')
metadata, _ = models.SuiteMetadata.objects.get_or_create(suite=suite.slug, name='no_metadata_test', kind='test')
self.test_run.tests.create(metadata=metadata, result=False, suite=suite, build=self.test_run.build, environment=self.test_run.environment)
response = self.client.get('/mygroup/myproject/build/1/tests/?page=2')
self.assertEqual(200, response.status_code)
def test_filter(self):
response = self.client.get('/mygroup/myproject/build/1/tests/?search=test1')
content = str(response.content)
self.assertEqual(200, response.status_code)
self.assertTrue('test1' in content)
self.assertTrue('test2' not in content)
def test_filter_by_environment(self):
response = self.client.get('/mygroup/myproject/build/1/tests/?environment=myenv')
content = str(response.content)
self.assertEqual(200, response.status_code)
self.assertTrue('test1' in content)
self.assertTrue('test2' in content)
self.assertTrue('myenv' in content)
self.assertTrue('myenv2' not in content)
def test_filter_by_suite(self):
response = self.client.get('/mygroup/myproject/build/1/tests/?suite=suite2')
content = str(response.content)
self.assertEqual(200, response.status_code)
self.assertTrue('suite2' in content)
self.assertTrue('suite1' not in content)
self.assertTrue('suite3' not in content)
|
class AllTestResultsTest(TestCase):
def setUp(self):
pass
def test_basics(self):
pass
def test_pagination_page_1(self):
pass
def test_pagination_page_2(self):
pass
def test_pagination_page_3(self):
pass
def test_no_metadata(self):
pass
def test_filter(self):
pass
def test_filter_by_environment(self):
pass
def test_filter_by_suite(self):
pass
| 10 | 0 | 10 | 1 | 9 | 0 | 1 | 0.04 | 1 | 5 | 4 | 0 | 9 | 2 | 9 | 9 | 103 | 17 | 83 | 34 | 73 | 3 | 66 | 34 | 56 | 3 | 1 | 2 | 11 |
145,433 |
Linaro/squad
|
Linaro_squad/test/core/test_known_issues.py
|
test.core.test_known_issues.KnownIssueTest
|
class KnownIssueTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.env1 = self.project.environments.create(slug='env1')
self.suite1 = self.project.suites.create(slug="suite1")
self.suite2 = self.project.suites.create(slug="suite2")
self.date = timezone.now()
def test_active_known_issue(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite1.slug, name='test_foo', kind='test')
test = test_run.tests.create(build=test_run.build, environment=test_run.environment, suite=self.suite1, metadata=foo_metadata, result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environments.add(test_run.environment)
self.assertEqual(1, len(KnownIssue.active_by_environment(test_run.environment)))
def test_inactive_known_issue(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite1.slug, name='test_foo', kind='test')
test = test_run.tests.create(build=test_run.build, environment=test_run.environment, suite=self.suite1, metadata=foo_metadata, result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environments.add(test_run.environment)
known_issue.active = False
known_issue.save()
self.assertEqual(0, len(KnownIssue.active_by_environment(self.env1)))
def test_active_by_project(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite1.slug, name='test_foo', kind='test')
test = test_run.tests.create(build=test_run.build, environment=test_run.environment, suite=self.suite1, metadata=foo_metadata, result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environments.add(test_run.environment)
self.assertEqual(1, len(KnownIssue.active_by_project_and_test(self.project, test.full_name)))
def test_inactive_by_project(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite1.slug, name='test_foo', kind='test')
test = test_run.tests.create(build=test_run.build, environment=test_run.environment, suite=self.suite1, metadata=foo_metadata, result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environments.add(test_run.environment)
known_issue.active = False
known_issue.save()
self.assertEqual(0, len(KnownIssue.active_by_project_and_test(self.project, test.full_name)))
def test_pattern_as_test_name(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
testrun = build.test_runs.create(environment=self.env1)
known_issue = KnownIssue.objects.create(
title="foo",
test_name="suite*/foo"
)
known_issue.save()
known_issue.environments.add(testrun.environment)
known_issue.save()
tests_file = '{"suite1/foo": "pass", "suite2/foo": "pass", "notinpattern/foo": "pass"}'
testrun.save_tests_file(tests_file)
ParseTestRunData()(testrun)
self.assertEqual(3, testrun.tests.count())
for test in testrun.tests.filter(suite__slug__in="suite1,suite2").all():
self.assertTrue(test.has_known_issues)
self.assertIn(known_issue, test.known_issues.all())
|
class KnownIssueTest(TestCase):
def setUp(self):
pass
def test_active_known_issue(self):
pass
def test_inactive_known_issue(self):
pass
def test_active_by_project(self):
pass
def test_inactive_by_project(self):
pass
def test_pattern_as_test_name(self):
pass
| 7 | 0 | 17 | 1 | 15 | 1 | 1 | 0.04 | 1 | 3 | 3 | 0 | 6 | 6 | 6 | 6 | 108 | 12 | 92 | 38 | 85 | 4 | 62 | 38 | 55 | 2 | 1 | 1 | 7 |
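test_pattern_as_test_name above relies on a known issue's test_name acting as a shell-style wildcard (suite*/foo) matched against each test's full name. The helper below is only a sketch of that matching, using Python's fnmatch; match_known_issue and the plain list of names are illustrative, not SQUAD's actual API.

from fnmatch import fnmatchcase

def match_known_issue(pattern, full_test_names):
    # Return the full test names a known-issue pattern applies to.
    return [name for name in full_test_names if fnmatchcase(name, pattern)]

names = ['suite1/foo', 'suite2/foo', 'notinpattern/foo']
# Matches the first two names only, as asserted in test_pattern_as_test_name.
print(match_known_issue('suite*/foo', names))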
145,434 |
Linaro/squad
|
Linaro_squad/test/core/test_test_run.py
|
test.core.test_test_run.TestRunTest
|
class TestRunTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.build = self.project.builds.create(version='1')
self.env = self.project.environments.create(slug='myenv')
def test_metadata(self):
t = TestRun(metadata_file='{"1": 2}')
self.assertEqual({"1": 2}, t.metadata)
def test_no_metadata(self):
self.assertEqual({}, TestRun().metadata)
def test_manipulate_metadata(self):
t = TestRun(build=self.build, environment=self.env)
t.metadata["foo"] = "bar"
t.metadata["baz"] = "qux"
t.save()
t.refresh_from_db()
self.assertEqual({"foo": "bar", "baz": "qux"}, t.metadata)
def test_storage_fields(self):
tests_file_content = 'tests file content'
metrics_file_content = 'metrics file content'
log_file_content = 'log file content'
testrun = TestRun.objects.create(
build=self.build,
environment=self.env)
self.assertFalse(testrun.tests_file_storage)
self.assertFalse(testrun.metrics_file_storage)
self.assertFalse(testrun.log_file_storage)
testrun.save_tests_file(tests_file_content)
testrun.save_metrics_file(metrics_file_content)
testrun.save_log_file(log_file_content)
self.assertEqual(tests_file_content, testrun.tests_file_storage.read().decode())
self.assertEqual(metrics_file_content, testrun.metrics_file_storage.read().decode())
self.assertEqual(log_file_content, testrun.log_file_storage.read().decode())
def test_delete_storage_fields_on_model_deletion(self):
tests_file_content = 'tests file content'
metrics_file_content = 'metrics file content'
log_file_content = 'log file content'
attachment_content = b'attachment content'
attachment_filename = 'foo.txt'
testrun = TestRun.objects.create(
build=self.build,
environment=self.env)
attachment = testrun.attachments.create(filename=attachment_filename, length=len(attachment_content))
self.assertFalse(testrun.tests_file_storage)
self.assertFalse(testrun.metrics_file_storage)
self.assertFalse(testrun.log_file_storage)
self.assertFalse(attachment.storage)
testrun.refresh_from_db()
testrun.save_tests_file(tests_file_content)
testrun.save_metrics_file(metrics_file_content)
testrun.save_log_file(log_file_content)
attachment.save_file(attachment_filename, attachment_content)
attachment.refresh_from_db()
self.assertEqual(tests_file_content, testrun.tests_file_storage.read().decode())
self.assertEqual(metrics_file_content, testrun.metrics_file_storage.read().decode())
self.assertEqual(log_file_content, testrun.log_file_storage.read().decode())
self.assertEqual(attachment_content, attachment.storage.read())
tests_file_storage_path = testrun.tests_file_storage.path
metrics_file_storage_path = testrun.metrics_file_storage.path
log_file_storage_path = testrun.log_file_storage.path
attachment_storage_path = attachment.storage.path
testrun.delete()
self.assertFalse(os.path.isfile(tests_file_storage_path))
self.assertFalse(os.path.isfile(metrics_file_storage_path))
self.assertFalse(os.path.isfile(log_file_storage_path))
self.assertFalse(os.path.isfile(attachment_storage_path))
|
class TestRunTest(TestCase):
def setUp(self):
pass
def test_metadata(self):
pass
def test_no_metadata(self):
pass
def test_manipulate_metadata(self):
pass
def test_storage_fields(self):
pass
def test_delete_storage_fields_on_model_deletion(self):
pass
| 7 | 0 | 13 | 2 | 11 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 6 | 4 | 6 | 6 | 86 | 19 | 67 | 28 | 60 | 0 | 63 | 28 | 56 | 1 | 1 | 0 | 6 |
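test_metadata and test_manipulate_metadata above exercise a metadata property backed by a JSON text field that can be read, mutated and written back. A minimal standalone sketch of that pattern follows; TestRunSketch and its field names are assumptions for illustration, not SQUAD's model code.

import json

class TestRunSketch:
    def __init__(self, metadata_file=None):
        self.metadata_file = metadata_file
        self._metadata = None

    @property
    def metadata(self):
        # Parse lazily; keep the dict around so callers can mutate it in place.
        if self._metadata is None:
            self._metadata = json.loads(self.metadata_file) if self.metadata_file else {}
        return self._metadata

    def save(self):
        # Serialize the (possibly mutated) dict back before persisting.
        self.metadata_file = json.dumps(self.metadata)

run = TestRunSketch(metadata_file='{"1": 2}')
run.metadata["foo"] = "bar"
run.save()
assert json.loads(run.metadata_file) == {"1": 2, "foo": "bar"}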
145,435 |
Linaro/squad
|
Linaro_squad/test/core/test_test_comparison.py
|
test.core.test_test_comparison.TestComparisonTest
|
class TestComparisonTest(TestCase):
def receive_test_run(self, project, version, env, tests):
receive = ReceiveTestRun(project, update_project_status=False)
receive(version, env, tests_file=json.dumps(tests))
def setUp(self):
self.group = models.Group.objects.create(slug='mygroup')
self.project1 = self.group.projects.create(slug='project1')
self.project2 = self.group.projects.create(slug='project2')
self.project3 = self.group.projects.create(slug='project3')
self.receive_test_run(self.project1, '0', 'myenv', {
'z': 'pass',
})
self.receive_test_run(self.project1, '1', 'myenv', {
'a': 'pass',
'b': 'pass',
})
self.receive_test_run(self.project1, '1', 'myenv', {
'c': 'fail',
'd/e': 'pass',
})
self.receive_test_run(self.project2, '1', 'myenv', {
'a': 'fail',
'b': 'pass',
})
self.receive_test_run(self.project2, '1', 'myenv', {
'c': 'pass',
'd/e': 'pass',
})
self.receive_test_run(self.project1, '1', 'otherenv', {
'a': 'pass',
'b': 'pass',
})
self.receive_test_run(self.project1, '1', 'otherenv', {
'c': 'fail',
'd/e': 'pass',
})
self.receive_test_run(self.project2, '1', 'otherenv', {
'a': 'fail',
'b': 'pass',
})
self.receive_test_run(self.project2, '1', 'otherenv', {
'c': 'pass',
'd/e': 'pass',
})
self.receive_test_run(self.project3, '2', 'myenv', {
'a': 'pass',
'b': 'pass',
})
self.receive_test_run(self.project3, '2', 'myenv', {
'a': 'fail',
'b': 'fail',
})
self.receive_test_run(self.project3, '2', 'myenv', {
'a': 'pass',
'b': 'fail',
})
self.build0 = self.project1.builds.first()
self.build1 = self.project1.builds.last()
self.build2 = self.project2.builds.last()
self.build3 = self.project3.builds.last()
def test_builds(self):
comp = compare(self.build1, self.build2)
self.assertEqual([self.build1, self.build2], comp.builds)
def test_test_runs(self):
comp = compare(self.build1, self.build2)
self.assertEqual(['myenv', 'otherenv'], comp.environments[self.build1])
self.assertEqual(['myenv', 'otherenv'], comp.environments[self.build2])
def test_tests_are_sorted(self):
comp = compare(self.build0, self.build1)
self.assertEqual(['a', 'b', 'c', 'd/e', 'z'], list(comp.results.keys()))
def test_test_results(self):
comp = compare(self.build1, self.build2)
self.assertEqual('pass', comp.results['a'][self.build1, 'otherenv'])
self.assertEqual('fail', comp.results['c'][self.build1, 'otherenv'])
self.assertEqual('fail', comp.results['a'][self.build2, 'otherenv'])
self.assertEqual('pass', comp.results['b'][self.build2, 'otherenv'])
def test_compare_projects(self):
comp = TestComparison.compare_projects(self.project1, self.project2)
self.assertEqual([self.build1, self.build2], comp.builds)
def test_no_data(self):
new_project = self.group.projects.create(slug='new')
comp = TestComparison.compare_projects(new_project)
self.assertFalse(comp.diff)
self.assertEqual([], comp.builds)
def test_diff(self):
comparison = compare(self.build1, self.build2)
diff = comparison.diff
self.assertEqual(['a', 'c'], sorted(diff.keys()))
def test_empty_diff(self):
comparison = compare(self.build1, self.build1) # same build → no diff
self.assertFalse(comparison.diff)
def test_empty_with_no_builds(self):
new_project = self.group.projects.create(slug='new')
comparison = TestComparison.compare_projects(new_project)
self.assertFalse(comparison.diff)
def test_regressions(self):
"""
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually regressions is
only used when comparing subsequent builds from the same project.
"""
comparison = TestComparison(self.build1, self.build2, regressions_and_fixes_only=True)
regressions = comparison.regressions
self.assertEqual(['a'], regressions['myenv'])
self.assertEqual(['a'], regressions['otherenv'])
def test_fixes(self):
"""
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually regressions is
only used when comparing subsequent builds from the same project.
"""
comparison = TestComparison(self.build1, self.build2, regressions_and_fixes_only=True)
fixes = comparison.fixes
self.assertEqual(['c'], fixes['myenv'])
def test_failures(self):
# Check if failures are ok
comparison = TestComparison(self.build1)
self.assertEqual(['c'], sorted([t.full_name for t in comparison.failures['myenv']]))
self.receive_test_run(self.project1, '1', 'myenv', {'tests/another': 'fail'})
comparison = TestComparison(self.build1)
self.assertEqual(['c', 'tests/another'], sorted([t.full_name for t in comparison.failures['myenv']]))
def test_regressions_no_previous_build(self):
comparison = TestComparison.compare_builds(self.build1, None)
regressions = comparison.regressions
self.assertEqual({}, regressions)
def test_fixes_no_previous_build(self):
comparison = TestComparison.compare_builds(self.build1, None)
fixes = comparison.fixes
self.assertEqual({}, fixes)
def test_regressions_no_regressions(self):
# same build! so no regressions, by definition
comparison = TestComparison(self.build1, self.build1, regressions_and_fixes_only=True)
self.assertEqual({}, comparison.regressions)
def test_regressions_with_duplicates(self):
comparison = TestComparison.compare_builds(self.build1, self.build3)
self.assertEqual({'myenv': ['b']}, comparison.regressions)
def test_fixes_with_duplicates(self):
comparison = TestComparison.compare_builds(self.build2, self.build3)
self.assertEqual({'myenv': ['a']}, comparison.fixes)
def test_fixes_no_fixes(self):
# same build! so no fixes, by definition
comparison = TestComparison(self.build1, self.build1, regressions_and_fixes_only=True)
self.assertEqual({}, comparison.fixes)
def test_xfail_fix(self):
"""
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually fixes is
only used when comparing subsequent builds from the same project.
"""
models.Test.objects.filter(test_run__build=self.build1, metadata__name='c').update(has_known_issues=True)
comparison = TestComparison(self.build1, self.build2, regressions_and_fixes_only=True)
fixes = comparison.fixes
self.assertEqual(['c'], fixes['myenv'])
def test_pass_to_xfail_not_a_regressions(self):
"""
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually fixes is
only used when comparing subsequent builds from the same project.
"""
models.Test.objects.filter(test_run__build=self.build2, metadata__name='a').update(has_known_issues=True)
comparison = TestComparison(self.build1, self.build2, regressions_and_fixes_only=True)
regressions = comparison.regressions
self.assertEqual(0, len(regressions))
def test_intermittent_xfail_is_not_a_fix(self):
"""
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually fixes is
only used when comparing subsequent builds from the same project.
build1 = project1/1 -> baseline build
build2 = project2/1 -> target build
build | project1/1 | project2/1
test / environment | myenv | otherenv | myenv | otherenv
-------------------+-------+----------+-------+----------
a | pass | pass | fail | fail
b | pass | pass | pass | pass
c | fail | fail | pass | pass
d/e | pass | pass | pass | pass
Expected results
regressions:
- test a on environments "myenv" and "otherenv"
fixes:
- test c would be a fix, but the testing at hand tags that as intermittent and xfail
thus TestComparison should *NOT* mark that as a fix
"""
tests = models.Test.objects.filter(test_run__build=self.build1, metadata__name='c')
tests.update(has_known_issues=True)
issue = models.KnownIssue.objects.create(title='foo bar baz', intermittent=True)
for test in tests:
test.known_issues.add(issue)
comparison = TestComparison(self.build1, self.build2, regressions_and_fixes_only=True)
fixes = comparison.fixes
self.assertEqual({}, fixes)
def test_apply_transitions(self):
"""
Test results scenario
+---------------------+---------------------+
| buildA | buildB |
+------+------+-------+------+------+-------+
| envA | envB | envC | envA | envB | envC |
+-------+------+------+-------+------+------+-------+
| testA | pass | fail | xfail | fail | pass | xfail |
+-------+------+------+-------+------+------+-------+
| testB | pass | skip | xfail | skip | | xfail |
+-------+------+------+-------+------+------+-------+
| testC | pass | pass | pass | pass | pass | pass |
+-------+------+------+-------+------+------+-------+
"""
project = self.group.projects.create(slug='project4')
self.receive_test_run(project, 'buildA', 'envA', {'testA': 'pass', 'testB': 'pass', 'testC': 'pass'})
self.receive_test_run(project, 'buildA', 'envB', {'testA': 'fail', 'testB': 'skip', 'testC': 'pass'})
self.receive_test_run(project, 'buildA', 'envC', {'testA': 'xfail', 'testB': 'xfail', 'testC': 'pass'})
self.receive_test_run(project, 'buildB', 'envA', {'testA': 'fail', 'testB': 'skip', 'testC': 'pass'})
self.receive_test_run(project, 'buildB', 'envB', {'testA': 'pass', 'testC': 'pass'})
self.receive_test_run(project, 'buildB', 'envC', {'testA': 'xfail', 'testB': 'xfail', 'testC': 'pass'})
buildA = project.builds.filter(version='buildA').get()
buildB = project.builds.filter(version='buildB').get()
comparison = TestComparison.compare_builds(buildA, buildB)
self.assertEqual({'envB': ['testA']}, comparison.fixes)
self.assertEqual({'envA': ['testA']}, comparison.regressions)
self.assertEqual({'envA', 'envB', 'envC'}, comparison.all_environments)
self.assertEqual(3, len(comparison.results))
transitions = [('pass', 'fail'), ('skip', 'n/a')]
comparison.apply_transitions(transitions)
"""
Test results after transitions are applied
+-------------+-------------+
| buildA | buildB |
+------+------+------+------+
| envA | envB | envA | envB |
+-------+------+------+------+------+
| testA | pass | fail | fail | pass |
+-------+------+------+------+------+
| testB | pass | skip | skip | |
+-------+------+------+------+------+
"""
self.assertEqual({'envB': ['testA']}, comparison.fixes)
self.assertEqual({'envA': ['testA']}, comparison.regressions)
self.assertEqual({'envA', 'envB'}, comparison.all_environments)
self.assertEqual(2, len(comparison.results))
self.assertEqual(None, comparison.results['testB'].get((buildB, 'envB')))
|
class TestComparisonTest(TestCase):
def receive_test_run(self, project, version, env, tests):
pass
def setUp(self):
pass
def test_builds(self):
pass
def test_test_runs(self):
pass
def test_tests_are_sorted(self):
pass
def test_test_results(self):
pass
def test_compare_projects(self):
pass
def test_no_data(self):
pass
def test_diff(self):
pass
def test_empty_diff(self):
pass
def test_empty_with_no_builds(self):
pass
def test_regressions(self):
'''
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually regressions is
only used when comparing subsequent builds from the same project.
'''
pass
def test_fixes(self):
'''
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually regressions is
only used when comparing subsequent builds from the same project.
'''
pass
def test_failures(self):
pass
def test_regressions_no_previous_build(self):
pass
def test_fixes_no_previous_build(self):
pass
def test_regressions_no_regressions(self):
pass
def test_regressions_with_duplicates(self):
pass
def test_fixes_with_duplicates(self):
pass
def test_fixes_no_fixes(self):
pass
def test_xfail_fix(self):
'''
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually fixes is
only used when comparing subsequent builds from the same project.
'''
pass
def test_pass_to_xfail_not_a_regressions(self):
'''
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually fixes is
only used when comparing subsequent builds from the same project.
'''
pass
def test_intermittent_xfail_is_not_a_fix(self):
'''
This test is using builds from different projects because the relevant
test data is already prepared in setUp(), but usually fixes is
only used when comparing subsequent builds from the same project.
build1 = project1/1 -> baseline build
build2 = project2/1 -> target build
build | project1/1 | project2/1
test / environment | myenv | otherenv | myenv | otherenv
-------------------+-------+----------+-------+----------
a | pass | pass | fail | fail
b | pass | pass | pass | pass
c | fail | fail | pass | pass
d/e | pass | pass | pass | pass
Expected results
regressions:
- test a on environments "myenv" and "otherenv"
fixes:
- test c would be a fix, but the testing at hand tags that as intermittent and xfail
thus TestComparison should *NOT* mark that as a fix
'''
pass
def test_apply_transitions(self):
'''
Test results scenario
+---------------------+---------------------+
| buildA | buildB |
+------+------+-------+------+------+-------+
| envA | envB | envC | envA | envB | envC |
+-------+------+------+-------+------+------+-------+
| testA | pass | fail | xfail | fail | pass | xfail |
+-------+------+------+-------+------+------+-------+
| testB | pass | skip | xfail | skip | | xfail |
+-------+------+------+-------+------+------+-------+
| testC | pass | pass | pass | pass | pass | pass |
+-------+------+------+-------+------+------+-------+
'''
pass
| 25 | 6 | 11 | 1 | 7 | 3 | 1 | 0.41 | 1 | 5 | 4 | 0 | 24 | 8 | 24 | 24 | 281 | 41 | 171 | 73 | 146 | 70 | 136 | 73 | 111 | 2 | 1 | 1 | 25 |
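The comparison tests above treat a test as a regression when it goes from pass to fail between the baseline and target builds, and as a fix when it goes from fail (or xfail) to pass, grouped per environment. The function below is a deliberately simplified sketch of that transition logic over plain dicts; it ignores TestComparison's duplicate handling, transition tuning and the intermittent-xfail rule shown above.

def regressions_and_fixes(baseline, target):
    # baseline/target: {(environment, test_name): status} -- an illustrative shape.
    regressions, fixes = {}, {}
    for (env, test), old in baseline.items():
        new = target.get((env, test))
        if old == 'pass' and new == 'fail':
            regressions.setdefault(env, []).append(test)
        elif old in ('fail', 'xfail') and new == 'pass':
            fixes.setdefault(env, []).append(test)
    return regressions, fixes

baseline = {('myenv', 'a'): 'pass', ('myenv', 'c'): 'fail'}
target = {('myenv', 'a'): 'fail', ('myenv', 'c'): 'pass'}
# -> ({'myenv': ['a']}, {'myenv': ['c']}), as in test_regressions/test_fixes.
print(regressions_and_fixes(baseline, target))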
145,436 |
Linaro/squad
|
Linaro_squad/test/core/test_utils.py
|
test.core.test_utils.TestParseName
|
class TestParseName(TestCase):
def test_simple(self):
self.assertEqual(('foo', 'bar'), parse_name('foo/bar'))
def test_nested(self):
self.assertEqual(('foo/bar', 'baz'), parse_name('foo/bar/baz'))
def test_ungrouped(self):
self.assertEqual(('/', 'foo'), parse_name('foo'))
def test_multiple_leading_slashes(self):
self.assertEqual(('/', 'foo'), parse_name('//foo'))
def test_variants_simple(self):
self.assertEqual(('special', 'case.for[result/variants]'),
parse_name("special/case.for[result/variants]"))
def test_variants_ungrouped(self):
self.assertEqual(('/', 'case.for[result/variants]'),
parse_name("case.for[result/variants]"))
def test_variants_multiple_leading_slashes(self):
self.assertEqual(('/', 'case.for[result/variants]'),
parse_name("//case.for[result/variants]"))
def test_variants_nested(self):
self.assertEqual(('long/special', 'case.for[result/variants]'),
parse_name("long/special/case.for[result/variants]"))
def test_variants_missing_opening_bracket(self):
self.assertEqual(('long/special/case.forresult', 'variants]'),
parse_name("long/special/case.forresult/variants]"))
|
class TestParseName(TestCase):
def test_simple(self):
pass
def test_nested(self):
pass
def test_ungrouped(self):
pass
def test_multiple_leading_slashes(self):
pass
def test_variants_simple(self):
pass
def test_variants_ungrouped(self):
pass
def test_variants_multiple_leading_slashes(self):
pass
def test_variants_nested(self):
pass
def test_variants_missing_opening_bracket(self):
pass
| 10 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 9 | 0 | 9 | 9 | 33 | 9 | 24 | 10 | 14 | 0 | 19 | 10 | 9 | 1 | 1 | 0 | 9 |
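The TestParseName cases above pin down how a full test name is split into a group and a test name: leading slashes are dropped, ungrouped names get the group "/", and a "[.../...]" variant suffix stays attached to the test name. The function below is a hedged reimplementation of exactly those assertions; SQUAD's real parse_name may be written differently.

def parse_name(full_name):
    name = full_name.lstrip('/')
    # Only split on slashes that appear before a "[...]" variant suffix.
    bracket = name.find('[') if ('[' in name and name.endswith(']')) else len(name)
    slash = name.rfind('/', 0, bracket)
    if slash < 0:
        return ('/', name)
    return (name[:slash], name[slash + 1:])

assert parse_name('foo/bar') == ('foo', 'bar')
assert parse_name('//foo') == ('/', 'foo')
assert parse_name('special/case.for[result/variants]') == ('special', 'case.for[result/variants]')
assert parse_name('long/special/case.forresult/variants]') == ('long/special/case.forresult', 'variants]')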
145,437 |
Linaro/squad
|
Linaro_squad/test/core/test_metric_threshold.py
|
test.core.test_metric_threshold.MetricThresholdTest
|
class MetricThresholdTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.environment = self.project.environments.create(slug='myenv')
def test_basic(self):
threshold_name = 'sample-threshold'
self.project.thresholds.create(name=threshold_name)
m = self.project.thresholds.filter(name=threshold_name)
self.assertEqual(1, m.count())
def test_all_attributes(self):
threshold_name = 'sample-threshold'
value = 1
is_higher_better = True
env = self.environment
self.project.thresholds.create(name=threshold_name, value=value, is_higher_better=is_higher_better, environment=env)
m = self.project.thresholds.filter(name=threshold_name, value=value, is_higher_better=is_higher_better, environment=env)
self.assertEqual(1, m.count())
def test_fail_to_duplicate(self):
# There are 2 types of duplicates:
# 1. When there's a threshold for all envs, and a new one for a specific env is added
threshold_name = 'all-envs-threshold'
self.project.thresholds.create(name=threshold_name)
with self.assertRaises(ValidationError):
self.project.thresholds.create(name=threshold_name, environment=self.environment)
# 2. When there's a threshold for a specific env, and a new one for all envs is added
threshold_name = 'specific-env-threshold'
self.project.thresholds.create(name=threshold_name, environment=self.environment)
with self.assertRaises(ValidationError):
self.project.thresholds.create(name=threshold_name)
def test_matches_correct_metrics(self):
# A threshold name is used as a regex to match metrics' names
# therefore it should not match metrics it is not supposed to
threshold_name = 'should-not-match-beyond-here'
threshold = self.project.thresholds.create(name=threshold_name)
self.assertFalse(threshold.match(threshold_name + '-very-similar-metric-name'))
# It should, though, match the correct things
threshold_name = 'should-match-*-of-these'
threshold = self.project.thresholds.create(name=threshold_name)
self.assertTrue(threshold.match('should-match-any-of-these'))
self.assertTrue(threshold.match('should-match-all-of-these'))
# It should NOT parse any other regex special character
threshold_name = 'should-not-match-that-[0-9]'
threshold = self.project.thresholds.create(name=threshold_name)
self.assertFalse(threshold.match('should-not-match-that-0'))
|
class MetricThresholdTest(TestCase):
def setUp(self):
pass
def test_basic(self):
pass
def test_all_attributes(self):
pass
def test_fail_to_duplicate(self):
pass
def test_matches_correct_metrics(self):
pass
| 6 | 0 | 10 | 1 | 7 | 1 | 1 | 0.18 | 1 | 0 | 0 | 0 | 5 | 3 | 5 | 5 | 54 | 9 | 38 | 19 | 32 | 7 | 38 | 19 | 32 | 1 | 1 | 1 | 5 |
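test_matches_correct_metrics above implies a narrow matching rule for threshold names: "*" behaves as a wildcard while every other character, including regex metacharacters such as "[0-9]", is taken literally. The helper below sketches one way to get that behaviour by escaping the name and re-enabling only the asterisk; threshold_matches is illustrative, not MetricThreshold's actual implementation.

import re

def threshold_matches(threshold_name, metric_name):
    pattern = re.escape(threshold_name).replace(r'\*', '.*')
    return re.fullmatch(pattern, metric_name) is not None

assert threshold_matches('should-match-*-of-these', 'should-match-any-of-these')
assert not threshold_matches('should-not-match-beyond-here',
                             'should-not-match-beyond-here-very-similar-metric-name')
assert not threshold_matches('should-not-match-that-[0-9]', 'should-not-match-that-0')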
145,438 |
Linaro/squad
|
Linaro_squad/test/core/test_metrics_data.py
|
test.core.test_metrics_data.JSONMetricDataParserTest
|
class JSONMetricDataParserTest(TestCase):
def test_empty(self):
self.assertEqual([], parser(None))
self.assertEqual([], parser(''))
self.assertEqual([], parser('{}'))
def test_basics(self):
data = parser(TEST_DATA)
self.assertEqual(5, len(data))
self.assertIsInstance(data, list)
def test_metric_name(self):
data = parser(TEST_DATA)
names = [t['name'] for t in data]
self.assertIn('ungrouped_int', names)
def test_grouping(self):
data = parser(TEST_DATA)
item = [t for t in data if t['group_name'] == 'group1'][0]
self.assertEqual(item['name'], 'var')
def test_int(self):
data = parser(TEST_DATA)
item = [t for t in data if t['name'] == 'ungrouped_int'][0]
self.assertEqual(10, item['result'])
self.assertEqual([10], item['measurements'])
def test_float(self):
data = parser(TEST_DATA)
item = [t for t in data if t['name'] == 'ungrouped_float'][0]
self.assertEqual(20.5, item['result'])
def test_array(self):
data = parser(TEST_DATA)
item = [t for t in data if t['name'] == 'ungrouped_multiple'][0]
self.assertEqual(10.5, item['result'])
self.assertEqual([10, 11, 10, 11], item['measurements'])
def test_nested_group(self):
item = parser('{"foo/bar/baz": {"value": 1, "unit": ""}}')[0]
self.assertEqual('foo/bar', item['group_name'])
self.assertEqual('baz', item['name'])
|
class JSONMetricDataParserTest(TestCase):
def test_empty(self):
pass
def test_basics(self):
pass
def test_metric_name(self):
pass
def test_grouping(self):
pass
def test_int(self):
pass
def test_float(self):
pass
def test_array(self):
pass
def test_nested_group(self):
pass
| 9 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 8 | 0 | 8 | 80 | 43 | 8 | 35 | 21 | 26 | 0 | 35 | 21 | 26 | 1 | 2 | 0 | 8 |
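The parser tests above expect a JSON metrics document to be flattened into a list of dicts, with group_name and name split on the last slash, the raw measurements kept, and a single result that averages array values. The sketch below reproduces that shape with statistics.mean(); the exact averaging and the group_name value for ungrouped metrics are assumptions, not SQUAD's parser.

import json
from statistics import mean

def parse_metrics(text):
    if not text:
        return []
    parsed = []
    for key, value in json.loads(text).items():
        group_name, _, name = key.rpartition('/')
        if isinstance(value, dict):  # e.g. {"value": 1, "unit": ""}
            value = value['value']
        measurements = value if isinstance(value, list) else [value]
        parsed.append({
            'group_name': group_name or None,
            'name': name,
            'measurements': measurements,
            'result': mean(measurements),
        })
    return parsed

print(parse_metrics('{"foo/bar/baz": {"value": 1, "unit": ""}, "ungrouped_multiple": [10, 11, 10, 11]}'))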
145,439 |
Linaro/squad
|
Linaro_squad/test/core/test_metrics_summary.py
|
test.core.test_metrics_summary.MetricsSummaryTest
|
class MetricsSummaryTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.build1 = self.project.builds.create(version='1')
self.build2 = self.project.builds.create(version='2')
self.env1 = self.project.environments.create(slug='env1')
self.env2 = self.project.environments.create(slug='env2')
suite1 = self.project.suites.create(slug='suite1')
suite2 = self.project.suites.create(slug='suite2')
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=suite1.slug, name='foo', kind='metric')
bar_metadata, _ = SuiteMetadata.objects.get_or_create(suite=suite1.slug, name='bar', kind='metric')
baz_metadata, _ = SuiteMetadata.objects.get_or_create(suite=suite2.slug, name='baz', kind='metric')
qux_metadata, _ = SuiteMetadata.objects.get_or_create(suite=suite2.slug, name='qux', kind='metric')
test_run1 = self.build1.test_runs.create(environment=self.env1)
test_run1.metrics.create(metadata=foo_metadata, suite=suite1, result=1, build=test_run1.build, environment=test_run1.environment)
test_run1.metrics.create(metadata=bar_metadata, suite=suite1, result=2, build=test_run1.build, environment=test_run1.environment)
test_run1.metrics.create(metadata=baz_metadata, suite=suite2, result=3, build=test_run1.build, environment=test_run1.environment)
test_run1.metrics.create(metadata=qux_metadata, suite=suite2, result=4, build=test_run1.build, environment=test_run1.environment)
test_run2 = self.build1.test_runs.create(environment=self.env2)
test_run2.metrics.create(metadata=foo_metadata, suite=suite1, result=2, build=test_run2.build, environment=test_run2.environment)
test_run2.metrics.create(metadata=bar_metadata, suite=suite1, result=4, build=test_run2.build, environment=test_run2.environment)
test_run2.metrics.create(metadata=baz_metadata, suite=suite2, result=6, build=test_run2.build, environment=test_run2.environment)
test_run2.metrics.create(metadata=qux_metadata, suite=suite2, result=8, build=test_run2.build, environment=test_run2.environment)
def test_empty_metrics(self):
summary = MetricsSummary(self.build2)
self.assertFalse(summary.has_metrics)
def test_basic_summary(self):
values = [1, 2, 3, 4, 2, 4, 6, 8]
summary = MetricsSummary(self.build1)
self.assertTrue(summary.has_metrics)
self.assertTrue(eq(geomean(values), summary.value))
def test_environment_summary(self):
values1 = [1, 2, 3, 4]
values2 = [2, 4, 6, 8]
summary1 = MetricsSummary(self.build1, self.env1)
summary2 = MetricsSummary(self.build1, self.env2)
self.assertTrue(summary1.has_metrics)
self.assertTrue(summary2.has_metrics)
self.assertTrue(eq(geomean(values1), summary1.value))
self.assertTrue(eq(geomean(values2), summary2.value))
|
class MetricsSummaryTest(TestCase):
def setUp(self):
pass
def test_empty_metrics(self):
pass
def test_basic_summary(self):
pass
def test_environment_summary(self):
pass
| 5 | 0 | 11 | 1 | 10 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 4 | 6 | 4 | 4 | 50 | 9 | 41 | 26 | 36 | 0 | 41 | 26 | 36 | 1 | 1 | 0 | 4 |
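MetricsSummary above condenses all metric results of a build (optionally per environment) into a geometric mean, which is what the geomean() calls in the assertions compute. The snippet below only shows that arithmetic with an illustrative helper; SQUAD may compute the summary differently.

import math

def geomean(values):
    # Geometric mean via the mean of logarithms; assumes positive values.
    return math.exp(sum(math.log(v) for v in values) / len(values))

print(round(geomean([1, 2, 3, 4, 2, 4, 6, 8]), 4))   # build-wide summary in the test above
print(round(geomean([1, 2, 3, 4]), 4))               # per-environment summary for env1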
145,440 |
Linaro/squad
|
Linaro_squad/test/core/test_notification.py
|
test.core.test_notification.TestCustomEmailTemplate
|
class TestCustomEmailTemplate(TestCase):
def setUp(self):
t0 = timezone.now() - relativedelta(hours=3)
t = timezone.now() - relativedelta(hours=2.75)
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.build1 = self.project.builds.create(version='1', datetime=t0)
status = ProjectStatus.create_or_update(self.build1)
status.finished = True
status.notified = True
status.save()
self.build2 = self.project.builds.create(version='2', datetime=t)
self.project.subscriptions.create(email='user@example.com')
self.project.admin_subscriptions.create(email='admin@example.com')
def test_custom_template(self):
template = EmailTemplate.objects.create(plain_text='foo', html='bar')
self.project.custom_email_template = template
self.project.save()
status = ProjectStatus.create_or_update(self.build2)
send_status_notification(status)
msg = mail.outbox[0]
txt = msg.body
html = msg.alternatives[0][0]
self.assertEqual('foo', txt)
self.assertEqual('bar', html)
def test_subject_from_custom_template(self):
template = EmailTemplate.objects.create(subject='lalala', plain_text='foo', html='bar')
self.project.custom_email_template = template
self.project.save()
status = ProjectStatus.create_or_update(self.build2)
send_status_notification(status)
msg = mail.outbox[0]
self.assertEqual('lalala', msg.subject)
def test_escaping_custom_template(self):
template = EmailTemplate.objects.create(
subject='subject: {{ project.name }}',
plain_text='foo: {{ notification.project.name }}',
html='{% autoescape True %}bar: {{ notification.project.name }}{% endautoescape %}')
self.project.name = "Project's name"
self.project.custom_email_template = template
self.project.save()
status = ProjectStatus.create_or_update(self.build2)
send_status_notification(status)
msg = mail.outbox[0]
subject = msg.subject
txt = msg.body
html = msg.alternatives[0][0]
self.assertEqual("subject: Project's name", subject)
self.assertEqual("foo: Project's name", txt)
self.assertEqual("bar: Project's name", html)
def test_context_custom_template(self):
expose_context_vars = """
build={{build.version}}
important_metadata={{important_metadata}}
metadata={{metadata}}
notification={{notification}}
previous_build={{previous_build}}
regressions_grouped_by_suite={{regressions_grouped_by_suite}}
fixes_grouped_by_suite={{fixes_grouped_by_suite}}
known_issues={{known_issues}}
regressions={{regressions}}
fixes={{fixes}}
thresholds={{thresholds}}
settings={{settings}}
summary={{summary}}
metrics={{metrics}}
"""
template = EmailTemplate.objects.create(plain_text=expose_context_vars, html=expose_context_vars)
self.project.custom_email_template = template
self.project.save()
status = ProjectStatus.create_or_update(self.build2)
send_status_notification(status)
msg = mail.outbox[0]
txt = msg.body
self.assertIn('build=2', txt)
self.assertIn('important_metadata={}', txt)
self.assertIn('metadata=OrderedDict()', txt)
self.assertIn('notification=<squad.core.notification.Notification', txt)
self.assertIn('previous_build=1', txt)
self.assertIn('regressions_grouped_by_suite=OrderedDict()', txt)
self.assertIn('fixes_grouped_by_suite=OrderedDict()', txt)
self.assertIn('known_issues=<QuerySet []>', txt)
self.assertIn('regressions=OrderedDict()', txt)
self.assertIn('fixes=OrderedDict()', txt)
self.assertIn('thresholds=[]', txt)
self.assertIn('settings=<Settings "test.settings">', txt)
self.assertIn('summary=<squad.core.models.TestSummary', txt)
self.assertIn('metrics=<QuerySet []>', txt)
|
class TestCustomEmailTemplate(TestCase):
def setUp(self):
pass
def test_custom_template(self):
pass
def test_subject_from_custom_template(self):
pass
def test_escaping_custom_template(self):
pass
def test_context_custom_template(self):
pass
| 6 | 0 | 20 | 2 | 17 | 0 | 1 | 0.01 | 1 | 3 | 2 | 0 | 5 | 4 | 5 | 5 | 105 | 17 | 88 | 32 | 82 | 1 | 70 | 32 | 64 | 1 | 1 | 0 | 5 |
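The custom-template tests above render the plain_text and html fields of an EmailTemplate as Django template strings against a notification context. The standalone sketch below shows that kind of string rendering with django.template.Engine; the context keys are illustrative, and autoescaping (exercised by test_escaping_custom_template) is simply left at the engine default here.

from django.conf import settings
from django.template import Context, Engine

if not settings.configured:
    settings.configure()   # only needed when running outside a Django project

engine = Engine()
template = engine.from_string('foo: {{ notification.project.name }}')
print(template.render(Context({'notification': {'project': {'name': 'myproject'}}})))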
145,441 |
Linaro/squad
|
Linaro_squad/test/core/test_notification.py
|
test.core.test_notification.TestModeratedNotifications
|
class TestModeratedNotifications(TestCase):
def setUp(self):
t0 = timezone.now() - relativedelta(hours=3)
t = timezone.now() - relativedelta(hours=2.75)
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.build1 = self.project.builds.create(version='1', datetime=t0)
status = ProjectStatus.create_or_update(self.build1)
status.notified = True
status.save()
self.build2 = self.project.builds.create(version='2', datetime=t)
self.project.subscriptions.create(email='user@example.com')
self.project.admin_subscriptions.create(email='admin@example.com')
self.project.moderate_notifications = True
self.project.save()
self.status = ProjectStatus.create_or_update(self.build2)
|
class TestModeratedNotifications(TestCase):
def setUp(self):
pass
| 2 | 0 | 16 | 1 | 15 | 0 | 1 | 0 | 1 | 2 | 1 | 2 | 1 | 5 | 1 | 1 | 18 | 2 | 16 | 10 | 14 | 0 | 16 | 10 | 14 | 1 | 1 | 0 | 1 |
145,442 |
Linaro/squad
|
Linaro_squad/test/core/test_notification.py
|
test.core.test_notification.TestSendNotificationFirstTime
|
class TestSendNotificationFirstTime(TestCase):
def setUp(self):
group = Group.objects.create(slug='mygroup')
self.project = group.projects.create(slug='myproject')
t0 = timezone.now() - relativedelta(hours=3)
self.build = self.project.builds.create(version='1', datetime=t0)
self.subscription = self.project.subscriptions.create(
email='foo@example.com')
def test_send_if_notifying_all_builds(self):
status = ProjectStatus.create_or_update(self.build)
send_status_notification(status)
self.assertEqual(1, len(mail.outbox))
def test_dont_send_if_notifying_on_change(self):
self.subscription.notification_strategy = Subscription.NOTIFY_ON_CHANGE
self.subscription.save()
status = ProjectStatus.create_or_update(self.build)
send_status_notification(status)
self.assertEqual(0, len(mail.outbox))
def test_dont_send_if_notifying_on_regression(self):
self.subscription.notification_strategy = Subscription.NOTIFY_ON_REGRESSION
self.subscription.save()
status = ProjectStatus.create_or_update(self.build)
send_status_notification(status)
self.assertEqual(0, len(mail.outbox))
def test_dont_send_if_notifying_on_error(self):
self.subscription.notification_strategy = Subscription.NOTIFY_ON_ERROR
self.subscription.save()
status = ProjectStatus.create_or_update(self.build)
send_status_notification(status)
self.assertEqual(0, len(mail.outbox))
|
class TestSendNotificationFirstTime(TestCase):
def setUp(self):
pass
def test_send_if_notifying_all_builds(self):
pass
def test_dont_send_if_notifying_on_change(self):
pass
def test_dont_send_if_notifying_on_regression(self):
pass
def test_dont_send_if_notifying_on_error(self):
pass
| 6 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 3 | 2 | 0 | 5 | 3 | 5 | 5 | 34 | 4 | 30 | 15 | 24 | 0 | 29 | 15 | 23 | 1 | 1 | 0 | 5 |
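The four tests above check the subscription strategies for a project's very first build: "all builds" notifies immediately, while the change- and regression-based strategies have nothing to compare against and the error-based strategy sees no failures, so none of them send mail. The function below is only an illustrative decision sketch, not SQUAD's notification code.

def should_notify(strategy, is_first_build, changed=False, regressed=False, errored=False):
    if strategy == 'all':
        return True
    if strategy == 'error':
        return errored
    # "on change" and "on regression" need a previous build to compare with.
    if is_first_build:
        return False
    return changed if strategy == 'change' else regressed

assert should_notify('all', is_first_build=True)
assert not should_notify('change', is_first_build=True)
assert not should_notify('regression', is_first_build=True)
assert not should_notify('error', is_first_build=True)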
145,443 |
Linaro/squad
|
Linaro_squad/test/core/test_notification.py
|
test.core.test_notification.TestSendUnmoderatedNotification
|
class TestSendUnmoderatedNotification(TestModeratedNotifications):
def setUp(self):
super(TestSendUnmoderatedNotification, self).setUp()
send_status_notification(self.status)
def test_mails_admins(self):
self.assertEqual(['admin@example.com'], mail.outbox[0].recipients())
def test_subject(self):
self.assertTrue(mail.outbox[0].subject.startswith("[PREVIEW]"))
def test_txt_banner(self):
txt = mail.outbox[0].body
self.assertTrue(txt.find('needs to be approved') >= 0)
def test_html_banner(self):
html = mail.outbox[0].alternatives[0][0]
self.assertTrue(html.find('needs to be approved') >= 0)
|
class TestSendUnmoderatedNotification(TestModeratedNotifications):
def setUp(self):
pass
def test_mails_admins(self):
pass
def test_subject(self):
pass
def test_txt_banner(self):
pass
def test_html_banner(self):
pass
| 6 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 5 | 0 | 5 | 6 | 19 | 5 | 14 | 8 | 8 | 0 | 14 | 8 | 8 | 1 | 2 | 0 | 5 |
145,444 |
Linaro/squad
|
Linaro_squad/test/core/test_notification_delivery.py
|
test.core.test_notification_delivery.NotificationDeliveryTest
|
class NotificationDeliveryTest(TestCase):
def setUp(self):
group = Group.objects.create(slug='mygroup')
project = group.projects.create(slug='myproject')
build1 = project.builds.create(version='1')
self.status = ProjectStatus.create_or_update(build1)
def test_avoid_duplicates(self):
args = [self.status, 'my subject', 'text', 'html']
self.assertFalse(NotificationDelivery.exists(*args))
self.assertTrue(NotificationDelivery.exists(*args))
def test_pass_modified_notifications(self):
args = [self.status, 'my subject', 'text', 'html']
self.assertFalse(NotificationDelivery.exists(*args))
args[2] = 'new text'
args[3] = 'new html'
self.assertFalse(NotificationDelivery.exists(*args))
|
class NotificationDeliveryTest(TestCase):
def setUp(self):
pass
def test_avoid_duplicates(self):
pass
def test_pass_modified_notifications(self):
pass
| 4 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 3 | 1 | 3 | 3 | 19 | 3 | 16 | 10 | 12 | 0 | 16 | 10 | 12 | 1 | 1 | 0 | 3 |
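NotificationDelivery.exists() above reports False the first time a given (status, subject, text, html) combination is seen, True for identical repeats, and False again once the content changes. The sketch below mimics that with an in-memory set of content hashes; the hashing scheme is an assumption for illustration only.

import hashlib

_seen = set()

def notification_exists(status_id, subject, txt, html):
    digest = hashlib.sha1('\0'.join([str(status_id), subject, txt, html]).encode()).hexdigest()
    if digest in _seen:
        return True
    _seen.add(digest)
    return False

assert notification_exists(1, 'my subject', 'text', 'html') is False
assert notification_exists(1, 'my subject', 'text', 'html') is True
assert notification_exists(1, 'my subject', 'new text', 'new html') is False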
145,445 |
Linaro/squad
|
Linaro_squad/test/core/test_patch_source.py
|
test.core.test_patch_source.TestPatchSource
|
class TestPatchSource(TestCase):
def test_get_implementation(self):
patch_source = PatchSource(implementation='example')
self.assertIsInstance(patch_source.get_implementation(), Plugin)
|
class TestPatchSource(TestCase):
def test_get_implementation(self):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 1 | 0 | 1 | 1 | 5 | 1 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 1 | 0 | 1 |
145,446 |
Linaro/squad
|
Linaro_squad/test/core/test_project.py
|
test.core.test_project.ProjectTest
|
class ProjectTest(TestCase):
def setUp(self):
self.user1 = User.objects.create(username='u1')
self.user2 = User.objects.create(username='u2')
self.admin = User.objects.create(username='admin', is_superuser=True)
self.group = Group.objects.create(slug='mygroup')
self.group.add_admin(self.user1)
self.public_project = self.group.projects.create(slug='public')
self.private_project = self.group.projects.create(slug='private', is_public=False)
def test_accessible_manager_non_member(self):
self.assertEqual(
[self.public_project],
list(Project.objects.accessible_to(self.user2))
)
def test_accessible_manager_member(self):
self.assertEqual(
[self.public_project, self.private_project],
list(Project.objects.accessible_to(self.user1).order_by('id'))
)
def test_accessible_manager_anonymous_user(self):
self.assertEqual(
[self.public_project],
list(Project.objects.accessible_to(AnonymousUser()))
)
def test_accessible_manager_admin(self):
self.assertEqual(
[self.public_project.id, self.private_project.id],
sorted([p.id for p in Project.objects.accessible_to(self.admin)])
)
def test_accessible_instance_non_member(self):
self.assertFalse(self.private_project.accessible_to(self.user2))
def test_accessible_instance_member(self):
self.assertTrue(self.private_project.accessible_to(self.user1))
def test_accessible_instance_public_project_non_member(self):
self.assertTrue(self.public_project.accessible_to(self.user2))
def test_accessible_instance_public_project_anonymous_user(self):
self.assertTrue(self.public_project.accessible_to(AnonymousUser()))
def test_accessible_instance_admin(self):
self.assertTrue(self.private_project.accessible_to(self.admin))
def test_enabled_plugins_empty(self):
self.assertIsNone(Project().enabled_plugins)
self.assertEqual([], Project(enabled_plugins_list=[]).enabled_plugins)
def test_enabled_plugins(self):
p = Project(enabled_plugins_list=['aaa', 'bbb'])
self.assertEqual(['aaa', 'bbb'], p.enabled_plugins)
def test_invalid_slug(self):
p = Project(group=self.group, slug='foo/bar')
with self.assertRaises(ValidationError):
p.full_clean()
p.slug = 'foo-bar'
p.full_clean() # it this raises no exception, then we are fine
def test_validate_project_settings(self):
p = Project(group=self.group, slug='foobar', project_settings='1')
with self.assertRaises(ValidationError):
p.full_clean()
p.project_settings = 'foo: bar\n'
p.full_clean()
def test_get_project_settings(self):
p = Project.objects.create(group=self.group, slug='foobar', project_settings='{"setting1": "value"}')
self.assertEqual("value", p.get_setting("setting1"))
self.assertEqual(None, p.get_setting("unkown_setting"))
self.assertEqual("default", p.get_setting("unkown_setting", "default"))
|
class ProjectTest(TestCase):
def setUp(self):
pass
def test_accessible_manager_non_member(self):
pass
def test_accessible_manager_member(self):
pass
def test_accessible_manager_anonymous_user(self):
pass
def test_accessible_manager_admin(self):
pass
def test_accessible_instance_non_member(self):
pass
def test_accessible_instance_member(self):
pass
def test_accessible_instance_public_project_non_member(self):
pass
def test_accessible_instance_public_project_anonymous_user(self):
pass
def test_accessible_instance_admin(self):
pass
def test_enabled_plugins_empty(self):
pass
def test_enabled_plugins(self):
pass
def test_invalid_slug(self):
pass
def test_validate_project_settings(self):
pass
def test_get_project_settings(self):
pass
| 16 | 0 | 4 | 0 | 4 | 0 | 1 | 0.02 | 1 | 2 | 1 | 0 | 15 | 6 | 15 | 15 | 81 | 19 | 62 | 26 | 46 | 1 | 50 | 26 | 34 | 1 | 1 | 1 | 15 |
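The accessibility tests above encode one rule: public projects are visible to everyone (including anonymous users), private projects only to members of the owning group, and superusers see everything. The plain-Python sketch below restates that rule over dicts; it is not the ORM query behind Project.objects.accessible_to().

def accessible_projects(projects, user):
    if user.get('is_superuser'):
        return list(projects)
    return [p for p in projects
            if p['is_public'] or p['group'] in user.get('groups', ())]

projects = [
    {'slug': 'public', 'group': 'mygroup', 'is_public': True},
    {'slug': 'private', 'group': 'mygroup', 'is_public': False},
]
assert [p['slug'] for p in accessible_projects(projects, {'groups': set()})] == ['public']
assert [p['slug'] for p in accessible_projects(projects, {'groups': {'mygroup'}})] == ['public', 'private']
assert [p['slug'] for p in accessible_projects(projects, {'is_superuser': True})] == ['public', 'private']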
145,447 |
Linaro/squad
|
Linaro_squad/test/core/test_project_status.py
|
test.core.test_project_status.ProjectStatusTest
|
class ProjectStatusTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.project2 = self.group.projects.create(slug='myproject2')
self.environment = self.project.environments.create(slug='theenvironment')
self.environment_a = self.project.environments.create(slug='environment_a')
self.suite = self.project.suites.create(slug='suite_')
self.suite_a = self.project.suites.create(slug='suite_a')
self.receive_testrun = ReceiveTestRun(self.project, update_project_status=False)
def create_build(self, v, datetime=None, create_test_run=True):
build = self.project.builds.create(version=v, datetime=datetime)
if create_test_run:
build.test_runs.create(environment=self.environment)
return build
def test_status_of_first_build(self):
build = self.project2.builds.create(version='1122')
status = ProjectStatus.create_or_update(build)
self.assertEqual(build, status.build)
self.assertIsNone(status.get_previous())
def test_status_of_second_build(self):
build = self.create_build('1')
status1 = ProjectStatus.create_or_update(build)
build2 = self.create_build('2')
status2 = ProjectStatus.create_or_update(build2)
self.assertEqual(status1, status2.get_previous())
self.assertEqual(build2, status2.build)
def test_dont_record_the_same_status_twice(self):
build = self.create_build('1')
status1 = ProjectStatus.create_or_update(build)
status2 = ProjectStatus.create_or_update(build)
self.assertEqual(status1, status2)
self.assertEqual(1, ProjectStatus.objects.count())
def test_wait_for_build_completion(self):
build = self.create_build('1', datetime=h(1), create_test_run=False)
status = ProjectStatus.create_or_update(build)
self.assertFalse(status.finished)
def test_first_build(self):
build = self.create_build('1')
status = ProjectStatus.create_or_update(build)
self.assertEqual(build, status.build)
def test_build_not_finished(self):
build = self.create_build('2', datetime=h(4), create_test_run=False)
status = ProjectStatus.create_or_update(build)
self.assertFalse(status.finished)
def test_force_finishing_build_on_notification_timeout_disabled(self):
build = self.create_build('2', datetime=h(4), create_test_run=False)
status = ProjectStatus.create_or_update(build)
self.assertFalse(status.finished)
build.project.force_finishing_builds_on_timeout = False
build.project.save()
notification.notification_timeout(status.id)
status.refresh_from_db()
self.assertFalse(status.finished)
def test_force_finishing_build_on_notification_timeout_enabled(self):
build = self.create_build('2', datetime=h(4), create_test_run=False)
status = ProjectStatus.create_or_update(build)
self.assertFalse(status.finished)
build.project.force_finishing_builds_on_timeout = True
build.project.save()
notification.notification_timeout(status.id)
status.refresh_from_db()
self.assertTrue(status.finished)
def test_test_summary(self):
build = self.create_build('1', datetime=h(10), create_test_run=False)
tests_json = """
{
"tests/foo": "pass",
"tests/bar": "fail",
"tests/baz": "none"
}
"""
self.receive_testrun(build.version, self.environment.slug, tests_file=tests_json)
status = ProjectStatus.create_or_update(build)
self.assertEqual(1, status.tests_pass)
self.assertEqual(1, status.tests_fail)
self.assertEqual(1, status.tests_skip)
self.assertEqual(3, status.tests_total)
def test_metrics_summary(self):
build = self.create_build('1', datetime=h(10))
test_run = build.test_runs.first()
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='metric')
test_run.metrics.create(metadata=foo_metadata, suite=self.suite, result=2, build=test_run.build, environment=test_run.environment)
bar_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='bar', kind='metric')
test_run.metrics.create(metadata=bar_metadata, suite=self.suite, result=2, build=test_run.build, environment=test_run.environment)
status = ProjectStatus.create_or_update(build)
self.assertEqual(2.0, status.metrics_summary)
def test_updates_data_as_new_testruns_arrive(self):
build = self.create_build('1', datetime=h(10), create_test_run=False)
tests_json = """
{
"tests/foo": "pass"
}
"""
self.receive_testrun(build.version, self.environment.slug, tests_file=tests_json)
ProjectStatus.create_or_update(build)
tests_json = """
{
"tests/bar": "pass",
"tests/baz": "fail",
"tests/qux": "none"
}
"""
self.receive_testrun(build.version, self.environment.slug, tests_file=tests_json)
test_run2 = build.test_runs.create(environment=self.environment)
metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='v1', kind='metric')
test_run2.metrics.create(metadata=metadata, suite=self.suite, result=5.0, build=test_run2.build, environment=test_run2.environment)
status = ProjectStatus.create_or_update(build)
build.refresh_from_db()
status.refresh_from_db()
self.assertEqual(status, build.status)
self.assertEqual(2, status.tests_pass)
self.assertEqual(1, status.tests_fail)
self.assertEqual(1, status.tests_skip)
self.assertEqual(status.tests_pass, build.status.tests_pass)
self.assertEqual(status.tests_fail, build.status.tests_fail)
self.assertEqual(status.tests_skip, build.status.tests_skip)
self.assertAlmostEqual(5.0, status.metrics_summary)
self.assertEqual(status.metrics_summary, build.status.metrics_summary)
def test_populates_last_updated(self):
build = self.create_build('1', datetime=h(10))
status = ProjectStatus.create_or_update(build)
self.assertIsNotNone(status.last_updated)
def test_updates_last_updated(self):
build = self.create_build('1', datetime=h(10))
test_run1 = build.test_runs.first()
metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='test')
test_run1.tests.create(build=test_run1.build, environment=test_run1.environment, metadata=metadata, suite=self.suite, result=True)
status = ProjectStatus.create_or_update(build)
old_date = status.last_updated
build.test_runs.create(environment=self.environment)
status = ProjectStatus.create_or_update(build)
self.assertNotEqual(status.last_updated, old_date)
def test_previous_must_be_finished(self):
self.environment.expected_test_runs = 2
self.environment.save()
# finished
build1 = self.create_build('1', datetime=h(10), create_test_run=False)
build1.test_runs.create(environment=self.environment)
build1.test_runs.create(environment=self.environment)
status1 = ProjectStatus.create_or_update(build1)
# not finished
build2 = self.create_build('2', datetime=h(5), create_test_run=False)
ProjectStatus.create_or_update(build2)
# current build
build = self.create_build('3', datetime=h(0), create_test_run=False)
status = ProjectStatus.create_or_update(build)
self.assertEqual(status1, status.get_previous())
def test_previous_must_be_from_the_same_project(self):
previous_build = self.create_build('1', datetime=h(10))
previous = ProjectStatus.create_or_update(previous_build)
other_project = self.group.projects.create(slug='other_project')
other_env = other_project.environments.create(slug='other_env')
other_build = other_project.builds.create(version='1', datetime=h(5))
other_build.test_runs.create(environment=other_env)
ProjectStatus.create_or_update(other_build)
build = self.create_build('2', datetime=h(0))
status = ProjectStatus.create_or_update(build)
self.assertEqual(previous, status.get_previous())
def test_zero_expected_test_runs(self):
self.project.environments.create(slug='other_env', expected_test_runs=0)
build = self.create_build('1')
status = ProjectStatus.create_or_update(build)
self.assertTrue(status.finished)
def test_cache_test_run_counts(self):
build = self.create_build('1', create_test_run=False)
build.test_runs.create(environment=self.environment, completed=True)
build.test_runs.create(environment=self.environment, completed=True)
build.test_runs.create(environment=self.environment, completed=False)
status = ProjectStatus.create_or_update(build)
self.assertEqual(3, status.test_runs_total)
self.assertEqual(2, status.test_runs_completed)
self.assertEqual(1, status.test_runs_incomplete)
def test_cache_test_run_counts_on_update(self):
build = self.create_build('1', create_test_run=False)
ProjectStatus.create_or_update(build)
build.test_runs.create(environment=self.environment, completed=True)
build.test_runs.create(environment=self.environment, completed=False)
status = ProjectStatus.create_or_update(build)
self.assertEqual(2, status.test_runs_total)
self.assertEqual(1, status.test_runs_completed)
self.assertEqual(1, status.test_runs_incomplete)
def test_cache_regressions(self):
build1 = self.create_build('1', datetime=h(10))
build1.project.thresholds.create(name='%s/foo-metric' % self.suite.slug)
test_run1 = build1.test_runs.first()
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='test')
foo_metadata_metric, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo-metric', kind='metric')
test_run1.tests.create(build=test_run1.build, environment=test_run1.environment, metadata=foo_metadata, suite=self.suite, result=True)
test_run1.metrics.create(build=test_run1.build, environment=test_run1.environment, metadata=foo_metadata_metric, suite=self.suite, result=1)
ProjectStatus.create_or_update(build1)
build2 = self.create_build('2', datetime=h(9))
test_run2 = build2.test_runs.first()
test_run2.tests.create(build=test_run2.build, environment=test_run2.environment, metadata=foo_metadata, suite=self.suite, result=False)
test_run2.metrics.create(build=test_run2.build, environment=test_run2.environment, metadata=foo_metadata_metric, suite=self.suite, result=2)
status = ProjectStatus.create_or_update(build2)
self.assertIsNotNone(status.regressions)
self.assertIsNone(status.fixes)
self.assertIsNotNone(status.metric_regressions)
self.assertIsNone(status.metric_fixes)
def test_cache_regressions_update(self):
build1 = self.create_build('1', datetime=h(10))
test_run1 = build1.test_runs.first()
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='test')
test_run1.tests.create(build=test_run1.build, environment=test_run1.environment, metadata=foo_metadata, suite=self.suite, result=True)
ProjectStatus.create_or_update(build1)
build2 = self.create_build('2', datetime=h(9))
test_run2 = build2.test_runs.first()
test_run2.tests.create(build=test_run2.build, environment=test_run2.environment, metadata=foo_metadata, suite=self.suite, result=True)
status1 = ProjectStatus.create_or_update(build2)
self.assertIsNone(status1.regressions)
self.assertIsNone(status1.fixes)
build3 = self.create_build('3', datetime=h(8))
test_run3 = build3.test_runs.first()
test_run3.tests.create(build=test_run3.build, environment=test_run3.environment, metadata=foo_metadata, suite=self.suite, result=False)
status2 = ProjectStatus.create_or_update(build3)
self.assertIsNotNone(status2.regressions)
self.assertIsNone(status2.fixes)
def test_cache_fixes(self):
build1 = self.create_build('1', datetime=h(10))
build1.project.thresholds.create(name='%s/foo-metric' % self.suite.slug)
test_run1 = build1.test_runs.first()
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='test')
foo_metadata_metric, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo-metric', kind='metric')
test_run1.tests.create(build=test_run1.build, environment=test_run1.environment, metadata=foo_metadata, suite=self.suite, result=False)
test_run1.metrics.create(build=test_run1.build, environment=test_run1.environment, metadata=foo_metadata_metric, suite=self.suite, result=2)
ProjectStatus.create_or_update(build1)
build2 = self.create_build('2', datetime=h(9))
test_run2 = build2.test_runs.first()
test_run2.tests.create(build=test_run2.build, environment=test_run2.environment, metadata=foo_metadata, suite=self.suite, result=True)
test_run2.metrics.create(build=test_run2.build, environment=test_run2.environment, metadata=foo_metadata_metric, suite=self.suite, result=1)
status = ProjectStatus.create_or_update(build2)
self.assertIsNotNone(status.fixes)
self.assertIsNone(status.regressions)
self.assertIsNotNone(status.metric_fixes)
self.assertIsNone(status.metric_regressions)
def test_cache_fixes_update(self):
build1 = self.create_build('1', datetime=h(10))
test_run1 = build1.test_runs.first()
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='test')
test_run1.tests.create(build=test_run1.build, environment=test_run1.environment, metadata=foo_metadata, suite=self.suite, result=False)
ProjectStatus.create_or_update(build1)
build2 = self.create_build('2', datetime=h(9))
test_run2 = build2.test_runs.first()
test_run2.tests.create(build=test_run2.build, environment=test_run2.environment, metadata=foo_metadata, suite=self.suite, result=False)
status1 = ProjectStatus.create_or_update(build2)
self.assertIsNone(status1.fixes)
self.assertIsNone(status1.regressions)
build3 = self.create_build('3', datetime=h(8))
test_run3 = build3.test_runs.first()
test_run3.tests.create(build=test_run3.build, environment=test_run3.environment, metadata=foo_metadata, suite=self.suite, result=True)
status2 = ProjectStatus.create_or_update(build3)
self.assertIsNotNone(status2.fixes)
self.assertIsNone(status2.regressions)
def test_get_exceeded_thresholds(self):
build = self.create_build('1')
testrun = build.test_runs.create(environment=self.environment)
metric1_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='metric1', kind='metric')
metric2_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='metric2', kind='metric')
metric3_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='metric3', kind='metric')
testrun.metrics.create(metadata=metric1_metadata, suite=self.suite, result=3, build=testrun.build, environment=testrun.environment)
testrun.metrics.create(metadata=metric2_metadata, suite=self.suite, result=8, build=testrun.build, environment=testrun.environment)
testrun.metrics.create(metadata=metric3_metadata, suite=self.suite, result=5, build=testrun.build, environment=testrun.environment)
build_a = self.create_build('2')
testrun_a = build_a.test_runs.create(environment=self.environment_a)
metric4_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='metric4', kind='metric')
metric5_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='metric5', kind='metric')
metric6_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='metric6', kind='metric')
testrun_a.metrics.create(metadata=metric4_metadata, suite=self.suite_a, result=3, build=testrun_a.build, environment=testrun_a.environment)
testrun_a.metrics.create(metadata=metric5_metadata, suite=self.suite_a, result=2, build=testrun_a.build, environment=testrun_a.environment)
testrun_a.metrics.create(metadata=metric6_metadata, suite=self.suite_a, result=7, build=testrun_a.build, environment=testrun_a.environment)
status = ProjectStatus.create_or_update(build)
MetricThreshold.objects.create(project=self.environment.project,
environment=self.environment,
name='suite_/metric2', value=4,
is_higher_better=False)
thresholds = status.get_exceeded_thresholds()
self.assertEqual(len(thresholds), 1)
self.assertEqual(thresholds[0][1].name, 'metric2')
self.assertEqual(thresholds[0][1].result, 8)
status_a = ProjectStatus.create_or_update(build_a)
MetricThreshold.objects.create(project=self.environment_a.project,
environment=self.environment_a,
name='suite_a/metric6', value=4,
is_higher_better=True)
thresholds = status_a.get_exceeded_thresholds()
self.assertEqual(len(thresholds), 0)
def test_last_build_comparison(self):
# Test that the build that we compare against is truly the last one
# time wise.
build1 = self.create_build('1', datetime=h(10))
test_run1 = build1.test_runs.first()
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='test')
bar_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='bar', kind='test')
test_run1.tests.create(build=test_run1.build, environment=test_run1.environment, metadata=foo_metadata, suite=self.suite, result=False)
test_run1.tests.create(build=test_run1.build, environment=test_run1.environment, metadata=bar_metadata, suite=self.suite, result=False)
ProjectStatus.create_or_update(build1)
build2 = self.create_build('2', datetime=h(9))
test_run2 = build2.test_runs.first()
test_run2.tests.create(build=test_run2.build, environment=test_run2.environment, metadata=foo_metadata, suite=self.suite, result=False)
test_run2.tests.create(build=test_run2.build, environment=test_run2.environment, metadata=bar_metadata, suite=self.suite, result=True)
ProjectStatus.create_or_update(build2)
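# 'bar' already passes in build2, so build3 (the most recent build) should
# only report 'foo' as a new fix.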
build3 = self.create_build('3', datetime=h(8))
test_run3 = build3.test_runs.first()
test_run3.tests.create(build=test_run3.build, environment=test_run3.environment, metadata=foo_metadata, suite=self.suite, result=True)
test_run3.tests.create(build=test_run3.build, environment=test_run3.environment, metadata=bar_metadata, suite=self.suite, result=True)
status3 = ProjectStatus.create_or_update(build3)
fixes3 = status3.get_fixes()
self.assertEqual(len(fixes3['theenvironment']), 1)
self.assertEqual(fixes3['theenvironment'][0], 'suite_/foo')

def test_keep_baseline(self):
# Test that baseline is kept for unfinished builds
self.environment.expected_test_runs = 2
self.environment.save()
build1 = self.create_build('10', datetime=h(10))
foo_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo', kind='test')
foo2_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='foo2', kind='test')
bar_metadata, _ = SuiteMetadata.objects.get_or_create(suite=self.suite.slug, name='bar', kind='test')
test_run11 = build1.test_runs.first()
test_run11.tests.create(build=test_run11.build, environment=test_run11.environment, metadata=foo_metadata, suite=self.suite, result=False)
test_run11.tests.create(build=test_run11.build, environment=test_run11.environment, metadata=bar_metadata, suite=self.suite, result=False)
test_run12 = build1.test_runs.create(environment=self.environment)
test_run12.tests.create(build=test_run12.build, environment=test_run12.environment, metadata=foo2_metadata, suite=self.suite_a, result=False)
ProjectStatus.create_or_update(build1)
build2 = self.create_build('20', datetime=h(9))
test_run21 = build2.test_runs.first()
test_run21.tests.create(build=test_run21.build, environment=test_run21.environment, metadata=foo_metadata, suite=self.suite, result=False)
test_run21.tests.create(build=test_run21.build, environment=test_run21.environment, metadata=bar_metadata, suite=self.suite, result=True)
ProjectStatus.create_or_update(build2)
build3 = self.create_build('30', datetime=h(8))
test_run31 = build3.test_runs.first()
test_run31.tests.create(build=test_run31.build, environment=test_run31.environment, metadata=foo_metadata, suite=self.suite, result=True)
test_run31.tests.create(build=test_run31.build, environment=test_run31.environment, metadata=bar_metadata, suite=self.suite, result=True)
ProjectStatus.create_or_update(build3)
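# builds 2 and 3 are still missing one of the two expected test runs, so
# the finished build1 remains their baseline.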
self.assertEqual(build2.status.baseline, build1)
self.assertEqual(build3.status.baseline, build1)
test_run22 = build2.test_runs.create(environment=self.environment)
test_run22.tests.create(build=test_run22.build, environment=test_run22.environment, metadata=foo2_metadata, suite=self.suite_a, result=False)
ProjectStatus.create_or_update(build2)
test_run32 = build3.test_runs.create(environment=self.environment)
test_run32.tests.create(build=test_run32.build, environment=test_run32.environment, metadata=foo2_metadata, suite=self.suite_a, result=False)
ProjectStatus.create_or_update(build3)
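# receiving the missing test runs later should not change the baseline
# that was already recorded for builds 2 and 3.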
self.assertEqual(build2.status.baseline, build1)
self.assertEqual(build3.status.baseline, build1)

def test_save_malformed_yaml(self):
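# fixes that cannot be parsed as valid YAML should cause save() to raise
# ValidationError.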
build = self.project.builds.create(version="bad-yaml")
build.status.fixes = """!!python/object/apply:collections.OrderedDict
- - - env
- [suite/test"""
with self.assertRaises(ValidationError):
build.status.save()

def test_get_malformed_yaml(self):
# For backwards compatibility, records with bad YAML that were already saved must still be readable.
build = self.project.builds.create(version="bad-yaml")
build.status.fixes = """!!python/object/apply:collections.OrderedDict
- - - env
- [suite/test"""
self.assertEqual(0, len(build.status.get_fixes()))
|
class ProjectStatusTest(TestCase):
    def setUp(self):
        pass
    def create_build(self, v, datetime=None, create_test_run=True):
        pass
    def test_status_of_first_build(self):
        pass
    def test_status_of_second_build(self):
        pass
    def test_dont_record_the_same_status_twice(self):
        pass
    def test_wait_for_build_completion(self):
        pass
    def test_first_build(self):
        pass
    def test_build_not_finished(self):
        pass
    def test_force_finishing_build_on_notification_timeout_disabled(self):
        pass
    def test_force_finishing_build_on_notification_timeout_enabled(self):
        pass
    def test_test_summary(self):
        pass
    def test_metrics_summary(self):
        pass
    def test_updates_data_as_new_testruns_arrive(self):
        pass
    def test_populates_last_updated(self):
        pass
    def test_updates_last_updated(self):
        pass
    def test_previous_must_be_finished(self):
        pass
    def test_previous_must_be_from_the_same_project(self):
        pass
    def test_zero_expected_test_runs(self):
        pass
    def test_cache_test_run_counts(self):
        pass
    def test_cache_test_run_counts_on_update(self):
        pass
    def test_cache_regressions(self):
        pass
    def test_cache_regressions_update(self):
        pass
    def test_cache_fixes(self):
        pass
    def test_cache_fixes_update(self):
        pass
    def test_get_exceeded_thresholds(self):
        pass
    def test_last_build_comparison(self):
        pass
    def test_keep_baseline(self):
        pass
    def test_save_malformed_yaml(self):
        pass
    def test_get_malformed_yaml(self):
        pass
| 30 | 0 | 14 | 2 | 12 | 0 | 1 | 0.02 | 1 | 4 | 4 | 0 | 29 | 8 | 29 | 29 | 439 | 81 | 351 | 165 | 321 | 7 | 325 | 165 | 295 | 2 | 1 | 1 | 30 |