python_code
stringlengths 0
679k
| repo_name
stringlengths 9
41
| file_path
stringlengths 6
149
|
---|---|---|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_extra_space,
delete_preserve_order,
)
from pynini.lib import pynutil
class MoneyFst(GraphFst):
    """
    Finite state transducer for verbalizing money, e.g.
        money { integer_part: "tolv" currency_maj: "kronor" fractional_part: "femtio" currency_min: "öre" }
        -> "tolv kronor femtio öre"

    (The previous docstring example used `integer_part`/`currency` fields, but this
    grammar actually consumes `currency_maj`/`currency_min`/`fractional_part`.)

    Args:
        decimal: DecimalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="verbalize", deterministic=deterministic)
        keep_space = pynini.accep(" ")
        # Spoken currency words for the major and minor units.
        # `minor` renamed from `min`, which shadowed the builtin.
        maj = pynutil.delete("currency_maj: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        minor = pynutil.delete("currency_min: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        fractional_part = (
            pynutil.delete("fractional_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        )
        integer_part = decimal.integer

        # *** currency_maj
        graph_integer = integer_part + keep_space + maj

        # *** currency_maj + (***) | ((and) *** current_min)
        fractional = fractional_part + delete_extra_space + minor
        if not deterministic:
            # Optional Swedish connectors before the minor part ("och" = and,
            # "komma" = comma); the second union also covers the "och" variant.
            fractional |= pynutil.insert("och ") + fractional
            fractional |= pynutil.insert("komma ") + fractional
        graph_integer_with_minor = integer_part + keep_space + maj + keep_space + fractional + delete_preserve_order

        # *** point *** currency_maj
        graph_decimal = decimal.numbers + keep_space + maj

        # *** current_min
        graph_minor = fractional_part + delete_extra_space + minor + delete_preserve_order

        graph = graph_integer | graph_integer_with_minor | graph_decimal | graph_minor
        if not deterministic:
            graph |= graph_integer + delete_preserve_order
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/verbalizers/money.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for verbalizing cardinals,
    e.g. cardinal { integer: "tre" } -> "tre"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="cardinal", kind="verbalize", deterministic=deterministic)
        # A leading `negative: "true" ` field is spoken as "minus ".
        self.optional_sign = pynini.closure(pynini.cross("negative: \"true\" ", "minus "), 0, 1)
        # Content between the quotes of the integer field.
        quoted_value = pynini.closure(NEMO_NOT_QUOTE, 1)
        self.integer = pynutil.delete(" \"") + quoted_value + pynutil.delete("\"")
        # Full `integer: "..."` field with the label stripped.
        self.numbers = pynutil.delete("integer:") + self.integer
        graph = self.optional_sign + self.numbers
        self.fst = self.delete_tokens(graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/verbalizers/cardinal.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_preserve_order,
delete_space,
insert_space,
)
from pynini.lib import pynutil
class DecimalFst(GraphFst):
    """
    Finite state transducer for verbalizing decimals, e.g.
        decimal { negative: "true" integer_part: "tolv" fractional_part: "fem" } -> "minus tolv komma fem"

    NOTE(review): the original docstring showed a Spanish example ("dos", "coma",
    "menos") but this grammar inserts Swedish "komma"/"minus"; the example above
    reflects the actual output. Also `kind="classify"` is passed to super() even
    though the grammar strips quoted fields like a verbalizer — presumably
    intentional, but worth confirming against how the FAR names are consumed.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)
        # `negative: "true"` becomes a spoken "minus ".
        optional_sign = pynini.closure(pynini.cross("negative: \"true\"", "minus ") + delete_space, 0, 1)
        # Quoted content of integer_part / fractional_part with field wrappers removed.
        integer = pynutil.delete("integer_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        fractional_default = (
            pynutil.delete("fractional_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        )
        self.integer = integer
        # Swedish decimal-separator word inserted between integer and fraction.
        conjunction = pynutil.insert(" komma ")
        fractional = conjunction + fractional_default
        quantity = (
            delete_space
            + insert_space
            + pynutil.delete("quantity: \"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        optional_quantity = pynini.closure(quantity, 0, 1)
        self.optional_quantity = optional_quantity
        # Either "<int> <quantity>" or "<int> komma <frac> [<quantity>]".
        graph = optional_sign + pynini.union(
            (integer + quantity), (integer + delete_space + fractional + optional_quantity)
        )
        # Variant requiring a quantity field; exposed for reuse by other grammars.
        self.numbers_only_quantity = (
            optional_sign
            + pynini.union((integer + quantity), (integer + delete_space + fractional + quantity)).optimize()
        )
        self.graph = (graph + delete_preserve_order).optimize()
        self.numbers = graph
        graph += delete_preserve_order
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/verbalizers/decimals.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
NEMO_SIGMA,
GraphFst,
delete_preserve_order,
insert_space,
)
from nemo_text_processing.text_normalization.sv.graph_utils import bos_or_space, eos_or_space
from nemo_text_processing.text_normalization.sv.utils import get_abs_path
from pynini.lib import pynutil
# Inverted digit maps: written digit -> spoken word (TSV columns inverted).
digit_no_zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/digit.tsv")))
zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/zero.tsv")))
# Spoken names for URL/email symbols plus common server and domain names.
graph_symbols = pynini.string_file(get_abs_path("data/electronic/symbols.tsv"))
server_common = pynini.string_file(get_abs_path("data/electronic/server_name.tsv"))
domain_common = pynini.string_file(get_abs_path("data/electronic/domain.tsv"))
class ElectronicFst(GraphFst):
    """
    Finite state transducer for verbalizing electronic
        e.g. electronic { username: "abc" domain: "hotmail.com" } -> "a b c snabel-a hotmail punkt com"
                                                                  -> "a b c snabel-a h o t m a i l punkt c o m"
                                                                  -> "a b c snabel-a hotmail punkt c o m"
                                                                  -> "a b c at h o t m a i l punkt com"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="verbalize", deterministic=deterministic)
        graph_digit = digit_no_zero | zero

        def add_space_after_char():
            # Insert a space between consecutive non-quote, non-space characters
            # so each character can be verbalized individually.
            return pynini.closure(NEMO_NOT_QUOTE - pynini.accep(" ") + insert_space) + (
                NEMO_NOT_QUOTE - pynini.accep(" ")
            )

        # Rewrite symbols and digits anywhere in the string to their spoken forms.
        verbalize_characters = pynini.cdrewrite(graph_symbols | graph_digit, "", "", NEMO_SIGMA)
        user_name = pynutil.delete("username: \"") + add_space_after_char() + pynutil.delete("\"")
        user_name @= verbalize_characters
        # Prefer known domain/server names; fall back to raw characters at a
        # small penalty so whole-word readings win when available.
        convert_defaults = pynutil.add_weight(NEMO_NOT_QUOTE, weight=0.0001) | domain_common | server_common
        domain = convert_defaults + pynini.closure(insert_space + convert_defaults)
        domain @= verbalize_characters
        domain = pynutil.delete("domain: \"") + domain + pynutil.delete("\"")
        # Protocol field: spell out characters, speak symbols, then collapse
        # "snedstreck snedstreck" (slash slash) into "dubbla snedstreck".
        protocol = (
            pynutil.delete("protocol: \"")
            + add_space_after_char() @ pynini.cdrewrite(graph_symbols, "", "", NEMO_SIGMA)
            + pynutil.delete("\"")
        ) @ pynini.cdrewrite(
            pynini.cross("snedstreck snedstreck", "dubbla snedstreck"), bos_or_space, eos_or_space, NEMO_SIGMA
        )
        # Either "[protocol ]domain" (URL) or "username snabel-a domain" (email).
        self.graph = (pynini.closure(protocol + pynini.accep(" "), 0, 1) + domain) | (
            user_name + pynini.accep(" ") + pynutil.insert("snabel-a ") + domain
        )
        delete_tokens = self.delete_tokens(self.graph + delete_preserve_order)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/verbalizers/electronic.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
NEMO_SPACE,
GraphFst,
delete_preserve_order,
)
from nemo_text_processing.text_normalization.sv.utils import get_abs_path
from pynini.lib import pynutil
# Era markers and their spoken forms — presumably abbreviation -> expansion;
# TODO confirm against data/dates/era_words.tsv.
era_words = pynini.string_file(get_abs_path("data/dates/era_words.tsv"))
class DateFst(GraphFst):
    """
    Finite state transducer for verbalizing date, e.g.
        date { day: "trettioförsta" month: "mars" year: "tjugotjugotvå" } -> "trettioförsta mars tjugotjugotvå"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="date", kind="verbalize", deterministic=deterministic)

        def _quoted_field(label):
            # Strip `label: "` and the closing quote, keeping the spoken content.
            return pynutil.delete(f"{label}: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")

        day = _quoted_field("day")
        month = _quoted_field("month")
        year = _quoted_field("year")
        era = _quoted_field("era")

        optional_era = pynini.closure(NEMO_SPACE + era, 0, 1)
        year_maybe_era = year + optional_era

        # year + era, with or without a trailing preserve_order flag
        graph_year_era = year + NEMO_SPACE + era + delete_preserve_order
        graph_year_era |= year + NEMO_SPACE + era

        # day month [year [era]]
        graph_dmy = (
            day + NEMO_SPACE + month + pynini.closure(NEMO_SPACE + year_maybe_era, 0, 1) + delete_preserve_order
        )

        # month year | day month year (written order already year-last)
        graph_was_ymd = pynini.union(month + NEMO_SPACE + year, day + NEMO_SPACE + month + NEMO_SPACE + year)

        self.graph = graph_dmy | graph_year_era | graph_was_ymd | year
        self.fst = self.delete_tokens(self.graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/verbalizers/date.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/dates/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/numbers/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/ordinals/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/electronic/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/telephone/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/time/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/money/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/measure/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/sv/data/roman/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import os
def get_abs_path(rel_path):
    """
    Get absolute path

    Args:
        rel_path: relative path to this file

    Returns absolute path
    """
    # os.path.join handles path separators portably instead of hard-coding '/'.
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), rel_path)
def load_labels(abs_path):
    """
    Load a TSV file as a list of row lists.

    Args:
        abs_path: absolute path

    Returns list of rows, each a list of column strings
    """
    # Context manager ensures the file handle is closed (the original leaked it).
    with open(abs_path, encoding="utf-8") as label_tsv:
        return list(csv.reader(label_tsv, delimiter="\t"))
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/utils.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_NON_BREAKING_SPACE,
NEMO_SIGMA,
GraphFst,
convert_space,
insert_space,
)
from nemo_text_processing.text_normalization.it.utils import get_abs_path
from pynini.examples import plurals
from pynini.lib import pynutil
# Singular unit names and irregular (suppletive) plural forms from TSV data.
unit_singular = pynini.string_file(get_abs_path("data/measure/measurements.tsv"))
suppletive = pynini.string_file(get_abs_path("data/measure/suppletive.tsv"))
def singular_to_plural():
    """Build an FST mapping Italian singular unit names to their plurals."""
    # Regular Italian endings: -o -> -i, -a -> -e, -e -> -i.
    ending_o = NEMO_SIGMA + pynini.cross("o", "") + pynutil.insert("i")
    ending_a = NEMO_SIGMA + pynini.cross("a", "") + pynutil.insert("e")
    ending_e = NEMO_SIGMA + pynini.cross("e", "") + pynutil.insert("i")
    regular_rules = pynini.union(ending_o, ending_a, ending_e)
    # Irregular forms from suppletive.tsv take priority over the regular rules.
    return plurals._priority_union(suppletive, regular_rules, NEMO_SIGMA).optimize()
class MeasureFst(GraphFst):
    """
    Finite state transducer for classifying measure, e.g.
        "2,4 g" -> measure { cardinal { integer_part: "due" fractional_part: "quattro" units: "grammi" preserve_order: true } }

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="measure", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph

        graph_unit_singular = convert_space(unit_singular)
        graph_unit_plural = convert_space(suppletive)
        # Also accept a singular unit rewritten to its plural form at end of string.
        graph_unit_plural |= graph_unit_singular @ pynini.cdrewrite(convert_space(suppletive), "", "[EOS]", NEMO_SIGMA)
        optional_graph_negative = pynini.closure("-", 0, 1)

        # "/<unit>" -> "per <unit>", joined with a non-breaking space.
        graph_unit_denominator = (
            pynini.cross("/", "per") + pynutil.insert(NEMO_NON_BREAKING_SPACE) + graph_unit_singular
        )
        optional_unit_denominator = pynini.closure(
            pynutil.insert(NEMO_NON_BREAKING_SPACE) + graph_unit_denominator, 0, 1,
        )
        # units: "<plural unit>[ per <unit>]" | units: "per <unit>"
        unit_plural = (
            pynutil.insert("units: \"")
            + (graph_unit_plural + (optional_unit_denominator) | graph_unit_denominator)
            + pynutil.insert("\"")
        )
        unit_singular_graph = (
            pynutil.insert("units: \"")
            + ((graph_unit_singular + optional_unit_denominator) | graph_unit_denominator)
            + pynutil.insert("\"")
        )

        # Decimal quantities always take the plural unit.
        subgraph_decimal = decimal.fst + insert_space + pynini.closure(pynutil.delete(" "), 0, 1) + unit_plural
        # Cardinals other than "1" take the plural unit...
        subgraph_cardinal = (
            (optional_graph_negative + (pynini.closure(NEMO_DIGIT) - "1")) @ cardinal.fst
            + insert_space
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + unit_plural
        )
        # ...while "1" takes the singular unit, with "uno" truncated to "un".
        subgraph_cardinal |= (
            (optional_graph_negative + pynini.accep("1"))
            @ cardinal.fst
            @ pynini.cdrewrite(pynini.cross("uno", "un"), "", "", NEMO_SIGMA)
            + insert_space
            + pynini.closure(pynutil.delete(" "), 0, 1)
            + unit_singular_graph
        )
        # "<number>-<letters>" tokens, e.g. "5-x": number first.
        cardinal_dash_alpha = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.delete('-')
            + pynutil.insert("\" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        # "<letters>-<number>" tokens: unit first, number second.
        alpha_dash_cardinal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" }")
        )
        # "<decimal>-<letters>" tokens.
        # NOTE(review): the inserted "\" } units: \"" assumes
        # decimal.final_graph_wo_negative leaves an unclosed quote — confirm
        # against the it DecimalFst tagger.
        decimal_dash_alpha = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.delete('-')
            + pynutil.insert("\" } units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.insert("\"")
        )
        # "<decimal>x" dimension-style tokens (e.g. "2,4x").
        decimal_times = (
            pynutil.insert("decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.insert("\" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        # "<cardinal>x" dimension-style tokens.
        cardinal_times = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynutil.insert("\" } units: \"")
            + pynini.union('x', 'X')
            + pynutil.insert("\"")
        )
        # "<letters>-<decimal>" tokens.
        # NOTE(review): closes with " }" (no quote) unlike decimal_times /
        # decimal_dash_alpha, which insert "\" }" — confirm the quote balance.
        alpha_dash_decimal = (
            pynutil.insert("units: \"")
            + pynini.closure(NEMO_ALPHA, 1)
            + pynutil.delete('-')
            + pynutil.insert("\"")
            + pynutil.insert(" decimal { ")
            + decimal.final_graph_wo_negative
            + pynutil.insert(" }")
        )
        final_graph = (
            subgraph_decimal
            | subgraph_cardinal
            | cardinal_dash_alpha
            | alpha_dash_cardinal
            | decimal_dash_alpha
            | decimal_times
            | alpha_dash_decimal
            | cardinal_times
        )
        # final_graph += pynutil.insert(" preserve_order: true")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/measure.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst, convert_space
from nemo_text_processing.text_normalization.it.utils import get_abs_path, load_labels
from pynini.lib import pynutil
class WhiteListFst(GraphFst):
    """
    Finite state transducer for classifying whitelist, e.g.
        "dr." -> tokens { name: "dottor" }
        "Avv." -> tokens { name: "avvocato" }
    This class has highest priority among all classifier grammars. Whitelisted tokens are defined and loaded from "data/whitelist.tsv".

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        input_file: path to a file with whitelist replacements
    """

    def __init__(self, input_case: str, deterministic: bool = True, input_file: str = None):
        super().__init__(name="whitelist", kind="classify", deterministic=deterministic)

        def _get_whitelist_graph(input_case, file):
            # Build a string map from a TSV file, lower-casing the written form
            # when the pipeline expects lower-cased input.
            whitelist = load_labels(file)
            if input_case == "lower_cased":
                whitelist = [[x[0].lower()] + x[1:] for x in whitelist]
            graph = pynini.string_map(whitelist)
            return graph

        graph = _get_whitelist_graph(input_case, get_abs_path("data/whitelist.tsv"))
        if not deterministic and input_case != "lower_cased":
            # Also accept lower-cased variants, at a small weight penalty.
            graph |= pynutil.add_weight(
                _get_whitelist_graph("lower_cased", get_abs_path("data/whitelist.tsv")), weight=0.0001
            )
        if input_file:
            whitelist_provided = _get_whitelist_graph(input_case, input_file)
            if not deterministic:
                graph |= whitelist_provided
            else:
                # In deterministic mode a user-supplied file replaces the default list.
                graph = whitelist_provided
        if not deterministic:
            # NOTE(review): path is "data/measures/..." here but taggers/measure.py
            # loads "data/measure/measurements.tsv" — confirm which directory exists.
            units_graph = _get_whitelist_graph(input_case, file=get_abs_path("data/measures/measurements.tsv"))
            graph |= units_graph
        self.graph = graph
        self.final_graph = convert_space(self.graph).optimize()
        self.fst = (pynutil.insert("name: \"") + self.final_graph + pynutil.insert("\"")).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/whitelist.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_WHITE_SPACE,
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.taggers.punctuation import PunctuationFst
from nemo_text_processing.text_normalization.it.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.it.taggers.decimals import DecimalFst
from nemo_text_processing.text_normalization.it.taggers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.it.taggers.measure import MeasureFst
from nemo_text_processing.text_normalization.it.taggers.money import MoneyFst
from nemo_text_processing.text_normalization.it.taggers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.it.taggers.word import WordFst
from pynini.lib import pynutil
class ClassifyFst(GraphFst):
    """
    Final class that composes all other classification grammars. This class can process an entire sentence, that is lower cased.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State aRchive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.
    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
        whitelist: path to a file with whitelist replacements
    """

    def __init__(
        self,
        input_case: str,
        deterministic: bool = False,
        cache_dir: str = None,
        overwrite_cache: bool = False,
        whitelist: str = None,
    ):
        super().__init__(name="tokenize_and_classify", kind="classify", deterministic=deterministic)
        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            # Bake the whitelist file name into the cache key so different
            # whitelists do not collide in the same cache directory.
            whitelist_file = os.path.basename(whitelist) if whitelist else ""
            far_file = os.path.join(
                cache_dir, f"_{input_case}_it_tn_{deterministic}_deterministic{whitelist_file}.far"
            )
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Fast path: restore the precompiled grammar from the FAR cache.
            self.fst = pynini.Far(far_file, mode="r")["tokenize_and_classify"]
            logging.info(f"ClassifyFst.fst was restored from {far_file}.")
        else:
            logging.info(f"Creating ClassifyFst grammars. This might take some time...")
            # Build the individual taggers; cardinal and decimal are shared
            # with the measure and money taggers below.
            self.cardinal = CardinalFst(deterministic=deterministic)
            cardinal_graph = self.cardinal.fst
            self.decimal = DecimalFst(cardinal=self.cardinal, deterministic=deterministic)
            decimal_graph = self.decimal.fst
            word_graph = WordFst(deterministic=deterministic).fst
            self.whitelist = WhiteListFst(input_case=input_case, deterministic=deterministic, input_file=whitelist)
            whitelist_graph = self.whitelist.fst
            self.electronic = ElectronicFst(deterministic=deterministic)
            electronic_graph = self.electronic.fst
            self.measure = MeasureFst(cardinal=self.cardinal, decimal=self.decimal, deterministic=deterministic)
            measure_graph = self.measure.fst
            self.money = MoneyFst(cardinal=self.cardinal, decimal=self.decimal, deterministic=deterministic)
            money_graph = self.money.fst
            punct_graph = PunctuationFst(deterministic=deterministic).fst
            # Union of all taggers. Lower weight wins; the generic word tagger
            # is heavily penalized so it only fires as a fallback.
            classify = (
                pynutil.add_weight(whitelist_graph, 1)
                | pynutil.add_weight(cardinal_graph, 1.1)
                | pynutil.add_weight(decimal_graph, 1.1)
                | pynutil.add_weight(electronic_graph, 1.09)
                | pynutil.add_weight(measure_graph, 1.09)
                | pynutil.add_weight(money_graph, 1.09)
                | pynutil.add_weight(word_graph, 100)
            )
            punct = pynutil.insert("tokens { ") + pynutil.add_weight(punct_graph, weight=2.1) + pynutil.insert(" }")
            # Punctuation can be whitespace-separated or glued to a token.
            punct = pynini.closure(
                pynini.compose(pynini.closure(NEMO_WHITE_SPACE, 1), delete_extra_space)
                | (pynutil.insert(" ") + punct),
                1,
            )
            token = pynutil.insert("tokens { ") + classify + pynutil.insert(" }")
            token_plus_punct = (
                pynini.closure(punct + pynutil.insert(" ")) + token + pynini.closure(pynutil.insert(" ") + punct)
            )
            # A sentence is one token group followed by zero or more groups
            # separated by whitespace or punctuation.
            graph = token_plus_punct + pynini.closure(
                (
                    pynini.compose(pynini.closure(NEMO_WHITE_SPACE, 1), delete_extra_space)
                    | (pynutil.insert(" ") + punct + pynutil.insert(" "))
                )
                + token_plus_punct
            )
            graph = delete_space + graph + delete_space
            graph |= punct
            self.fst = graph.optimize()
            if far_file:
                # Persist the compiled grammar so later runs can skip the build.
                generator_main(far_file, {"tokenize_and_classify": self.fst})
                logging.info(f"ClassifyFst grammars are saved to {far_file}.")
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/tokenize_and_classify.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_SIGMA,
NEMO_SPACE,
GraphFst,
delete_space,
insert_space,
)
from nemo_text_processing.text_normalization.es.graph_utils import decimal_separator
from nemo_text_processing.text_normalization.it.utils import get_abs_path, load_labels
from pynini.lib import pynutil
# Currency lookup tables loaded from TSV data files. `maj_singular_labels`
# keeps the raw (symbol, name) pairs so MoneyFst can iterate per currency.
maj_singular_labels = load_labels(get_abs_path("data/money/currency_major.tsv"))
maj_singular = pynini.string_file((get_abs_path("data/money/currency_major.tsv")))
min_singular = pynini.string_file(get_abs_path("data/money/currency_minor.tsv"))
# Singular -> plural currency-name maps (feminine and masculine forms).
fem_plural = pynini.string_file((get_abs_path("data/money/currency_plural_fem.tsv")))
masc_plural = pynini.string_file((get_abs_path("data/money/currency_plural_masc.tsv")))
class MoneyFst(GraphFst):
    """
    Finite state transducer for classifying money, e.g.
        "€1" -> money { currency_maj: "euro" integer_part: "un"}
        "€1,000" -> money { currency_maj: "euro" integer_part: "un" }
        "4,2 £" -> money { integer_part: "quattro" currency_maj: "sterline" fractional_part: "venti" currency_min: "penny" preserve_order: true }
    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="classify", deterministic=deterministic)
        cardinal_graph = cardinal.graph
        graph_decimal_final = decimal.final_graph_wo_negative
        maj_singular_graph = maj_singular
        min_singular_graph = min_singular
        # Plural currency names are derived by composing the singular maps
        # with the feminine/masculine plural maps.
        maj_plural_graph = maj_singular @ (fem_plural | masc_plural)
        min_plural_graph = min_singular @ (fem_plural | masc_plural)
        graph_maj_singular = pynutil.insert("currency_maj: \"") + maj_singular_graph + pynutil.insert("\"")
        graph_maj_plural = pynutil.insert("currency_maj: \"") + maj_plural_graph + pynutil.insert("\"")
        # An integer part of exactly "1" verbalizes as "un".
        graph_integer_one = pynutil.insert("integer_part: \"") + pynini.cross("1", "un") + pynutil.insert("\"")
        decimal_with_quantity = (NEMO_SIGMA + NEMO_ALPHA) @ graph_decimal_final
        graph_decimal_plural = pynini.union(
            graph_maj_plural + pynini.closure(delete_space, 0, 1) + insert_space + graph_decimal_final,  # $1,05
            graph_decimal_final + pynini.closure(delete_space, 0, 1) + insert_space + graph_maj_plural,  # 1,05 $
        )
        # Plural applies when the integer part is anything but "1".
        graph_decimal_plural = ((NEMO_SIGMA - "1") + decimal_separator + NEMO_SIGMA) @ graph_decimal_plural
        graph_decimal_singular = pynini.union(
            graph_maj_singular + pynini.closure(delete_space, 0, 1) + insert_space + graph_decimal_final,  # $1,05
            graph_decimal_final + pynini.closure(delete_space, 0, 1) + insert_space + graph_maj_singular,  # 1,05 $
        )
        # Singular applies only when the integer part is exactly "1".
        graph_decimal_singular = (pynini.accep("1") + decimal_separator + NEMO_SIGMA) @ graph_decimal_singular
        graph_decimal = pynini.union(
            graph_decimal_singular,
            graph_decimal_plural,
            graph_maj_plural + pynini.closure(delete_space, 0, 1) + insert_space + decimal_with_quantity,
        )
        graph_integer = (
            pynutil.insert("integer_part: \"") + ((NEMO_SIGMA - "1") @ cardinal_graph) + pynutil.insert("\"")
        )
        # Integer amounts: singular form pairs with "1", plural with the rest.
        graph_integer_only = pynini.union(
            graph_maj_singular + pynini.closure(delete_space, 0, 1) + insert_space + graph_integer_one,
            graph_integer_one + pynini.closure(delete_space, 0, 1) + insert_space + graph_maj_singular,
        )
        graph_integer_only |= pynini.union(
            graph_maj_plural + pynini.closure(delete_space, 0, 1) + insert_space + graph_integer,
            graph_integer + pynini.closure(delete_space, 0, 1) + insert_space + graph_maj_plural,
        )
        graph = graph_integer_only | graph_decimal
        # remove trailing zeros of non zero number in the first 2 digits and fill up to 2 digits
        # e.g. 2000 -> 20, 0200->02, 01 -> 01, 10 -> 10
        # not accepted: 002, 00, 0,
        two_digits_fractional_part = (
            pynini.closure(NEMO_DIGIT) + (NEMO_DIGIT - "0") + pynini.closure(pynutil.delete("0"))
        ) @ (
            (pynutil.delete("0") + (NEMO_DIGIT - "0"))
            | ((NEMO_DIGIT - "0") + pynutil.insert("0"))
            | ((NEMO_DIGIT - "0") + NEMO_DIGIT)
        )
        graph_min_singular = pynutil.insert("currency_min: \"") + min_singular_graph + pynutil.insert("\"")
        graph_min_plural = pynutil.insert("currency_min: \"") + min_plural_graph + pynutil.insert("\"")
        # format ** euro ** centesimo
        decimal_graph_with_minor = None
        # Build one grammar per currency symbol so the major and minor unit
        # names always belong to the same currency.
        for curr_symbol, _ in maj_singular_labels:
            preserve_order = pynutil.insert(" preserve_order: true")
            integer_plus_maj = pynini.union(
                graph_integer + insert_space + pynutil.insert(curr_symbol) @ graph_maj_plural,
                graph_integer_one + insert_space + pynutil.insert(curr_symbol) @ graph_maj_singular,
            )
            # non zero integer part
            integer_plus_maj = (pynini.closure(NEMO_DIGIT) - "0") @ integer_plus_maj
            graph_fractional_one = (
                pynutil.insert("fractional_part: \"")
                + two_digits_fractional_part @ pynini.cross("1", "un")
                + pynutil.insert("\"")
            )
            graph_fractional = (
                two_digits_fractional_part @ (pynini.closure(NEMO_DIGIT, 1, 2) - "1") @ cardinal.two_digit_no_zero
            )
            graph_fractional = pynutil.insert("fractional_part: \"") + graph_fractional + pynutil.insert("\"")
            fractional_plus_min = pynini.union(
                graph_fractional + insert_space + pynutil.insert(curr_symbol) @ graph_min_plural,
                graph_fractional_one + insert_space + pynutil.insert(curr_symbol) @ graph_min_singular,
            )
            decimal_graph_with_minor_curr = (
                integer_plus_maj + pynini.cross(decimal_separator, NEMO_SPACE) + fractional_plus_min
            )
            # Alternative that reads the fraction without a minor-unit name,
            # slightly penalized so the minor-unit form is preferred.
            decimal_graph_with_minor_curr |= pynutil.add_weight(
                integer_plus_maj
                + pynini.cross(decimal_separator, NEMO_SPACE)
                + pynutil.insert("fractional_part: \"")
                + two_digits_fractional_part @ cardinal.two_digit_no_zero
                + pynutil.insert("\""),
                weight=0.0001,
            )
            # "0,xx" amounts: only the minor part is verbalized.
            decimal_graph_with_minor_curr |= pynutil.delete("0,") + fractional_plus_min
            # The currency symbol may precede or follow the amount.
            decimal_graph_with_minor_curr = pynini.union(
                pynutil.delete(curr_symbol)
                + pynini.closure(delete_space, 0, 1)
                + decimal_graph_with_minor_curr
                + preserve_order,
                decimal_graph_with_minor_curr
                + preserve_order
                + pynini.closure(delete_space, 0, 1)
                + pynutil.delete(curr_symbol),
            )
            decimal_graph_with_minor = (
                decimal_graph_with_minor_curr
                if decimal_graph_with_minor is None
                else pynini.union(decimal_graph_with_minor, decimal_graph_with_minor_curr)
            )
        # Negative weight makes the minor-unit reading win over plain decimals.
        final_graph = graph | pynutil.add_weight(decimal_graph_with_minor, -0.001)
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/money.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_SIGMA,
NEMO_SPACE,
NEMO_WHITE_SPACE,
GraphFst,
delete_space,
insert_space,
)
from nemo_text_processing.text_normalization.es.graph_utils import cardinal_separator
from nemo_text_processing.text_normalization.it.utils import get_abs_path
from pynini.lib import pynutil
# Digit-string -> Italian-word FSTs; inverted because the TSVs map
# word -> digits.
zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/zero.tsv")))
digit = pynini.invert(pynini.string_file(get_abs_path("data/numbers/digit.tsv")))
teen = pynini.invert(pynini.string_file(get_abs_path("data/numbers/teen.tsv")))
tens = pynini.invert(pynini.string_file(get_abs_path("data/numbers/tens.tsv")))
tens_one = pynini.invert(pynini.string_file(get_abs_path("data/numbers/tens_one.tsv")))
hundreds = pynini.invert(pynini.string_file(get_abs_path("data/numbers/hundreds.tsv")))
def filter_punctuation(fst: 'pynini.FstLike') -> 'pynini.FstLike':
    """
    Helper function for parsing number strings. Accepts plain digit strings as well
    as cardinal strings grouped in blocks of three digits delineated by
    'cardinal_separator' (see graph_utils), stripping the separators to leave a
    bare digit string:
        "1 000" -> "1000"
        "1.000.000" -> "1000000"
    Args:
        fst: Any pynini.FstLike object. Function composes fst onto string parser fst
    Returns:
        fst: A pynini.FstLike object
    """
    triple = NEMO_DIGIT ** 3  # interior blocks are exactly three digits
    leading_block = pynini.closure(NEMO_DIGIT, 1, 3)  # first block may be 1-3 digits
    # Plain digit runs without punctuation (used for page numbers, thousand series).
    parser = pynini.closure(NEMO_DIGIT, 1)
    # Separator-delimited form: leading block, then one or more separated triples.
    parser |= (
        leading_block
        + pynutil.delete(cardinal_separator)
        + pynini.closure(triple + pynutil.delete(cardinal_separator))
        + triple
    )
    return parser @ fst
class CardinalFst(GraphFst):
    '''
    Finite state transducer for classifying cardinals in Italian, e.g.
        "1000" -> cardinal { integer: "mille" }
        "2.000.000" -> cardinal { integer: "due milioni" }
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    '''

    def __init__(self, deterministic: bool = True):
        super().__init__(name='cardinal', kind='classify', deterministic=deterministic)
        # single digit
        graph_digit = digit
        graph_digit = digit
        digits_no_one = (NEMO_DIGIT - "1") @ graph_digit
        # double digit
        graph_tens = teen
        graph_tens |= tens + (pynutil.delete('0') | graph_digit)
        graph_tens |= tens_one
        self.tens = graph_tens.optimize()
        # Two digits where a leading zero is read as a space + digit ("05" -> " cinque").
        self.two_digit_no_zero = pynini.union(
            graph_digit, graph_tens, (pynini.cross('0', NEMO_SPACE) + graph_digit)
        ).optimize()
        # three digit
        graph_hundreds = hundreds + pynini.union(
            pynutil.delete('00'), insert_space + graph_tens, (pynini.cross("0", NEMO_SPACE) + graph_digit)
        )
        # "1xx" maps to bare "cento ..." (no leading "un").
        graph_hundreds |= (
            pynini.cross('1', 'cento')
            + insert_space
            + pynini.union(graph_tens, pynutil.delete("0") + graph_digit, pynutil.delete("00"))
        )
        self.hundreds = graph_hundreds.optimize()
        # three digit leading zeros
        graph_hundreds_component = pynini.union(graph_hundreds, pynutil.delete("0") + graph_tens)
        graph_hundreds_component_at_least_one_none_zero_digit = graph_hundreds_component | (
            pynutil.delete("00") + graph_digit
        )
        graph_hundreds_component_at_least_one_none_zero_digit_no_one = graph_hundreds_component | (
            pynutil.delete("00") + digits_no_one
        )
        # thousands
        # "001" thousand is the special form "mille"; other counts take "mila".
        graph_thousands_component_at_least_one_none_zero_digit = pynini.union(
            pynutil.delete("000") + graph_hundreds_component_at_least_one_none_zero_digit,
            graph_hundreds_component_at_least_one_none_zero_digit_no_one
            + pynutil.insert("mila")
            + ((insert_space + graph_hundreds_component_at_least_one_none_zero_digit) | pynutil.delete("000")),
            pynini.cross("001", "mille")
            + ((insert_space + graph_hundreds_component_at_least_one_none_zero_digit) | pynutil.delete("000")),
        )
        graph_thousands_component_at_least_one_none_zero_digit_no_one = pynini.union(
            pynutil.delete("000") + graph_hundreds_component_at_least_one_none_zero_digit_no_one,
            graph_hundreds_component_at_least_one_none_zero_digit_no_one
            + pynutil.insert("mila")
            + ((insert_space + graph_hundreds_component_at_least_one_none_zero_digit) | pynutil.delete("000")),
            pynini.cross("001", "mille")
            + ((insert_space + graph_hundreds_component_at_least_one_none_zero_digit) | pynutil.delete("000")),
        )
        # higher
        # "un milione" (negative weight) is preferred over a composed reading.
        graph_million = pynutil.add_weight(pynini.cross("000001", "un milione"), -0.001)
        graph_million |= graph_thousands_component_at_least_one_none_zero_digit_no_one + pynutil.insert(" milioni")
        graph_million |= pynutil.delete("000000")
        graph_million += insert_space
        graph_billion = pynutil.add_weight(pynini.cross("000001", "un miliardo"), -0.001)
        graph_billion |= graph_thousands_component_at_least_one_none_zero_digit_no_one + pynutil.insert(" miliardi")
        graph_billion |= pynutil.delete("000000")
        graph_billion += insert_space
        graph_trillion = pynutil.add_weight(pynini.cross("000001", "un trilione"), -0.001)
        graph_trillion |= graph_thousands_component_at_least_one_none_zero_digit_no_one + pynutil.insert(" trilioni")
        graph_trillion |= pynutil.delete("000000")
        graph_trillion += insert_space
        graph = (
            graph_trillion
            + graph_billion
            + graph_million
            + (graph_thousands_component_at_least_one_none_zero_digit | pynutil.delete("000000"))
        )
        # Zero-pad the input up to 24 digits, apply the grammar, then clean up
        # leading/trailing/doubled spaces between words.
        self.graph = (
            ((NEMO_DIGIT - "0") + pynini.closure(NEMO_DIGIT, 0))
            @ pynini.cdrewrite(pynini.closure(pynutil.insert("0")), "[BOS]", "", NEMO_SIGMA)
            @ NEMO_DIGIT ** 24
            @ graph
            @ pynini.cdrewrite(delete_space, "[BOS]", "", NEMO_SIGMA)
            @ pynini.cdrewrite(delete_space, "", "[EOS]", NEMO_SIGMA)
            @ pynini.cdrewrite(
                pynini.cross(pynini.closure(NEMO_WHITE_SPACE, 2), NEMO_SPACE), NEMO_ALPHA, NEMO_ALPHA, NEMO_SIGMA
            )
        )
        self.graph |= zero
        # Also accept separator-grouped digit strings (e.g. "2.000.000").
        self.graph = filter_punctuation(self.graph).optimize()
        optional_minus_graph = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", "\"true\" "), 0, 1)
        final_graph = optional_minus_graph + pynutil.insert("integer: \"") + self.graph + pynutil.insert("\"")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/cardinal.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_DIGIT,
NEMO_SIGMA,
NEMO_SPACE,
GraphFst,
delete_space,
insert_space,
)
from nemo_text_processing.text_normalization.es.graph_utils import (
cardinal_separator,
decimal_separator,
strip_cardinal_apocope,
)
from nemo_text_processing.text_normalization.it.utils import get_abs_path
from pynini.lib import pynutil
# Quantity words ("milioni", ...) plus digit/zero FSTs used by DecimalFst;
# digit and zero are inverted because the TSVs map word -> digit.
quantities = pynini.string_file(get_abs_path("data/numbers/quantities.tsv"))
digit = pynini.invert(pynini.string_file(get_abs_path("data/numbers/digit.tsv")))
zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/zero.tsv")))
def get_quantity(decimal_graph: "pynini.FstLike", cardinal_graph: "pynini.FstLike") -> "pynini.FstLike":
    """
    Attach a quantity word (from data/numbers/quantities.tsv) to a number, emitting
    either 'integer_part' + 'quantity' fields or a decimal followed by 'quantity'.
    Args:
        decimal_graph: FST tagging a decimal without its sign
        cardinal_graph: FST verbalizing cardinal integers
    Returns:
        fst: A pynini.FstLike object
    """
    short_cardinal = pynini.closure(NEMO_DIGIT, 1, 6) @ cardinal_graph
    # Drop any cardinal separators before applying the cardinal grammar.
    short_cardinal = pynini.cdrewrite(pynutil.delete(cardinal_separator), "", "", NEMO_SIGMA) @ short_cardinal
    integer_with_quantity = (
        pynutil.insert('integer_part: "')
        + short_cardinal
        + pynutil.insert('"')
        + NEMO_SPACE
        + pynutil.insert('quantity: "')
        + quantities
        + pynutil.insert('"')
    )
    decimal_with_quantity = decimal_graph + NEMO_SPACE + pynutil.insert('quantity: "') + quantities + pynutil.insert('"')
    return integer_with_quantity | decimal_with_quantity
class DecimalFst(GraphFst):
    """
    Finite state transducer for classifying decimal, e.g.
        -2 milioni: decimal { negative: "true" integer_part: "due" quantity: "milioni" preserve_order: true } -->
        meno due milioni
    Args:
        cardinal: CardinalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = True):
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)
        graph_digit = digit | zero
        if not deterministic:
            # Non-deterministic: allow any mix of digit/tens/hundreds readings.
            graph = pynini.union(graph_digit, cardinal.hundreds, cardinal.tens)
            graph += pynini.closure(insert_space + graph)
        else:
            # Deterministic: small hand-tuned weights pick one preferred
            # grouping of the fractional digits.
            graph = pynini.union(
                pynutil.add_weight(graph_digit + pynini.closure(insert_space + zero), -0.00001),
                pynutil.add_weight(cardinal.tens + pynini.closure(insert_space + zero), -0.00002),
                pynutil.add_weight(cardinal.hundreds + pynini.closure(insert_space + zero), 0.00001),
                pynutil.add_weight(
                    cardinal.tens
                    + pynini.closure(insert_space + cardinal.tens, 1)
                    + pynini.closure(insert_space + zero, 0, 1)
                    + (pynini.closure(insert_space + graph_digit, 0, 1) | pynini.closure(insert_space + zero, 0)),
                    -0.00002,
                ),  # Read out as tens and a possible trailing digit or zeroes
                zero
                + pynini.closure(insert_space + zero)
                + pynini.closure(insert_space + graph_digit),  # For cases such as "1,010"
            )
        # Technically decimals should be space delineated groups of three, e.g. (1,333 333). This removes any possible spaces
        strip_formatting = pynini.cdrewrite(delete_space, "", "", NEMO_SIGMA)
        graph = strip_formatting @ graph
        self.graph = graph.optimize()
        graph_separator = pynutil.delete(decimal_separator)
        optional_graph_negative = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", '"true" '), 0, 1)
        self.graph_fractional = pynutil.insert('fractional_part: "') + self.graph + pynutil.insert('"')
        # NOTE(review): strip_cardinal_apocope comes from the Spanish
        # graph_utils — presumably it normalizes apocopated forms; confirm
        # the direction against the es implementation.
        graph_integer = (
            strip_cardinal_apocope(cardinal.graph)
            if deterministic
            else pynini.union(cardinal.graph, strip_cardinal_apocope(cardinal.graph))
        )
        self.graph_integer = pynutil.insert('integer_part: "') + graph_integer + pynutil.insert('"')
        final_graph_wo_sign = self.graph_integer + graph_separator + insert_space + self.graph_fractional
        # Plain decimal or decimal/integer followed by a quantity word.
        self.final_graph_wo_negative = (
            final_graph_wo_sign | get_quantity(final_graph_wo_sign, cardinal.graph).optimize()
        )
        final_graph = optional_graph_negative + self.final_graph_wo_negative
        final_graph += pynutil.insert(" preserve_order: true")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/decimals.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_ALPHA, NEMO_DIGIT, GraphFst, insert_space
from nemo_text_processing.text_normalization.it.utils import get_abs_path, load_labels
from pynini.lib import pynutil
# Common domain endings and accepted symbol characters, read from TSV data
# files; only the first column (the literal string) of each row is used.
common_domains = [x[0] for x in load_labels(get_abs_path("data/electronic/domain.tsv"))]
symbols = [x[0] for x in load_labels(get_abs_path("data/electronic/symbols.tsv"))]
class ElectronicFst(GraphFst):
    """
    Finite state transducer for classifying electronic: email addresses
        e.g. "abc.def@studente.università.it" -> electronic { username: "abc.def" domain: "studente.università.it" preserve_order: true }
        e.g. "www.abc.com/123" -> electronic { protocol: "www." domain: "abc.com/123" preserve_order: true }
        e.g. "https://github.com/NVIDIA/electronic.py" -> electronic { protocol: "https://" domain: "github.com/NVIDIA/electronic.py" preserve_order: true }
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="classify", deterministic=deterministic)
        period = pynini.accep(".")
        known_domains = pynini.union(*common_domains)
        # Symbol characters excluding the period, which is handled separately.
        symbol_chars = pynini.union(*symbols) - period
        body_chars = pynini.closure(NEMO_ALPHA | NEMO_DIGIT | symbol_chars)
        body_chars_with_dot = pynini.closure(NEMO_ALPHA | NEMO_DIGIT | symbol_chars | period)
        # e-mail: everything before '@' is the username; '@' becomes a space.
        user_part = (
            pynutil.insert("username: \"")
            + body_chars_with_dot
            + pynutil.insert("\"")
            + pynini.cross('@', ' ')
        )
        # Generic host: something.something, optionally followed by a path/suffix.
        host = body_chars + period + body_chars
        domain_graph = (
            pynutil.insert("domain: \"")
            + host
            + pynini.closure((symbol_chars | period) + pynini.closure(body_chars, 1), 0, 1)
            + pynutil.insert("\"")
        )
        # Host ending in a known common domain (e.g. ".com"), no '@' required.
        domain_common_graph = (
            pynutil.insert("domain: \"")
            + body_chars
            + known_domains
            + pynini.closure((symbol_chars | period) + pynini.closure(body_chars, 1), 0, 1)
            + pynutil.insert("\"")
        )
        graph = (user_part + domain_graph) | domain_common_graph
        scheme = pynini.accep("https://") | pynini.accep("http://")
        www = (
            pynini.accep("www.") if deterministic else pynini.accep("www.") | pynini.cross("www.", "vu vu vu.")
        )
        protocol = scheme | www | (scheme + www)
        protocol = pynutil.insert("protocol: \"") + protocol + pynutil.insert("\"")
        graph |= protocol + insert_space + (domain_graph | domain_common_graph)
        self.graph = graph
        final_graph = self.add_tokens(self.graph + pynutil.insert(" preserve_order: true"))
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/electronic.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_SPACE, GraphFst
from pynini.lib import pynutil
class WordFst(GraphFst):
    """
    Finite state transducer for classifying word.
        e.g. mangiare -> tokens { name: "mangiare" }
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        # Fix: forward `deterministic` to the base class (it was previously
        # accepted but silently dropped), matching every other tagger here.
        super().__init__(name="word", kind="classify", deterministic=deterministic)
        # Any run of non-space characters becomes a generic `name` token.
        word = pynutil.insert("name: \"") + pynini.closure(NEMO_NOT_SPACE, 1) + pynutil.insert("\"")
        self.fst = word.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/taggers/word.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_extra_space,
delete_preserve_order,
)
from pynini.lib import pynutil
class MeasureFst(GraphFst):
    """
    Finite state transducer for verbalizing measure, e.g.
        measure { cardinal { integer: "due" units: "grammi" } } -> "due grammi"
    Args:
        decimal: decimal GraphFst
        cardinal: cardinal GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, decimal: GraphFst, cardinal: GraphFst, deterministic: bool):
        super().__init__(name="measure", kind="verbalize", deterministic=deterministic)
        # Strip the `units:` field down to its quoted contents.
        unit_graph = pynutil.delete("units: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        number_graph = cardinal.fst | decimal.fst
        # Number followed by unit ("due grammi"), or unit followed by number.
        graph = number_graph + pynini.accep(" ") + unit_graph
        graph |= unit_graph + delete_extra_space + number_graph
        graph += delete_preserve_order
        self.fst = self.delete_tokens(graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/measure.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.en.verbalizers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.it.verbalizers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.it.verbalizers.decimal import DecimalFst
from nemo_text_processing.text_normalization.it.verbalizers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.it.verbalizers.measure import MeasureFst
from nemo_text_processing.text_normalization.it.verbalizers.money import MoneyFst
class VerbalizeFst(GraphFst):
    """
    Composes other verbalizer grammars.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="verbalize", kind="verbalize", deterministic=deterministic)
        # Cardinal and decimal are built first because measure reuses both
        # and money reuses decimal.
        cardinal = CardinalFst(deterministic=deterministic)
        decimal = DecimalFst(deterministic=deterministic)
        electronic = ElectronicFst(deterministic=deterministic)
        whitelist = WhiteListFst(deterministic=deterministic)
        measure = MeasureFst(cardinal=cardinal, decimal=decimal, deterministic=deterministic)
        money = MoneyFst(decimal=decimal, deterministic=deterministic)
        # Union of every verbalizer: any token type may appear in the input.
        self.fst = (
            cardinal.fst
            | decimal.fst
            | electronic.fst
            | whitelist.fst
            | measure.fst
            | money.fst
        )
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/verbalize.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.verbalizers.word import WordFst
from nemo_text_processing.text_normalization.it.verbalizers.verbalize import VerbalizeFst
from pynini.lib import pynutil
class VerbalizeFinalFst(GraphFst):
    """
    Finite state transducer that verbalizes an entire sentence
    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
    """

    def __init__(self, deterministic: bool = True, cache_dir: str = None, overwrite_cache: bool = False):
        super().__init__(name="verbalize_final", kind="verbalize", deterministic=deterministic)
        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            far_file = os.path.join(cache_dir, f"it_tn_{deterministic}_deterministic_verbalizer.far")
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Cached grammar available: load it instead of rebuilding.
            self.fst = pynini.Far(far_file, mode="r")["verbalize"]
            logging.info(f'VerbalizeFinalFst graph was restored from {far_file}.')
        else:
            # A token body is either a typed verbalizer or a plain word.
            token_types = VerbalizeFst(deterministic=deterministic).fst | WordFst(deterministic=deterministic).fst
            # Peel off the `tokens { ... }` wrapper around each token.
            one_token = (
                pynutil.delete("tokens")
                + delete_space
                + pynutil.delete("{")
                + delete_space
                + token_types
                + delete_space
                + pynutil.delete("}")
            )
            sentence = delete_space + pynini.closure(one_token + delete_extra_space) + one_token + delete_space
            self.fst = sentence.optimize()
            if far_file:
                # Persist the compiled grammar for later runs.
                generator_main(far_file, {"verbalize": self.fst})
                logging.info(f"VerbalizeFinalFst grammars are saved to {far_file}.")
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/verbalize_final.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
GraphFst,
delete_preserve_order,
delete_space,
insert_space,
)
from pynini.lib import pynutil
class DecimalFst(GraphFst):
    """
    Finite state transducer for verbalizing decimal tokens, e.g.
        decimal { negative: "true" integer_part: "venti" fractional_part: "trentaquattro" quantity: "miliardi" } ->
            meno venti virgola trentaquattro
        decimal { integer_part: "un milione" fractional_part: "zero zero zero" quantity: "milioni" preserve_order: true } -->
            un milione virgola zero zero zero
        decimal { integer_part: "due" quantity: "milioni" preserve_order: true } -->
            due milioni

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        # NOTE(review): kind="classify" looks copy-pasted from the tagger — this class
        # deletes token fields, i.e. it verbalizes; other verbalizers pass kind="verbalize".
        # Confirm before changing (the kind may affect FAR naming).
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)
        # negative: "true" -> spoken minus ("meno").
        optional_sign = pynini.closure(pynini.cross("negative: \"true\"", "meno ") + delete_space, 0, 1)
        # Unquote the integer_part / fractional_part field values.
        integer = pynutil.delete("integer_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        fractional_default = (
            pynutil.delete("fractional_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        )
        # Decimal separator is read as "virgola" in Italian.
        conjunction = pynutil.insert(" virgola ")
        fractional = conjunction + fractional_default
        # Optional magnitude word ("milioni", "miliardi", ...), preceded by a space.
        quantity = (
            delete_space
            + insert_space
            + pynutil.delete("quantity: \"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        optional_quantity = pynini.closure(quantity, 0, 1)
        # Either integer + mandatory quantity, or integer + fractional (+ optional quantity).
        graph = optional_sign + pynini.union(
            (integer + quantity), (integer + delete_space + fractional + optional_quantity)
        )
        # Variant requiring the quantity field to be present; reused by other grammars (e.g. money).
        self.numbers_only_quantity = (
            optional_sign
            + pynini.union((integer + quantity), (integer + delete_space + fractional + quantity)).optimize()
        )
        self.graph = (graph + delete_preserve_order).optimize()
        graph += delete_preserve_order
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/decimal.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
NEMO_SIGMA,
NEMO_SPACE,
GraphFst,
delete_preserve_order,
)
from nemo_text_processing.text_normalization.es.graph_utils import shift_cardinal_gender, strip_cardinal_apocope
from nemo_text_processing.text_normalization.it.utils import get_abs_path
from pynini.lib import pynutil
fem = pynini.string_file((get_abs_path("data/money/currency_plural_fem.tsv")))
masc = pynini.string_file((get_abs_path("data/money/currency_plural_masc.tsv")))
fem_singular = pynini.project(fem, "input")
masc_singular = pynini.project(masc, "input")
fem_plural = pynini.project(fem, "output")
masc_plural = pynini.project(masc, "output")
class MoneyFst(GraphFst):
    """
    Finite state transducer for verbalizing money, e.g.
        money { currency_maj: "euro" integer_part: "un"} -> "un euro"
        money { integer_part: "quattro" currency_maj: "sterline" fractional_part: "venti" currency_min: "penny" preserve_order: true } -> "quattro sterline venti penny"

    Args:
        decimal: GraphFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="verbalize", deterministic=deterministic)

        # Major-currency field readers, restricted to the known currency names so the
        # surrounding number can agree in gender/number.
        maj_singular_masc = (
            pynutil.delete("currency_maj: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ masc_singular)
            + pynutil.delete("\"")
        )
        maj_singular_fem = (
            pynutil.delete("currency_maj: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ fem_singular)
            + pynutil.delete("\"")
        )
        maj_plural_masc = (
            pynutil.delete("currency_maj: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ masc_plural)
            + pynutil.delete("\"")
        )
        maj_plural_fem = (
            pynutil.delete("currency_maj: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ fem_plural)
            + pynutil.delete("\"")
        )
        maj_masc = maj_plural_masc | maj_singular_masc
        maj_fem = maj_plural_fem | maj_singular_fem

        # Minor-currency field readers (centesimi, penny, ...), same gender split.
        min_singular_masc = (
            pynutil.delete("currency_min: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ masc_singular)
            + pynutil.delete("\"")
        )
        min_singular_fem = (
            pynutil.delete("currency_min: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ fem_singular)
            + pynutil.delete("\"")
        )
        min_plural_masc = (
            pynutil.delete("currency_min: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ masc_plural)
            + pynutil.delete("\"")
        )
        min_plural_fem = (
            pynutil.delete("currency_min: \"")
            + (pynini.closure(NEMO_NOT_QUOTE, 1) @ fem_plural)
            + pynutil.delete("\"")
        )
        min_masc = min_plural_masc | min_singular_masc
        min_fem = min_plural_fem | min_singular_fem

        # Unquoted number fields.
        fractional_part = (
            pynutil.delete("fractional_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        )
        integer_part = pynutil.delete("integer_part: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")

        # Optional spoken conjunction between major and minor amounts
        # ("due euro e dieci" / "due euro con dieci").
        # FIX: the original inserted Spanish "y " (copied from the es grammar);
        # the Italian conjunction is "e ".
        optional_add_and = pynini.closure(pynutil.insert(pynini.union("con ", "e ")), 0, 1)

        # NOTE(review): shift_cardinal_gender / strip_cardinal_apocope come from the
        # Spanish graph_utils — verify they cover the Italian forms needed here.
        # *** currency_maj with integer amount ("quattro sterline").
        graph_integer_masc = integer_part + NEMO_SPACE + maj_masc
        graph_integer_fem = shift_cardinal_gender(integer_part) + NEMO_SPACE + maj_fem
        graph_integer = graph_integer_fem | graph_integer_masc

        # Integer major amount followed by a minor amount, with or without the minor
        # currency spelled out.
        graph_integer_with_minor_masc = (
            graph_integer_masc
            + NEMO_SPACE
            + pynini.union(
                optional_add_and + strip_cardinal_apocope(fractional_part),
                (optional_add_and + fractional_part + NEMO_SPACE + min_masc),
                (optional_add_and + shift_cardinal_gender(fractional_part) + NEMO_SPACE + min_fem),
            )
            + delete_preserve_order
        )
        graph_integer_with_minor_fem = (
            graph_integer_fem
            + NEMO_SPACE
            + pynini.union(
                optional_add_and + shift_cardinal_gender(fractional_part),
                (optional_add_and + fractional_part + NEMO_SPACE + min_masc),
                (optional_add_and + shift_cardinal_gender(fractional_part) + NEMO_SPACE + min_fem),
            )
            + delete_preserve_order
        )
        graph_integer_with_minor = graph_integer_with_minor_fem | graph_integer_with_minor_masc

        # Decimal amount followed by the major currency ("due virgola cinque milioni di euro").
        graph_decimal_masc = decimal.graph + NEMO_SPACE + maj_masc
        graph_decimal_fem = decimal.graph
        graph_decimal_fem += NEMO_SPACE + maj_fem
        graph_decimal = graph_decimal_fem | graph_decimal_masc
        # Insert the partitive "di" after a quantity word ("milioni di euro").
        graph_decimal = (
            pynini.cdrewrite(
                pynutil.insert(" di"), "quantity: \"" + pynini.closure(NEMO_NOT_QUOTE, 1), "\"", NEMO_SIGMA
            )
            @ graph_decimal
        )

        # Minor-only amounts ("venti penny").
        graph_minor_masc = fractional_part + NEMO_SPACE + min_masc + delete_preserve_order
        graph_minor_fem = shift_cardinal_gender(fractional_part) + NEMO_SPACE + min_fem + delete_preserve_order
        graph_minor = graph_minor_fem | graph_minor_masc

        graph = graph_integer | graph_integer_with_minor | graph_decimal | graph_minor
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/money.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for verbalizing cardinals
    e.g. cardinal { integer: "due" } -> "due"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="cardinal", kind="verbalize", deterministic=deterministic)

        # A "negative" field in the token is spoken as "meno".
        self.optional_sign = pynini.closure(pynini.cross("negative: \"true\" ", "meno "), 0, 1)
        # The quoted value with its surrounding quotes stripped; exposed for reuse
        # by other verbalizers (e.g. money).
        self.integer = pynutil.delete(" \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        # The full "integer: \"...\"" field.
        self.numbers = pynutil.delete("integer:") + self.integer
        self.fst = self.delete_tokens(self.optional_sign + self.numbers).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/cardinal.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NOT_QUOTE,
NEMO_SIGMA,
GraphFst,
delete_preserve_order,
insert_space,
)
from nemo_text_processing.text_normalization.it.utils import get_abs_path
from pynini.lib import pynutil
digit_no_zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/digit.tsv")))
zero = pynini.invert(pynini.string_file(get_abs_path("data/numbers/zero.tsv")))
graph_symbols = pynini.string_file(get_abs_path("data/electronic/symbols.tsv"))
server_common = pynini.string_file(get_abs_path("data/electronic/server_name.tsv"))
domain_common = pynini.string_file(get_abs_path("data/electronic/domain.tsv"))
class ElectronicFst(GraphFst):
    """
    Finite state transducer for verbalizing electronic
    e.g. electronic { username: "abc.def2" domain: "studenti.università.it" } ->
        "a b c punto d e f due chiocciola s t u d e n t i punto u n i v e r s i t à punto IT

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="verbalize", deterministic=deterministic)

        spoken_digit = digit_no_zero | zero

        # Accepts a non-empty run of non-space characters and puts one space
        # between every pair of adjacent characters.
        def _spread_chars():
            non_space = NEMO_NOT_QUOTE - pynini.accep(" ")
            return pynini.closure(non_space + insert_space) + non_space

        # Reads out symbols ("." -> "punto", ...) and digits anywhere in the string.
        read_out = pynini.cdrewrite(graph_symbols | spoken_digit, "", "", NEMO_SIGMA)

        # username: spell character by character, then verbalize symbols/digits.
        user_name = (pynutil.delete("username: \"") + _spread_chars() + pynutil.delete("\"")) @ read_out

        # Known server/domain names stay whole; anything else is spelled out
        # (the small weight makes the whole-word match preferred when available).
        piece = pynutil.add_weight(NEMO_NOT_QUOTE, weight=0.0001) | server_common | domain_common
        domain = (piece + pynini.closure(insert_space + piece)) @ read_out
        domain = pynutil.delete("domain: \"") + domain + pynutil.delete("\"")

        # protocol: spelled out, with only symbols verbalized (no digit reading).
        protocol = (
            pynutil.delete("protocol: \"")
            + _spread_chars() @ pynini.cdrewrite(graph_symbols, "", "", NEMO_SIGMA)
            + pynutil.delete("\"")
        )

        # Either (protocol?) domain — a URL — or username @ domain — an e-mail address.
        self.graph = (pynini.closure(protocol + pynini.accep(" "), 0, 1) + domain) | (
            user_name + pynini.accep(" ") + pynutil.insert("chiocciola ") + domain
        )
        self.fst = self.delete_tokens(self.graph + delete_preserve_order).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/electronic.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/verbalizers/__init__ .py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/data/__init__ .py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/data/numbers/__init__ .py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/data/electronic/__init__ .py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/data/money/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/it/data/measure/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import logging
import os
def get_abs_path(rel_path):
    """
    Get absolute path

    Args:
        rel_path: relative path to this file

    Returns absolute path
    """
    # os.path.join instead of manual `+ os.sep +` concatenation: same result for
    # relative inputs, but portable and robust to separator edge cases.
    abs_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), rel_path)
    # Best-effort existence check: warn (don't raise) so callers can decide.
    if not os.path.exists(abs_path):
        logging.warning(f'{abs_path} does not exist')
    return abs_path
def load_labels(abs_path):
    """
    loads relative path file as dictionary

    Args:
        abs_path: absolute path

    Returns dictionary of mappings
    """
    # Context manager ensures the file handle is closed even on error
    # (the original opened the file and never closed it).
    with open(abs_path, encoding='utf-8') as label_tsv:
        return list(csv.reader(label_tsv, delimiter="\t"))
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/utils.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Adapted from https://github.com/google/TextNormalizationCoveringGrammars
# Russian minimally supervised number grammar.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NON_BREAKING_SPACE, NEMO_SPACE
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
# Russian alphabet acceptors and transliteration helpers shared by the ru grammars.
RU_LOWER_ALPHA = "абвгдеёжзийклмнопрстуфхцчшщъыьэюя"
RU_UPPER_ALPHA = RU_LOWER_ALPHA.upper()
# Rebind the strings as single-character union acceptors.
RU_LOWER_ALPHA = pynini.union(*RU_LOWER_ALPHA).optimize()
RU_UPPER_ALPHA = pynini.union(*RU_UPPER_ALPHA).optimize()
RU_ALPHA = (RU_LOWER_ALPHA | RU_UPPER_ALPHA).optimize()
# Map vowels carrying a combining acute accent (U+0301) to vowel + apostrophe
# stress marker; also folds ё -> е.
RU_STRESSED_MAP = [
    ("А́", "А'"),
    ("Е́", "Е'"),
    ("Ё́", "Е'"),
    ("И́", "И'"),
    ("О́", "О'"),
    ("У́", "У'"),
    ("Ы́", "Ы'"),
    ("Э́", "Э'"),
    ("Ю́", "Ю'"),
    ("Я́", "Я'"),
    ("а́", "а'"),
    ("е́", "е'"),
    ("ё́", "е'"),
    ("и́", "и'"),
    ("о́", "о'"),
    ("у́", "у'"),
    ("ы́", "ы'"),
    ("э́", "э'"),
    ("ю́", "ю'"),
    ("я́", "я'"),
    ("ё", "е"),
    ("Ё", "Е"),
]
# Rewrites stressed characters while passing plain Cyrillic letters through.
REWRITE_STRESSED = pynini.closure(pynini.string_map(RU_STRESSED_MAP).optimize() | RU_ALPHA).optimize()
# Latin <-> Cyrillic transliteration tables.
TO_CYRILLIC = pynini.string_file(get_abs_path("data/latin_to_cyrillic.tsv")).optimize()
TO_LATIN = pynini.invert(TO_CYRILLIC).optimize()
# Cyrillic letters plus regular and non-breaking spaces.
RU_ALPHA_OR_SPACE = pynini.union(RU_ALPHA, NEMO_SPACE, NEMO_NON_BREAKING_SPACE).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/alphabet.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for classifying time, e.g.
        "02:15" -> time { hours: "два часа пятнадцать минут" }

    Args:
        number_names: number_names for cardinal and ordinal numbers
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, number_names: dict, deterministic: bool = True):
        super().__init__(name="time", kind="classify", deterministic=deterministic)

        # hour -> following hour, as ordinal and cardinal words; used for the
        # "minutes past/to the next hour" readings below.
        increment_hour_ordinal = pynini.string_file(get_abs_path("data/time/increment_hour_ordinal.tsv"))
        increment_hour_cardinal = pynini.string_file(get_abs_path("data/time/increment_hour_cardinal.tsv"))
        convert_hour = pynini.string_file(get_abs_path("data/time/time_convert.tsv"))

        # Strip an optional leading zero before verbalizing ("05" -> "пять").
        number = pynini.closure(pynini.cross("0", ""), 0, 1) + number_names['cardinal_names_nominative']
        hour_options = pynini.project(increment_hour_ordinal, "input")
        hour_options = hour_options | pynini.project(convert_hour, "output")

        # Russian numeral agreement for "hour": 01/21 -> "час",
        # 02-04/22-23 -> "часа", everything else -> "часов".
        hour_exeption_ends_with_one = pynini.union(*["01", "21"])
        hour_exeption_ends_rest = pynini.union(*["02", "03", "04", "22", "23"])
        hour_other = (
            pynini.difference(hour_options, pynini.union(hour_exeption_ends_with_one, hour_exeption_ends_rest))
        ).optimize()
        hour = hour_exeption_ends_with_one @ number + pynutil.insert(" час")
        hour |= hour_exeption_ends_rest @ number + pynutil.insert(" часа")
        hour |= hour_other @ number + pynutil.insert(" часов")
        optional_and = pynini.closure(pynutil.insert("и "), 0, 1)

        # Same agreement pattern for minutes: x1 -> "минута",
        # x2-x4 (except 12-14) -> "минуты", everything else -> "минут".
        digits = pynini.union(*[str(x) for x in range(10)])
        mins_start = pynini.union(*"012345")
        mins_options = mins_start + digits
        mins_exception_ends_with_one = mins_start + pynini.accep("1")
        mins_exception_ends_rest = pynini.difference(
            mins_start + pynini.union(*"234"), pynini.union(*["12", "13", "14"])
        )
        mins_other = pynini.difference(
            mins_options, pynini.union(mins_exception_ends_with_one, mins_exception_ends_rest)
        )
        minutes = mins_exception_ends_with_one @ number + pynutil.insert(" минута")
        minutes |= mins_exception_ends_rest @ number + pynutil.insert(" минуты")
        minutes |= mins_other @ number + pynutil.insert(" минут")
        # Exposed for reuse by other grammars.
        self.minutes = minutes.optimize()
        # 17:15 -> "семнадцать часов и пятнадцать минут"
        hm = (
            pynutil.insert("hours: \"")
            + hour.optimize()
            + pynutil.insert("\"")
            + (pynini.cross(":", " ") + pynutil.insert("minutes: \"") + optional_and + minutes.optimize())
            + pynutil.insert("\"")
            + pynutil.insert(" preserve_order: true")
        )
        # Exact hour ("17:00") drops the minutes part entirely.
        h = pynutil.insert("hours: \"") + hour + pynutil.insert("\"") + pynutil.delete(":00")
        self.graph_preserve_order = (hm | h).optimize()
        # 17:15 -> "пятнадцать минут шестого"
        # Requires permutations for the correct verbalization
        self.increment_hour_ordinal = pynini.compose(hour_options, increment_hour_ordinal).optimize()
        m_next_h = (
            pynutil.insert("hours: \"")
            + self.increment_hour_ordinal
            + pynutil.insert("\"")
            + pynini.cross(":", " ")
            + pynutil.insert("minutes: \"")
            + minutes
            + pynutil.insert("\"")
        )
        # 17:45 -> "без пятнадцати минут шесть"
        # Requires permutations for the correct verbalization
        self.mins_to_h = pynini.string_file(get_abs_path("data/time/minutes_to_hour.tsv")).optimize()
        self.increment_hour_cardinal = pynini.compose(hour_options, increment_hour_cardinal).optimize()
        m_to_h = (
            pynutil.insert("hours: \"")
            + self.increment_hour_cardinal
            + pynutil.insert("\"")
            + pynini.cross(":", " ")
            + pynutil.insert("minutes: \"без ")
            + self.mins_to_h
            + pynutil.insert("\"")
        )
        self.final_graph = m_next_h | self.graph_preserve_order | m_to_h
        self.fst = self.add_tokens(self.final_graph)
        self.fst = self.fst.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/time.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NON_BREAKING_SPACE,
NEMO_NOT_QUOTE,
NEMO_SPACE,
GraphFst,
insert_space,
)
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
class MeasureFst(GraphFst):
    """
    Finite state transducer for classifying measure, e.g.
        "2 кг" -> measure { cardinal { integer: "два килограма" } }
    This class also converts words containing numbers and letters
    e.g. "тест-8" —> "тест восемь"
    e.g. "тест-1,02" —> "тест одна целая две сотых"

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="measure", kind="classify", deterministic=deterministic)
        # adding weight to make sure the space is preserved for ITN
        delete_space = pynini.closure(
            pynutil.add_weight(pynutil.delete(pynini.union(NEMO_SPACE, NEMO_NON_BREAKING_SPACE)), -1), 0, 1
        )
        cardinal_graph = cardinal.cardinal_numbers_default
        cardinal_graph_nominative = cardinal.cardinal_numbers_nominative
        # abbreviation -> spoken unit name ("кг" -> "килограмма"/...).
        graph_unit = pynini.string_file(get_abs_path("data/measurements.tsv"))
        optional_graph_negative = cardinal.optional_graph_negative
        # Prefer a non-breaking space between value and unit (negative weight wins).
        space_for_units = (
            pynutil.add_weight(pynutil.insert(NEMO_NON_BREAKING_SPACE), -0.1)
            | pynutil.add_weight(pynutil.insert(NEMO_SPACE), 0.1)
        ).optimize()
        # "/" in compound units is spoken as "в" or "за" ("км/ч" -> "км в ч").
        slash_unit = (pynini.cross("/", "в") | pynini.cross("/", "за")) + space_for_units + graph_unit
        unit_slash_unit = pynutil.add_weight(graph_unit + space_for_units + slash_unit, -0.1)
        default_units = pynutil.insert("units: \"") + (graph_unit | unit_slash_unit) + pynutil.insert("\"")
        slash_units = pynutil.insert("units: \"") + slash_unit + pynutil.insert("\"")
        # Decimal value followed by a unit (with or without the slash form).
        subgraph_decimal = decimal.final_graph + ((delete_space + default_units) | slash_units)
        # Cardinal value followed by a unit.
        cardinal_space = (
            pynutil.insert("cardinal { ")
            + optional_graph_negative
            + pynutil.insert("integer: \"")
            + cardinal_graph
            + (
                (delete_space + pynutil.insert("\"") + pynutil.insert(" } ") + default_units)
                | (pynutil.insert("\"") + pynutil.insert(" } ") + slash_units)
            )
        )
        # Mixed alphanumeric tokens: number(-)/word and word(-)/number combinations.
        cardinal_optional_dash_alpha = (
            pynutil.insert("cardinal { integer: \"")
            + cardinal_graph
            + pynini.closure(pynini.cross('-', ''), 0, 1)
            + pynutil.insert("\" } units: \"")
            + pynini.closure(RU_ALPHA, 1)
            + pynutil.insert("\"")
        )
        alpha_optional_dash_cardinal = (
            pynutil.insert("units: \"")
            + pynini.closure(RU_ALPHA, 1)
            + pynini.closure(pynini.cross('-', ''), 0, 1)
            + pynutil.insert("\"")
            + pynutil.insert(" cardinal { integer: \"")
            + cardinal_graph_nominative
            + pynutil.insert("\" } preserve_order: true")
        )
        decimal_dash_alpha = (
            decimal.final_graph
            + pynini.cross('-', '')
            + pynutil.insert(" units: \"")
            + pynini.closure(RU_ALPHA, 1)
            + pynutil.insert("\"")
        )
        alpha_dash_decimal = (
            pynutil.insert("units: \"")
            + pynini.closure(RU_ALPHA, 1)
            + pynini.cross('-', '')
            + pynutil.insert("\" ")
            + decimal.final_graph
            + pynutil.insert(" preserve_order: true")
        )
        self.tagger_graph_default = (subgraph_decimal | cardinal_space).optimize()
        tagger_graph = (
            self.tagger_graph_default
            | cardinal_optional_dash_alpha
            | alpha_optional_dash_cardinal
            | decimal_dash_alpha
            | alpha_dash_decimal
        ).optimize()
        # verbalizer
        # The verbalizer is composed directly onto the tagger output below, so the
        # final fst goes from written form straight to spoken words.
        unit = pynutil.delete("units: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"") + delete_space
        optional_sign = pynini.closure(pynini.cross("negative: \"true\" ", "минус "), 0, 1)
        integer = pynutil.delete(" \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        integer_part = pynutil.delete("integer_part:") + integer
        fractional_part = pynutil.delete("fractional_part:") + integer
        optional_quantity_part = pynini.closure(
            pynini.accep(" ")
            + pynutil.delete("quantity: \"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\""),
            0,
            1,
        )
        graph_decimal = optional_sign + integer_part + pynini.accep(" ") + fractional_part + optional_quantity_part
        graph_decimal = pynutil.delete("decimal {") + delete_space + graph_decimal + delete_space + pynutil.delete("}")
        graph_cardinal = (
            pynutil.delete("cardinal {")
            + delete_space
            + optional_sign
            + pynutil.delete("integer: \"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
            + delete_space
            + pynutil.delete("}")
        )
        verbalizer_graph = (graph_cardinal | graph_decimal) + delete_space + insert_space + unit
        # SH adds "preserve_order: true" by default
        preserve_order = pynutil.delete("preserve_order:") + delete_space + pynutil.delete("true") + delete_space
        verbalizer_graph |= (
            unit
            + insert_space
            + (graph_cardinal | graph_decimal)
            + delete_space
            + pynini.closure(preserve_order, 0, 1)
        )
        self.verbalizer_graph = verbalizer_graph.optimize()
        # Compose tagger and verbalizer, then re-wrap the spoken result as a plain
        # cardinal token so downstream processing treats it as already verbalized.
        final_graph = (tagger_graph @ verbalizer_graph).optimize()
        self.fst = self.add_tokens(
            pynutil.insert("cardinal { integer: \"") + final_graph + pynutil.insert("\" }")
        ).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/measure.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_DIGIT, GraphFst, delete_space, insert_space
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA_OR_SPACE
from pynini.lib import pynutil
class TelephoneFst(GraphFst):
    """
    Finite state transducer for classifying telephone numbers (country code + number part).
    E.g.
    "8-913-983-56-01" -> telephone { number_part: "восемь девятьсот тринадцать девятьсот восемьдесят три пятьдесят шесть ноль один" }

    Args:
        number_names: number_names for cardinal and ordinal numbers
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, number_names: dict, deterministic: bool = True):
        super().__init__(name="telephone", kind="classify", deterministic=deterministic)

        # "-" between phone-number groups is verbalized as a plain space
        dash_to_space = pynini.cross("-", " ")
        cardinal = number_names["cardinal_names_nominative"]

        # optional "+" (lightly penalized deletion), then the country-code digits
        country_code = (
            pynutil.insert("country_code: \"")
            + pynini.closure(pynutil.add_weight(pynutil.delete("+"), 0.1), 0, 1)
            + cardinal
            + dash_to_space
            + pynutil.insert("\"")
        )
        optional_country_code = pynini.closure(country_code + insert_space, 0, 1)

        # 3-3-2-2 digit groups; a leading "0" in the final group is read out as "ноль"
        last_group = NEMO_DIGIT ** 2 @ (pynini.closure(pynini.cross("0", "ноль ")) + cardinal)
        number_part = (
            NEMO_DIGIT ** 3 @ cardinal
            + dash_to_space
            + NEMO_DIGIT ** 3 @ cardinal
            + dash_to_space
            + NEMO_DIGIT ** 2 @ cardinal
            + dash_to_space
            + last_group
        )
        number_part = pynutil.insert("number_part: \"") + number_part + pynutil.insert("\"")
        tagger_graph = (optional_country_code + number_part).optimize()

        # verbalizer: strip the field markup, keep only the Russian words
        verbalized_country = (
            pynutil.delete("country_code: \"")
            + pynini.closure(RU_ALPHA_OR_SPACE, 1)
            + pynutil.delete("\"")
            + delete_space
        )
        verbalized_number = (
            pynutil.delete("number_part: \"") + pynini.closure(RU_ALPHA_OR_SPACE, 1) + pynutil.delete("\"")
        )
        verbalizer_graph = (pynini.closure(verbalized_country, 0, 1) + verbalized_number).optimize()

        # compose tagger and verbalizer here; the classify stage re-wraps the plain text
        self.final_graph = (tagger_graph @ verbalizer_graph).optimize()
        self.fst = self.add_tokens(
            pynutil.insert("number_part: \"") + self.final_graph + pynutil.insert("\"")
        ).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/telephone.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Adapted from https://github.com/google/TextNormalizationCoveringGrammars
# Russian minimally supervised number grammar.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_SIGMA, GraphFst
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
class OrdinalFst(GraphFst):
    """
    Finite state transducer for classifying ordinals, e.g.
    "2" -> ordinal { integer: "второе" }

    Args:
        number_names: number_names for cardinal and ordinal numbers
        alternative_formats: alternative format for cardinal and ordinal numbers
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, number_names: dict, alternative_formats: dict, deterministic=False):
        super().__init__(name="ordinal", kind="classify", deterministic=deterministic)

        one_thousand_alternative = alternative_formats['one_thousand_alternative']
        separators = alternative_formats['separators']

        # ordinal names, optionally rewritten through the "one thousand" alternative form
        ordinal = number_names['ordinal_number_names']
        ordinal |= ordinal @ one_thousand_alternative
        # accept digit strings (with separator variants), then map to ordinal names
        ordinal_numbers = separators @ ordinal

        # to handle cases like 2-ая: the written ending after the dash is matched
        # against ordinal_endings.tsv (presumably enforcing agreement with the
        # verbalized ordinal — see the data file), then the "-<ending>" is dropped
        endings = pynini.string_file(get_abs_path("data/numbers/ordinal_endings.tsv"))
        not_dash = pynini.closure(pynini.difference(NEMO_SIGMA, "-"))
        # delete a trailing "-<non-dash chars>" at end of string
        del_ending = pynini.cdrewrite(pynini.cross("-" + not_dash, ""), "", "[EOS]", NEMO_SIGMA)
        ordinal_numbers_marked = (
            ((separators @ ordinal).optimize() + pynini.accep("-") + not_dash).optimize()
            @ (NEMO_SIGMA + endings).optimize()
            @ del_ending
        ).optimize()
        self.ordinal_numbers = ordinal_numbers

        # "03" -> remove leading zeros and verbalize
        leading_zeros = pynini.closure(pynini.cross("0", ""))
        self.ordinal_numbers_with_leading_zeros = (leading_zeros + ordinal_numbers).optimize()

        final_graph = (ordinal_numbers | ordinal_numbers_marked).optimize()
        final_graph = pynutil.insert("integer: \"") + final_graph + pynutil.insert("\"")
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/ordinal.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_CHAR, GraphFst, convert_space
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA, TO_CYRILLIC
from nemo_text_processing.text_normalization.ru.utils import get_abs_path, load_labels
from pynini.lib import pynutil
class WhiteListFst(GraphFst):
    """
    Finite state transducer for classifying whitelisted tokens, loaded from
    "data/whitelist.tsv" (fully replaced by ``input_file`` when given), plus
    measurement units from "data/measurements.tsv" and Latin-to-Cyrillic letter
    transliteration. A matched token is replaced by its expansion and tagged as
    tokens { name: "..." }.
    This class has highest priority among all classifier grammars.

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        input_file: path to a file with whitelist replacements
    """

    def __init__(self, input_case: str, deterministic: bool = True, input_file: str = None):
        super().__init__(name="whitelist", kind="classify", deterministic=deterministic)

        def _get_whitelist_graph(input_case, file):
            """Load a TSV of (key, expansion) pairs and build a string-map FST."""
            whitelist = load_labels(file)
            # NOTE(review): the original code lower-cased the key in two byte-identical
            # if/else branches for both "lower_cased" and "cased" input; the dead
            # branch is collapsed here — behavior is unchanged.
            whitelist = [[x[0].lower()] + x[1:] for x in whitelist]
            return pynini.string_map(whitelist)

        graph = _get_whitelist_graph(input_case, get_abs_path("data/whitelist.tsv"))
        if input_file:
            # a user-provided whitelist fully replaces the default one
            graph = _get_whitelist_graph(input_case, input_file)

        units_graph = _get_whitelist_graph(input_case, file=get_abs_path("data/measurements.tsv"))
        # do not replace single Cyrillic-letter units (like `м`); single non-letter
        # symbols such as `°` and `%` and all multi-char units are still replaced
        units_graph = pynini.compose((NEMO_CHAR ** (2, ...) | pynini.difference(NEMO_CHAR, RU_ALPHA)), units_graph)
        graph |= units_graph.optimize()

        # transliterate runs of Latin letters to Cyrillic, space-separated
        graph |= TO_CYRILLIC + pynini.closure(pynutil.insert(" ") + TO_CYRILLIC)

        self.final_graph = convert_space(graph)
        self.fst = (pynutil.insert("name: \"") + self.final_graph + pynutil.insert("\"")).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/whitelist.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.taggers.punctuation import PunctuationFst
from nemo_text_processing.text_normalization.ru.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.ru.taggers.date import DateFst
from nemo_text_processing.text_normalization.ru.taggers.decimals import DecimalFst
from nemo_text_processing.text_normalization.ru.taggers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.ru.taggers.measure import MeasureFst
from nemo_text_processing.text_normalization.ru.taggers.money import MoneyFst
from nemo_text_processing.text_normalization.ru.taggers.number_names import get_alternative_formats, get_number_names
from nemo_text_processing.text_normalization.ru.taggers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.ru.taggers.telephone import TelephoneFst
from nemo_text_processing.text_normalization.ru.taggers.time import TimeFst
from nemo_text_processing.text_normalization.ru.taggers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.ru.taggers.word import WordFst
from pynini.lib import pynutil
class ClassifyFst(GraphFst):
    """
    Final class that composes all other classification grammars. This class can process an entire sentence, that is lower cased.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
        whitelist: path to a file with whitelist replacements
    """

    def __init__(
        self,
        input_case: str,
        deterministic: bool = False,
        cache_dir: str = None,
        overwrite_cache: bool = False,
        whitelist: str = None,
    ):
        super().__init__(name="tokenize_and_classify", kind="classify", deterministic=deterministic)

        if deterministic:
            # Ru TN is inherently ambiguous (case/number agreement), so only the
            # non-deterministic mode is supported.
            raise ValueError(
                'Ru TN only supports non-deterministic cases and produces multiple normalization options.'
            )

        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            whitelist_file = os.path.basename(whitelist) if whitelist else ""
            far_file = os.path.join(
                cache_dir, f"_{input_case}_ru_tn_{deterministic}_deterministic{whitelist_file}.far"
            )
        if not overwrite_cache and far_file and os.path.exists(far_file):
            self.fst = pynini.Far(far_file, mode="r")["tokenize_and_classify"]
            # lazy %-style logging args: the message is only formatted if INFO is enabled
            logging.info("ClassifyFst.fst was restored from %s.", far_file)
        else:
            logging.info("Creating ClassifyFst grammars. This might take some time...")
            number_names = get_number_names()
            alternative_formats = get_alternative_formats()

            # build all sub-grammars; relative priority is set only via the weights below
            self.cardinal = CardinalFst(
                number_names=number_names, alternative_formats=alternative_formats, deterministic=deterministic
            )
            cardinal_graph = self.cardinal.fst
            self.ordinal = OrdinalFst(
                number_names=number_names, alternative_formats=alternative_formats, deterministic=deterministic
            )
            ordinal_graph = self.ordinal.fst
            self.decimal = DecimalFst(cardinal=self.cardinal, deterministic=deterministic)
            decimal_graph = self.decimal.fst
            self.measure = MeasureFst(cardinal=self.cardinal, decimal=self.decimal, deterministic=deterministic)
            measure_graph = self.measure.fst
            self.date = DateFst(number_names=number_names, deterministic=deterministic)
            date_graph = self.date.fst
            word_graph = WordFst(deterministic=deterministic).fst
            self.time = TimeFst(number_names=number_names, deterministic=deterministic)
            time_graph = self.time.fst
            self.telephone = TelephoneFst(number_names=number_names, deterministic=deterministic)
            telephone_graph = self.telephone.fst
            self.electronic = ElectronicFst(deterministic=deterministic)
            electronic_graph = self.electronic.fst
            self.money = MoneyFst(cardinal=self.cardinal, decimal=self.decimal, deterministic=deterministic)
            money_graph = self.money.fst
            self.whitelist = WhiteListFst(input_case=input_case, deterministic=deterministic, input_file=whitelist)
            whitelist_graph = self.whitelist.fst
            punct_graph = PunctuationFst(deterministic=deterministic).fst

            # lower weight = higher priority; plain words (weight 100) are the fallback
            classify = (
                pynutil.add_weight(whitelist_graph, 1.01)
                | pynutil.add_weight(time_graph, 1.1)
                | pynutil.add_weight(date_graph, 1.09)
                | pynutil.add_weight(decimal_graph, 1.1)
                | pynutil.add_weight(measure_graph, 0.9)
                | pynutil.add_weight(cardinal_graph, 1.1)
                | pynutil.add_weight(ordinal_graph, 1.1)
                | pynutil.add_weight(money_graph, 1.1)
                | pynutil.add_weight(telephone_graph, 1.1)
                | pynutil.add_weight(electronic_graph, 1.1)
                | pynutil.add_weight(word_graph, 100)
            )

            # wrap every classified span into "tokens { ... }", allowing punctuation
            # tokens before and/or after each one
            punct = pynutil.insert("tokens { ") + pynutil.add_weight(punct_graph, weight=1.1) + pynutil.insert(" }")
            token = pynutil.insert("tokens { ") + classify + pynutil.insert(" }")
            token_plus_punct = (
                pynini.closure(punct + pynutil.insert(" ")) + token + pynini.closure(pynutil.insert(" ") + punct)
            )
            graph = token_plus_punct + pynini.closure(pynutil.add_weight(delete_extra_space, 1.1) + token_plus_punct)
            graph = delete_space + graph + delete_space
            self.fst = graph.optimize()

            if far_file:
                generator_main(far_file, {"tokenize_and_classify": self.fst})
                logging.info("ClassifyFst grammars are saved to %s.", far_file)
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/tokenize_and_classify.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, NEMO_SIGMA, NEMO_SPACE, GraphFst
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
class MoneyFst(GraphFst):
    """
    Finite state transducer for classifying money, e.g.
    "5руб." -> money { "пять рублей" }

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True):
        super().__init__(name="money", kind="classify", deterministic=deterministic)

        numbers = cardinal.cardinal_numbers_default
        decimals = decimal.final_graph

        currency_sing = pynini.string_file(get_abs_path("data/currency/currency_singular.tsv"))
        currency_plur = pynini.string_file(get_abs_path("data/currency/currency_plural.tsv"))

        # strongly-negative weight makes sure the delimiter space is preserved for ITN
        maybe_drop_space = pynini.closure(pynutil.add_weight(pynini.cross(NEMO_SPACE, ""), -100), 0, 1)

        def tag_currency(unit_fst):
            """Wrap a currency-name FST into ` currency: "..."` markup."""
            return maybe_drop_space + pynutil.insert(" currency: \"") + unit_fst + pynutil.insert("\"")

        unit_singular = tag_currency(currency_sing)
        unit_plural = tag_currency(currency_plur)

        # exactly "1" takes the singular currency form; every other integer the plural
        one = pynini.compose(pynini.accep("1"), numbers).optimize()
        graph_one = pynutil.insert("integer_part: \"") + one + pynutil.insert("\"") + unit_singular
        graph_many = (
            pynutil.insert("integer_part: \"")
            + ((NEMO_SIGMA - "1") @ numbers)
            + pynutil.insert("\"")
            + unit_plural
        )
        graph_integer = graph_many | graph_one
        graph_decimal = decimals + unit_plural
        tagger_graph = (graph_integer.optimize() | graph_decimal.optimize()).optimize()

        # verbalizer: strip the field markup, keep only the words
        quoted = pynutil.delete("\"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")

        def drop_field(label):
            """Delete `<label>: ` and the surrounding quotes, keep the value."""
            return pynutil.delete(label + ": ") + quoted

        unit = pynini.accep(NEMO_SPACE) + drop_field("currency")
        verbalize_cardinal = (drop_field("integer_part") + unit).optimize()
        verbalize_decimal = (
            pynutil.delete('decimal { ')
            + drop_field("integer_part")
            + pynini.accep(" ")
            + drop_field("fractional_part")
            + pynini.closure(pynini.accep(NEMO_SPACE) + drop_field("quantity"), 0, 1)
            + pynutil.delete(" }")
            + unit
        )
        verbalizer_graph = (verbalize_cardinal | verbalize_decimal).optimize()

        # compose tagger and verbalizer; the classify stage re-wraps the plain text
        self.final_graph = (tagger_graph @ verbalizer_graph).optimize()
        self.fst = self.add_tokens(
            pynutil.insert("integer_part: \"") + self.final_graph + pynutil.insert("\"")
        ).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/money.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Adapted from https://github.com/google/TextNormalizationCoveringGrammars
# Russian minimally supervised number grammar.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_DIGIT,
NEMO_SIGMA,
NEMO_SPACE,
GraphFst,
insert_space,
)
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA, TO_CYRILLIC
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for classifying cardinals, e.g.
    "1 001" -> cardinal { integer: "тысяча один" }

    Args:
        number_names: number_names for cardinal and ordinal numbers
        alternative_formats: alternative number formats
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, number_names: dict, alternative_formats: dict, deterministic: bool = False):
        super().__init__(name="cardinal", kind="classify", deterministic=deterministic)

        # all inflected forms vs. nominative-only forms of the cardinal names
        self.cardinal_numbers_default = self.get_cardinal_numbers(number_names, alternative_formats, mode="all")
        self.cardinal_numbers_nominative = self.get_cardinal_numbers(
            number_names, alternative_formats, mode="nominative"
        )
        # optional leading "-" tagged as negative: "true"
        self.optional_graph_negative = pynini.closure(
            pynutil.insert("negative: ") + pynini.cross("-", "\"true\"") + insert_space, 0, 1
        )
        self.cardinal_numbers_with_optional_negative = (
            self.optional_graph_negative
            + pynutil.insert("integer: \"")
            + self.cardinal_numbers_default
            + pynutil.insert("\"")
        )
        # "03" -> remove leading zeros and verbalize
        leading_zeros = pynini.closure(pynini.cross("0", ""))
        self.cardinal_numbers_with_leading_zeros = (leading_zeros + self.cardinal_numbers_default).optimize()

        # "123" -> "один два три" (digit-by-digit reading, used by serials)
        single_digits_graph = pynini.string_file(get_abs_path("data/numbers/cardinals_nominative_case.tsv")).optimize()
        single_digits_graph = pynini.compose(NEMO_DIGIT, single_digits_graph)
        self.single_digits_graph = single_digits_graph + pynini.closure(insert_space + single_digits_graph)

        # optional quantity word after the number, loaded from quantity.tsv;
        # an existing space is slightly preferred over inserting one
        optional_quantity = pynini.string_file(get_abs_path("data/numbers/quantity.tsv")).optimize()
        optional_quantity = pynutil.insert("quantity: \"") + optional_quantity + pynutil.insert("\"")
        optional_quantity = pynini.closure(
            (pynutil.add_weight(pynini.accep(NEMO_SPACE), -0.1) | insert_space) + optional_quantity, 0, 1
        )
        serial_graph = self.get_serial_graph()

        final_graph = (
            self.optional_graph_negative
            + pynutil.insert("integer: \"")
            + self.cardinal_numbers_with_leading_zeros
            + pynutil.insert("\"")
            + optional_quantity
        ).optimize()
        # plain cardinals are preferred (negative weight); serial readings are
        # heavily penalized so they only win when nothing else matches
        final_graph = pynutil.add_weight(final_graph, -0.1)
        final_graph |= pynutil.insert("integer: \"") + pynutil.add_weight(serial_graph, 10) + pynutil.insert("\"")
        self.final_graph = final_graph

        # to cover cases "2-х" -> "двух" (this is not covered by ordinal endings):
        # drop the "-х" suffix, then keep only verbalizations that end in "х"
        final_graph |= pynini.compose(
            pynini.compose(NEMO_DIGIT ** (1, ...) + pynini.cross('-х', ''), final_graph),
            NEMO_SIGMA + pynini.accep("х\"") + NEMO_SIGMA,
        )
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()

    def get_cardinal_numbers(self, number_names: dict, alternative_formats: dict, mode: str = "all"):
        """Returns cardinal numbers names graph.

        Args:
            number_names: number_names for cardinal and ordinal numbers
            alternative_formats: alternative number formats
            mode: "all" - to return graph that includes all Ru cases, "nominative" to return only the nominative form

        Raises:
            ValueError: if ``mode`` is neither "all" nor "nominative".
        """
        if mode == "all":
            cardinal_names = number_names['cardinal_number_names']
        elif mode == "nominative":
            cardinal_names = number_names['cardinal_names_nominative']
        else:
            raise ValueError(f'{mode} is not supported.')
        one_thousand_alternative = alternative_formats['one_thousand_alternative']
        separators = alternative_formats['separators']
        # allow both the standard and the "one thousand" alternative verbalization
        cardinal_numbers = cardinal_names | pynini.compose(cardinal_names, one_thousand_alternative)
        # accept digit strings in any of the supported separator formats
        cardinal_numbers = pynini.compose(separators, cardinal_numbers)
        return cardinal_numbers

    def get_serial_graph(self):
        """
        Finite state transducer for classifying serial.
        The serial is a combination of digits, letters and dashes, e.g.:
        c325-b -> tokens { cardinal { integer: "си три два пять би" } }
        """
        num_graph = self.single_digits_graph
        # Latin letters go through TO_CYRILLIC; Cyrillic letters are accepted as-is
        alpha = TO_CYRILLIC | RU_ALPHA
        # "-" and "/" become spaces; a space is also inserted between adjacent units
        delimiter = insert_space | pynini.cross("-", " ") | pynini.cross("/", " ")
        letter_num = pynini.closure(alpha + delimiter, 1) + num_graph
        num_letter = pynini.closure(num_graph + delimiter, 1) + alpha
        num_delimiter_num = pynini.closure(num_graph + delimiter, 1) + num_graph
        next_alpha_or_num = pynini.closure(delimiter + (alpha | num_graph))
        serial_graph = (letter_num | num_letter | num_delimiter_num) + next_alpha_or_num
        # at least 1 alpha and 1 digit is present
        at_least_one_alpha_num = (
            NEMO_SIGMA + (RU_ALPHA | pynini.project(TO_CYRILLIC, "input")) + NEMO_SIGMA + NEMO_DIGIT + NEMO_SIGMA
        ) | (NEMO_SIGMA + NEMO_DIGIT + NEMO_SIGMA + (RU_ALPHA | pynini.project(TO_CYRILLIC, "input")) + NEMO_SIGMA)
        serial_graph = pynini.compose(at_least_one_alpha_num, serial_graph.optimize()).optimize()
        # numbers only with 2+ delimiters
        serial_graph |= (
            num_graph + delimiter + num_graph + delimiter + num_graph + pynini.closure(delimiter + num_graph)
        ).optimize()
        return serial_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/cardinal.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_DIGIT, NEMO_SPACE, GraphFst, insert_space
from nemo_text_processing.text_normalization.en.utils import load_labels
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
# module-level FST that deletes exactly one space character
delete_space = pynutil.delete(" ")
def prepare_labels_for_insertion(file_path: str):
    """
    Read a label file and build, per label type, a weighted union insertion graph.

    Args:
        file_path: path to a file (3 columns: a label type e.g.
            "@@decimal_delimiter@@", a label e.g. "целого", and a weight e.g. "0.1").

    Returns dictionary mapping from label type to an fst that inserts the labels with the specified weights.
    """
    # group (label, weight) pairs by their label type
    grouped = defaultdict(list)
    for label_type, label, weight in load_labels(file_path):
        grouped[label_type].append((label, weight))

    # replace each group with a single FST: insert a space, then one of the
    # weighted label alternatives
    for label_type in grouped:
        alternatives = [pynutil.add_weight(pynutil.insert(text), w) for text, w in grouped[label_type]]
        grouped[label_type] = (insert_space + pynini.union(*alternatives)).optimize()
    return grouped
class DecimalFst(GraphFst):
    """
    Finite state transducer for classifying decimal, e.g.
    "1,08" -> tokens { decimal { integer_part: "одно целая" fractional_part: "восемь сотых" } }

    Args:
        cardinal: CardinalFst
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = False):
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)

        integer_part = cardinal.cardinal_numbers_default
        cardinal_numbers_with_leading_zeros = cardinal.cardinal_numbers_with_leading_zeros

        # "," (the Russian decimal delimiter) is replaced by words from
        # decimal_delimiter.tsv, optionally followed by a penalized " и"
        delimiter_map = prepare_labels_for_insertion(get_abs_path("data/numbers/decimal_delimiter.tsv"))
        delimiter = (
            pynini.cross(",", "")
            + delimiter_map['@@decimal_delimiter@@']
            + pynini.closure(pynutil.add_weight(pynutil.insert(" и"), 0.5), 0, 1)
        ).optimize()
        # denominator endings keyed by fractional width: '10', '100', '1000', '10000'
        decimal_endings_map = prepare_labels_for_insertion(get_abs_path("data/numbers/decimal_endings.tsv"))

        self.integer_part = integer_part + delimiter
        graph_integer = pynutil.insert("integer_part: \"") + self.integer_part + pynutil.insert("\"")

        # 1 to 4 fractional digits, each width paired with its matching ending
        graph_fractional = NEMO_DIGIT @ cardinal_numbers_with_leading_zeros + decimal_endings_map['10']
        graph_fractional |= (NEMO_DIGIT + NEMO_DIGIT) @ cardinal_numbers_with_leading_zeros + decimal_endings_map[
            '100'
        ]
        graph_fractional |= (
            NEMO_DIGIT + NEMO_DIGIT + NEMO_DIGIT
        ) @ cardinal_numbers_with_leading_zeros + decimal_endings_map['1000']
        graph_fractional |= (
            NEMO_DIGIT + NEMO_DIGIT + NEMO_DIGIT + NEMO_DIGIT
        ) @ cardinal_numbers_with_leading_zeros + decimal_endings_map['10000']

        self.optional_quantity = pynini.string_file(get_abs_path("data/numbers/quantity.tsv")).optimize()
        self.graph_fractional = graph_fractional
        graph_fractional = pynutil.insert("fractional_part: \"") + graph_fractional + pynutil.insert("\"")
        # optional quantity word after the number; an existing space is slightly
        # preferred (negative weight) over inserting one
        optional_quantity = pynini.closure(
            (pynutil.add_weight(pynini.accep(NEMO_SPACE), -0.1) | insert_space)
            + pynutil.insert("quantity: \"")
            + self.optional_quantity
            + pynutil.insert("\""),
            0,
            1,
        )
        self.final_graph = (
            cardinal.optional_graph_negative + graph_integer + insert_space + graph_fractional + optional_quantity
        )
        self.final_graph = self.add_tokens(self.final_graph)
        self.fst = self.final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/decimals.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_NOT_QUOTE,
GraphFst,
delete_space,
insert_space,
)
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA, TO_CYRILLIC
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
class ElectronicFst(GraphFst):
    """
    Finite state transducer for classifying electronic: email addresses
    e.g. "ab@nd.ru" -> electronic { username: "эй би собака эн ди точка ру" }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="classify", deterministic=deterministic)

        # tagger
        # symbols accepted inside the username come from the 1st column of symbols.tsv
        accepted_symbols = []
        with open(get_abs_path("data/electronic/symbols.tsv"), 'r', encoding='utf-8') as f:
            for line in f:
                symbol, _ = line.split('\t')
                accepted_symbols.append(pynini.accep(symbol))

        # username must start with a letter; "@" is turned into a space separator
        username = (
            pynutil.insert("username: \"")
            + NEMO_ALPHA
            + pynini.closure(NEMO_ALPHA | NEMO_DIGIT | pynini.union(*accepted_symbols))
            + pynutil.insert("\"")
            + pynini.cross('@', ' ')
        )
        # domain must start with a letter and end with a letter or digit
        domain_graph = (
            NEMO_ALPHA
            + (pynini.closure(NEMO_ALPHA | NEMO_DIGIT | pynini.accep('-') | pynini.accep('.')))
            + (NEMO_ALPHA | NEMO_DIGIT)
        )
        domain_graph = pynutil.insert("domain: \"") + domain_graph + pynutil.insert("\"")
        tagger_graph = (username + domain_graph).optimize()

        # verbalizer
        graph_digit = pynini.string_file(get_abs_path("data/numbers/digits_nominative_case.tsv")).optimize()
        graph_symbols = pynini.string_file(get_abs_path("data/electronic/symbols.tsv")).optimize()
        # spell out digits and symbols; any other character passes through
        # (with a slightly higher weight, so the spelled-out options are preferred)
        user_name = (
            pynutil.delete("username:")
            + delete_space
            + pynutil.delete("\"")
            + (
                pynini.closure(
                    pynutil.add_weight(graph_digit + insert_space, 1.09)
                    | pynutil.add_weight(pynini.closure(graph_symbols + pynutil.insert(" ")), 1.09)
                    | pynutil.add_weight(NEMO_NOT_QUOTE + insert_space, 1.1)
                )
            )
            + pynutil.delete("\"")
        )
        # fallback domain reading: "." is read as "точка", the rest letter-by-letter
        domain_default = (
            pynini.closure(NEMO_NOT_QUOTE + insert_space)
            + pynini.cross(".", "точка ")
            + NEMO_NOT_QUOTE
            + pynini.closure(insert_space + NEMO_NOT_QUOTE)
        )
        server_default = (
            pynini.closure((graph_digit | NEMO_ALPHA) + insert_space, 1)
            + pynini.closure(graph_symbols + insert_space)
            + pynini.closure((graph_digit | NEMO_ALPHA) + insert_space, 1)
        )
        # well-known server/domain names get dedicated readings with lower weight
        server_common = pynini.string_file(get_abs_path("data/electronic/server_name.tsv")) + insert_space
        domain_common = pynini.cross(".", "точка ") + pynini.string_file(get_abs_path("data/electronic/domain.tsv"))
        domain = (
            pynutil.delete("domain:")
            + delete_space
            + pynutil.delete("\"")
            + (pynutil.add_weight(server_common, 1.09) | pynutil.add_weight(server_default, 1.1))
            + (pynutil.add_weight(domain_common, 1.09) | pynutil.add_weight(domain_default, 1.1))
            + delete_space
            + pynutil.delete("\"")
        )
        # "@" is verbalized as "собака"
        graph = user_name + delete_space + pynutil.insert("собака ") + delete_space + domain + delete_space
        # replace all latin letters with their Ru verbalization
        verbalizer_graph = (graph.optimize() @ (pynini.closure(TO_CYRILLIC | RU_ALPHA | pynini.accep(" ")))).optimize()
        verbalizer_graph = verbalizer_graph.optimize()

        # compose tagger and verbalizer; the classify stage re-wraps the plain text
        self.final_graph = (tagger_graph @ verbalizer_graph).optimize()
        self.fst = self.add_tokens(pynutil.insert("username: \"") + self.final_graph + pynutil.insert("\"")).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/electronic.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Adapted from https://github.com/google/TextNormalizationCoveringGrammars
# Russian minimally supervised number grammar.
#
# Supports cardinals and ordinals in all inflected forms.
#
# The language-specific acceptor G was compiled with digit, teen, decade,
# century, and big power-of-ten preterminals. The lexicon transducer is
# highly ambiguous, but no LM is used.
# Intersects the universal factorization transducer (F) with language-specific
# acceptor (G).
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_SIGMA
from nemo_text_processing.text_normalization.ru.utils import get_abs_path, load_labels
from pynini.lib import pynutil, rewrite
def get_number_names():
    """
    Creates numbers names.

    Based on: 1) Gorman, K., and Sproat, R. 2016. Minimally supervised number normalization.
    Transactions of the Association for Computational Linguistics 4: 507-519.
    and 2) Ng, A. H., Gorman, K., and Sproat, R. 2017.
    Minimally supervised written-to-spoken text normalization. In ASRU, pages 665-670.

    Returns:
        dict of optimized FSTs under the keys 'ordinal_number_names',
        'cardinal_number_names' and 'cardinal_names_nominative'.
    """
    # Universal factorization machinery shipped as a FAR archive:
    # d = delta-star, f = restricted arithmetic, g = language-specific acceptor.
    a = pynini.Far(get_abs_path('data/utils/util_arithmetic.far'), mode='r')
    d = a['DELTA_STAR']
    f = a['IARITHMETIC_RESTRICTED']
    g = pynini.Fst.read(get_abs_path('data/utils/g.fst'))
    # Intersect the universal factorization transducer (F) with the
    # language-specific acceptor (G); optimize at each step to keep size down.
    fg = (d @ (f @ (f @ (f @ g).optimize()).optimize()).optimize()).optimize()
    # Sanity check: "230" must factorize into the arithmetic expression below.
    assert rewrite.top_rewrite("230", fg) == "(+ 200 30 +)"
    # Compiles lexicon transducers (L) — one per Russian grammatical case.
    cardinal_name_nominative = pynini.string_file(get_abs_path("data/numbers/1_cardinals_nominative.tsv")).optimize()
    cardinal_name_genitive = pynini.string_file(get_abs_path("data/numbers/2_cardinals_genitive.tsv")).optimize()
    cardinal_name_dative = pynini.string_file(get_abs_path("data/numbers/3_cardinals_dative.tsv")).optimize()
    cardinal_name_accusative = pynini.string_file(get_abs_path("data/numbers/4_cardinals_accusative.tsv")).optimize()
    cardinal_name_instrumental = pynini.string_file(
        get_abs_path("data/numbers/5_cardinals_instrumental.tsv")
    ).optimize()
    cardinal_name_prepositional = pynini.string_file(
        get_abs_path("data/numbers/6_cardinals_prepositional.tsv")
    ).optimize()
    # Allow multi-word numbers: one or more space-separated nominative words.
    cardinal_name_nominative = (
        pynini.closure(cardinal_name_nominative + pynini.accep(" ")) + cardinal_name_nominative
    ).optimize()
    # Nominative is preferred (negative weight); the other cases are penalized.
    cardinal_l = pynutil.add_weight(cardinal_name_nominative, -0.1)
    for case in [
        pynutil.add_weight(cardinal_name_genitive, 0.1).optimize(),
        pynutil.add_weight(cardinal_name_dative, 0.1).optimize(),
        pynutil.add_weight(cardinal_name_accusative, 0.1).optimize(),
        pynutil.add_weight(cardinal_name_instrumental, 0.1).optimize(),
        pynutil.add_weight(cardinal_name_prepositional, 0.1).optimize(),
    ]:
        cardinal_l |= (pynini.closure(case + pynini.accep(" ")) + case).optimize()
    # Numbers in nominative case (to use, for example, with telephone or serial_graph (in cardinals))
    cardinal_names_nominative_l = (
        pynini.closure(cardinal_name_nominative + pynini.accep(" ")) + cardinal_name_nominative
    ).optimize()
    # Convert e.g. "(* 5 1000 *)" back to "5000" so complex ordinals will be formed correctly,
    # e.g. "пятитысячный" will eventually be formed. (If we didn't do this, the incorrect phrase
    # "пять тысячный" would be formed).
    # We do this for all thousands from "(*2 1000 *)" —> "2000" to "(*20 1000 *)" —> "20000".
    # We do not go higher, in order to prevent the WFST graph becoming even larger.
    complex_numbers = pynini.cross("(* 2 1000 *)", "2000")
    for number in range(3, 21):
        complex_numbers |= pynini.cross(f"(* {number} 1000 *)", f"{number}000")
    complex_numbers = (
        NEMO_SIGMA + pynutil.add_weight(complex_numbers, -1) + pynini.closure(pynini.union(" ", ")", "(", "+", "*"))
    ).optimize()
    # Prefer the collapsed-thousands factorization for ordinals when it applies.
    fg_ordinal = pynutil.add_weight(pynini.compose(fg, complex_numbers), -1) | fg
    ordinal_name = pynini.string_file(get_abs_path("data/numbers/ordinals.tsv"))
    # Ordinal phrase = optional cardinal prefix words + final ordinal word.
    ordinal_l = (pynini.closure(cardinal_name_nominative + pynini.accep(" ")) + ordinal_name).optimize()
    # Composes L with the leaf transducer (P), then composes that with FG.
    p = a['LEAVES']
    number_names = {}
    number_names['ordinal_number_names'] = (fg_ordinal @ (p @ ordinal_l)).optimize()
    number_names['cardinal_number_names'] = (fg @ (p @ cardinal_l)).optimize()
    number_names['cardinal_names_nominative'] = (fg @ (p @ cardinal_names_nominative_l)).optimize()
    return number_names
def get_alternative_formats():
    """
    Utils to get alternative formats for numbers.

    Returns:
        dict with keys:
            'one_thousand_alternative': context rewrite (applied at the start of
                a string) mapping the second word of each alternative spelling
                to the full alternative phrase from
                data/numbers/cardinals_alternatives.tsv
            'separators': transducer accepting common thousands delimiters
                in digit strings (dot, space, or no delimiter; no delimiter
                is preferred via its negative weight)
    """
    one_alternatives = load_labels(get_abs_path('data/numbers/cardinals_alternatives.tsv'))
    # Each row is (default, alternative); only the alternative column is used
    # here. Map the alternative's second word back to the whole phrase,
    # presumably restoring an omitted leading word — confirm against the TSV.
    one_thousand_map = pynini.string_map(
        [(alternative.split()[1], alternative) for _, alternative in one_alternatives]
    )
    # Apply only at the beginning of the string.
    one_thousand_alternative = pynini.cdrewrite(one_thousand_map, "[BOS]", "", NEMO_SIGMA)
    # Adapted from
    # https://github.com/google/TextNormalizationCoveringGrammars/blob/master/src/universal/thousands_punct.grm
    # Specifies common ways of delimiting thousands in digit strings.
    t = pynini.Far(get_abs_path('data/utils/universal_thousands_punct.far'))
    separators = (
        pynutil.add_weight(t['dot_thousands'], 0.1)
        | pynutil.add_weight(t['no_delimiter'], -0.1)
        | pynutil.add_weight(t['space_thousands'], 0.1)
    )
    return {
        'one_thousand_alternative': one_thousand_alternative.optimize(),
        'separators': separators.optimize(),
    }
if __name__ == '__main__':
    from nemo_text_processing.text_normalization.en.graph_utils import generator_main

    # Export each number-name grammar into its own FAR archive.
    for grammar_name, grammar_fst in get_number_names().items():
        generator_main(f'{grammar_name}.far', {grammar_name: grammar_fst})
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/number_names.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_DIGIT,
NEMO_NOT_QUOTE,
NEMO_SIGMA,
GraphFst,
delete_extra_space,
delete_space,
insert_space,
)
from nemo_text_processing.text_normalization.ru.utils import get_abs_path
from pynini.lib import pynutil
class DateFst(GraphFst):
    """
    Finite state transducer for classifying date, e.g.
        "01.05" -> tokens { date { day: "первое мая" } }

    The tagger and the matching verbalizer are composed inside this class, so
    the emitted token carries the fully verbalized date in a single `day` field.

    Args:
        number_names: number_names for cardinal and ordinal numbers
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, number_names: dict, deterministic: bool):
        super().__init__(name="date", kind="classify", deterministic=deterministic)

        # Ru format: DD-MM-YYYY or DD-MM-YY
        month_abbr_to_names = pynini.string_file(get_abs_path("data/months/abbr_to_name.tsv")).optimize()

        # Date-part separator: "." is slightly preferred over "/" and "-".
        delete_sep = pynutil.add_weight(pynini.cross(".", " "), 1.09) | pynutil.add_weight(
            pynini.cross(pynini.union("/", "-"), " "), 1.1
        )

        numbers = number_names['ordinal_number_names']
        # A leading zero is either dropped (preferred, negative weight) or
        # read out as "ноль".
        zero = (pynutil.add_weight(pynini.cross("0", ""), -0.1)) | (
            pynutil.add_weight(pynini.cross("0", "ноль "), 0.1)
        )
        zero_digit = zero + pynini.compose(NEMO_DIGIT, numbers)

        # Day: one digit, or two digits starting with 1/2/3.
        digit_day = (pynini.union("1", "2", "3") + NEMO_DIGIT) | NEMO_DIGIT
        digit_day = pynini.compose(digit_day, numbers)

        day = zero_digit | digit_day
        # Exclude ordinal variants with these endings (unsuitable forms for a day).
        day = pynini.compose(
            day, pynini.difference(NEMO_SIGMA, NEMO_SIGMA + pynini.union("ой", "ая", "ых", "ые", "ыми"))
        )
        day = (pynutil.insert("day: \"") + day + pynutil.insert("\"")).optimize()

        # Month as a number: "0X" or "1X".
        digit_month = zero_digit | pynini.compose(pynini.accep("1") + NEMO_DIGIT, numbers)
        month_number_to_abbr = pynini.string_file(get_abs_path("data/months/numbers.tsv")).optimize()
        month_number_to_abbr = (
            (
                ((pynutil.add_weight(pynini.cross("0", ""), -0.1) | pynini.accep("1")) + NEMO_DIGIT) | NEMO_DIGIT
            ).optimize()
            @ month_number_to_abbr
        ).optimize()

        # Month name from either a numeric month or a written abbreviation.
        month_name = (
            (month_number_to_abbr @ month_abbr_to_names) | pynutil.add_weight(month_abbr_to_names, 0.1)
        ).optimize()
        month = (
            pynutil.insert("month: \"") + (month_name | pynutil.add_weight(digit_month, 0.1)) + pynutil.insert("\"")
        ).optimize()

        # Year: 4- or 2-digit strings, optionally with a verbalized leading zero.
        year = pynini.compose(((NEMO_DIGIT ** 4) | (NEMO_DIGIT ** 2)), numbers).optimize()
        year |= zero_digit

        # reduce year options
        year_wrong_endings = ["ую", "ая"]
        year_wrong_beginning = ["две тысяча", "два тысяч", "два тысячи", "две тысяч "]
        # Prefer year forms ending in "ой"/"ого" (negative weight branch).
        year = pynini.compose(
            year, pynini.difference(NEMO_SIGMA, NEMO_SIGMA + pynini.union("ой", "ого"))
        ) | pynutil.add_weight(pynini.compose(year, NEMO_SIGMA + pynini.union("ой", "ого")), -0.1)
        year_restrict1 = pynini.difference(NEMO_SIGMA, pynini.union(*year_wrong_beginning) + NEMO_SIGMA)
        year_restrict2 = pynini.difference(NEMO_SIGMA, NEMO_SIGMA + pynini.union(*year_wrong_endings))
        year = pynini.compose(pynini.compose(year, year_restrict1), year_restrict2)

        # "г."/"гг." abbreviations expand to inflected forms of the word for "year".
        year_word_singular = ["год", "года", "году", "годом", "годе"]
        # NOTE(review): "годам" appears twice in this list — harmless for
        # pynini.union, but one entry may have been meant as a different form;
        # confirm against the intended paradigm.
        year_word_plural = ["годы", "годов", "годам", "годами", "годам", "годах"]
        year_word = pynini.cross("г.", pynini.union(*year_word_singular))
        year_word |= pynini.cross("гг.", pynini.union(*year_word_plural))
        year_word = (pynutil.add_weight(insert_space, -0.1) | pynutil.add_weight(pynini.accep(" "), 0.1)) + year_word

        year_optional = pynutil.insert("year: \"") + year + pynini.closure(year_word, 0, 1) + pynutil.insert("\"")
        year_optional = pynini.closure(delete_sep + year_optional, 0, 1).optimize()
        # A bare year requires the explicit "г."/"гг." marker to qualify as a date.
        year_only = pynutil.insert("year: \"") + year + year_word + pynutil.insert("\"")

        tagger_graph = (day + delete_sep + month + year_optional) | year_only

        # Verbalizer
        day = (
            pynutil.delete("day:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        month = (
            pynutil.delete("month:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        year = (
            pynutil.delete("year:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + delete_space
            + pynutil.delete("\"")
        )
        year_optional = pynini.closure(delete_extra_space + year, 0, 1)
        graph_dmy = day + delete_extra_space + month + year_optional
        verbalizer_graph = (graph_dmy | year) + delete_space

        # Compose tagger with verbalizer and wrap the result in one `day` field.
        self.final_graph = pynini.compose(tagger_graph, verbalizer_graph).optimize()
        self.fst = pynutil.insert("day: \"") + self.final_graph + pynutil.insert("\"")
        self.fst = self.add_tokens(self.fst).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/date.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_SPACE, GraphFst
from pynini.lib import pynutil
class WordFst(GraphFst):
    """
    Finite state transducer for classifying word.
        e.g. sleep -> tokens { name: "sleep" }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        # Fix: forward `deterministic` to the base class — previously the
        # parameter was accepted but silently dropped, inconsistent with the
        # other grammar classes in this package.
        super().__init__(name="word", kind="classify", deterministic=deterministic)
        # A word is any non-empty run of non-space characters, tagged as `name`.
        word = pynutil.insert("name: \"") + pynini.closure(NEMO_NOT_SPACE, 1) + pynutil.insert("\"")
        self.fst = word.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/taggers/word.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space, insert_space
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for verbalizing time, e.g.
        time { hours: "два часа пятнадцать минут" } -> "два часа пятнадцать минут"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="time", kind="verbalize", deterministic=deterministic)
        # hours: "..." -> the quoted contents.
        hour = (
            pynutil.delete("hours:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        # minutes: "..." -> the quoted contents.
        minutes = (
            pynutil.delete("minutes:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\"")
        )
        # hours then minutes, when the tagger marked preserve_order.
        self.graph = (
            hour + delete_space + insert_space + minutes + delete_space + pynutil.delete("preserve_order: true")
        )
        # hours only.
        self.graph |= hour + delete_space
        # minutes before hours (default field order).
        self.graph |= minutes + delete_space + insert_space + hour + delete_space
        delete_tokens = self.delete_tokens(self.graph)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/time.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_NON_BREAKING_SPACE,
NEMO_SPACE,
GraphFst,
delete_space,
)
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA
from pynini.lib import pynutil
class MeasureFst(GraphFst):
    """
    Finite state transducer for verbalizing measure, e.g.
        measure { cardinal { integer: "два килограма" } } -> "два килограма"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="measure", kind="verbalize", deterministic=deterministic)
        # The value may contain Cyrillic letters plus regular/non-breaking spaces.
        value = pynini.closure(RU_ALPHA | NEMO_SPACE | NEMO_NON_BREAKING_SPACE)
        open_tag = pynutil.delete(" cardinal { integer: \"")
        close_tag = pynutil.delete("\"") + delete_space + pynutil.delete("}")
        graph = open_tag + value + close_tag
        self.fst = self.delete_tokens(graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/measure.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA
from pynini.lib import pynutil
class TelephoneFst(GraphFst):
    """
    Finite state transducer for verbalizing telephone, e.g.
        telephone { number_part: "восемь девятьсот тринадцать девятьсот восемьдесят три пятьдесят шесть ноль один" } -> "восемь девятьсот тринадцать девятьсот восемьдесят три пятьдесят шесть ноль один"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="telephone", kind="verbalize", deterministic=deterministic)
        # Keep the non-empty spoken number; strip the field name and quotes.
        spoken_number = pynini.closure(RU_ALPHA | " ", 1)
        graph = pynutil.delete("number_part: \"") + spoken_number + pynutil.delete("\"")
        self.fst = self.delete_tokens(graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/telephone.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class OrdinalFst(GraphFst):
    """
    Finite state transducer for verbalizing ordinal numbers
    e.g. ordinal { integer: "второе" } } -> "второе"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="ordinal", kind="verbalize", deterministic=deterministic)
        # Require at least one character inside the quotes, consistent with the
        # other verbalizers in this package (previously an empty integer value
        # was accepted).
        value = pynini.closure(NEMO_NOT_QUOTE, 1)
        graph = pynutil.delete("integer: \"") + value + pynutil.delete("\"")
        delete_tokens = self.delete_tokens(graph)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/ordinal.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.en.verbalizers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.ru.verbalizers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.ru.verbalizers.date import DateFst
from nemo_text_processing.text_normalization.ru.verbalizers.decimal import DecimalFst
from nemo_text_processing.text_normalization.ru.verbalizers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.ru.verbalizers.measure import MeasureFst
from nemo_text_processing.text_normalization.ru.verbalizers.money import MoneyFst
from nemo_text_processing.text_normalization.ru.verbalizers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.ru.verbalizers.telephone import TelephoneFst
from nemo_text_processing.text_normalization.ru.verbalizers.time import TimeFst
class VerbalizeFst(GraphFst):
    """
    Composes other verbalizer grammars.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="verbalize", kind="verbalize", deterministic=deterministic)
        # Union of every semiotic-class verbalizer; any one of them may match a token.
        component_fsts = [
            MeasureFst().fst,
            CardinalFst().fst,
            DecimalFst().fst,
            OrdinalFst().fst,
            DateFst().fst,
            ElectronicFst().fst,
            MoneyFst().fst,
            WhiteListFst().fst,
            TelephoneFst().fst,
            TimeFst().fst,
        ]
        combined = component_fsts[0]
        for component in component_fsts[1:]:
            combined |= component
        self.fst = combined
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/verbalize.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import (
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.verbalizers.word import WordFst
from nemo_text_processing.text_normalization.ru.verbalizers.verbalize import VerbalizeFst
from pynini.lib import pynutil
class VerbalizeFinalFst(GraphFst):
    """
    Finite state transducer that verbalizes an entire sentence, e.g.
        tokens { name: "its" } tokens { time { hours: "12" minutes: "30" } } tokens { name: "now" } -> its 12:30 now

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
    """

    def __init__(self, deterministic: bool = True, cache_dir: str = None, overwrite_cache: bool = False):
        super().__init__(name="verbalize_final", kind="verbalize", deterministic=deterministic)
        far_file = None
        # The literal string "None" is treated like None (handles CLI-passed values).
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            far_file = os.path.join(cache_dir, f"ru_tn_{deterministic}_deterministic_verbalizer.far")
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Fast path: restore the compiled grammar from the FAR cache.
            self.fst = pynini.Far(far_file, mode="r")["verbalize"]
            logging.info(f'VerbalizeFinalFst graph was restored from {far_file}.')
        else:
            verbalize = VerbalizeFst().fst
            word = WordFst().fst
            # Each token is either a semiotic-class verbalization or a plain word.
            types = verbalize | word
            # Strip the "tokens { ... }" wrapper around each token.
            graph = (
                pynutil.delete("tokens")
                + delete_space
                + pynutil.delete("{")
                + delete_space
                + types
                + delete_space
                + pynutil.delete("}")
            )
            # One or more tokens, collapsing inter-token whitespace to single spaces.
            graph = delete_space + pynini.closure(graph + delete_extra_space) + graph + delete_space
            self.fst = graph.optimize()
            if far_file:
                # Persist the compiled grammar so later runs can take the fast path.
                generator_main(far_file, {"verbalize": self.fst})
                logging.info(f"VerbalizeFinalFst grammars are saved to {far_file}.")
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/verbalize_final.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class DecimalFst(GraphFst):
    """
    Finite state transducer for verbalizing decimal, e.g.
        tokens { decimal { integer_part: "одно целая" fractional_part: "восемь сотых" } } ->
        "одно целая восемь сотых"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="decimal", kind="verbalize", deterministic=deterministic)
        # An optional negative: "true" field becomes the word "минус ".
        minus = pynini.closure(pynini.cross("negative: \"true\" ", "минус "), 0, 1)
        # Shared helper: strip ` "` and the closing quote around a field value.
        quoted_value = pynutil.delete(" \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        whole = pynutil.delete("integer_part:") + quoted_value
        fraction = pynutil.delete("fractional_part:") + quoted_value
        # Optional quantity field (e.g. a magnitude word), kept verbatim.
        quantity = pynini.closure(
            pynini.accep(" ")
            + pynutil.delete("quantity: \"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\""),
            0,
            1,
        )
        self.graph = minus + whole + pynini.accep(" ") + fraction + quantity + delete_space
        self.fst = self.delete_tokens(self.graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/decimal.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA
from pynini.lib import pynutil
class MoneyFst(GraphFst):
    """
    Finite state transducer for verbalizing money, e.g.
        money { integer_part: "пять рублей" } -> пять рублей

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="money", kind="verbalize", deterministic=deterministic)
        # The amount is any run of Cyrillic letters and spaces.
        amount = pynini.closure(RU_ALPHA | " ")
        stripped = pynutil.delete("integer_part: \"") + amount + pynutil.delete("\"")
        self.fst = self.delete_tokens(stripped).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/money.py
|
# Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for verbalizing cardinals
    e.g. cardinal { integer: "тысяча один" } -> "тысяча один"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="cardinal", kind="verbalize", deterministic=deterministic)
        # Optional leading sign: negative: "true" is spoken as "минус ".
        sign = pynini.closure(pynini.cross("negative: \"true\" ", "минус "), 0, 1)
        # The non-empty spoken integer, with the field name and quotes removed.
        number = pynutil.delete("integer: \"") + pynini.closure(NEMO_NOT_QUOTE, 1) + pynutil.delete("\"")
        # Optional trailing quantity field (e.g. a magnitude word), kept verbatim.
        quantity = pynini.closure(
            pynini.accep(" ")
            + pynutil.delete("quantity: \"")
            + pynini.closure(NEMO_NOT_QUOTE, 1)
            + pynutil.delete("\""),
            0,
            1,
        )
        self.graph = sign + number + quantity
        self.fst = self.delete_tokens(self.graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/cardinal.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA
from pynini.lib import pynutil
class ElectronicFst(GraphFst):
    """
    Finite state transducer for verbalizing electronic
    e.g. electronic { username: "эй би собака эн ди точка ру" } -> "эй би собака эн ди точка ру"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="electronic", kind="verbalize", deterministic=deterministic)
        # The spoken address is any run of Cyrillic letters and spaces.
        spoken = pynini.closure(RU_ALPHA | " ")
        stripped = pynutil.delete("username: \"") + spoken + pynutil.delete("\"")
        self.fst = self.delete_tokens(stripped).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/electronic.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import GraphFst
from nemo_text_processing.text_normalization.ru.alphabet import RU_ALPHA
from pynini.lib import pynutil
class DateFst(GraphFst):
    """
    Finite state transducer for verbalizing date, e.g.
        tokens { date { day: "первое мая" } } -> "первое мая"

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="date", kind="verbalize", deterministic=deterministic)
        # The tagger packs the full verbalized date into the `day` field;
        # keep its non-empty contents and strip the field name and quotes.
        spoken_date = pynini.closure(RU_ALPHA | " ", 1)
        stripped = pynutil.delete("day: \"") + spoken_date + pynutil.delete("\"")
        self.fst = self.delete_tokens(stripped).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/verbalizers/date.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/data/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/data/months/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/data/numbers/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/data/utils/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/data/electronic/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/data/time/__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/ru/data/currency/__init__.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# graph_utils is kept here because importing from the en folders would cause import errors: the data file names have to match those in the en folder
import logging
import os
import string
from pathlib import Path
from typing import Dict
import pynini
from pynini import Far
from pynini.export import export
from pynini.lib import byte, pynutil, utf8
# Single-character acceptors and small helper FSTs shared by all zh grammars.
NEMO_CHAR = utf8.VALID_UTF8_CHAR  # any valid UTF-8 character
NEMO_DIGIT = byte.DIGIT  # ASCII digit 0-9
NEMO_LOWER = pynini.union(*string.ascii_lowercase).optimize()  # a-z
NEMO_UPPER = pynini.union(*string.ascii_uppercase).optimize()  # A-Z
NEMO_ALPHA = pynini.union(NEMO_LOWER, NEMO_UPPER).optimize()  # ASCII letter
NEMO_NON_BREAKING_SPACE = u"\u00A0"  # used to protect spaces inside quoted token values
NEMO_SPACE = " "
NEMO_WHITE_SPACE = pynini.union(" ", "\t", "\n", "\r", u"\u00A0").optimize()
NEMO_NOT_SPACE = pynini.difference(NEMO_CHAR, NEMO_WHITE_SPACE).optimize()
NEMO_NOT_QUOTE = pynini.difference(NEMO_CHAR, r'"').optimize()  # any char except double quote
NEMO_PUNCT = pynini.union(*map(pynini.escape, string.punctuation)).optimize()
NEMO_SIGMA = pynini.closure(NEMO_CHAR)  # sigma-star: any string
# Whitespace manipulation helpers used when composing tagger/verbalizer graphs.
delete_space = pynutil.delete(pynini.closure(NEMO_WHITE_SPACE))
delete_zero_or_one_space = pynutil.delete(pynini.closure(NEMO_WHITE_SPACE, 0, 1))
insert_space = pynutil.insert(" ")
delete_extra_space = pynini.cross(pynini.closure(NEMO_WHITE_SPACE, 1), " ")  # squeeze runs of whitespace to one space
def generator_main(file_name: str, graphs: Dict[str, 'pynini.FstLike']):
    """
    Export graphs as an OpenFst finite state archive (FAR) file.

    Args:
        file_name: path of the FAR file to create
        graphs: mapping from rule name to the pynini WFST graph stored under that name
    """
    exporter = export.Exporter(file_name)
    for rule_name, rule_fst in graphs.items():
        # Optimize before export so the archived automata stay compact.
        exporter[rule_name] = rule_fst.optimize()
    exporter.close()
    logging.info(f'Created {file_name}')
class GraphFst:
    """
    Base class for all grammar FSTs.

    Args:
        name: name of grammar class
        kind: either 'classify' or 'verbalize'
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, name: str, kind: str, deterministic: bool = True):
        self.name = name
        self.kind = kind
        self._fst = None
        self.deterministic = deterministic
        # Pre-compiled grammars live under grammars/<kind>/<name>.far next to this file.
        self.far_path = Path(os.path.dirname(__file__) + '/grammars/' + kind + '/' + name + '.far')
        if self.far_exist():
            # Reuse the cached FAR instead of rebuilding the grammar from scratch.
            self._fst = Far(self.far_path, mode="r", arc_type="standard", far_type="default").get_fst()

    def far_exist(self) -> bool:
        """Return True if a pre-compiled FAR file is available on disk."""
        return self.far_path.exists()

    @property
    def fst(self) -> 'pynini.FstLike':
        return self._fst

    @fst.setter
    def fst(self, fst):
        self._fst = fst

    def add_tokens(self, fst) -> 'pynini.FstLike':
        """
        Wrap the given fst's output in '<name> { ... }' token markup.

        Args:
            fst: input fst

        Returns:
            Fst: fst
        """
        return pynutil.insert(f"{self.name} {{ ") + fst + pynutil.insert(" }")

    def delete_tokens(self, fst) -> 'pynini.FstLike':
        """
        Strip the '<name> { ... }' token markup around the given fst's input.

        Args:
            fst: input fst

        Returns:
            Fst: fst
        """
        stripped = (
            pynutil.delete(f"{self.name}")
            + delete_space
            + pynutil.delete("{")
            + delete_space
            + fst
            + delete_space
            + pynutil.delete("}")
        )
        # Non-breaking spaces inserted by taggers (see convert_space) are
        # rewritten back into ordinary spaces on the way out.
        return stripped @ pynini.cdrewrite(pynini.cross(u"\u00A0", " "), "", "", NEMO_SIGMA)
def convert_space(fst) -> 'pynini.FstLike':
    """
    Convert breaking spaces to non-breaking spaces.

    Used only in tagger grammars for transducing token values within quotes,
    e.g. name: "hello kitty". This makes the transducer significantly slower,
    so only use it when spaces can actually occur inside quotes.

    Args:
        fst: input fst

    Returns output fst where breaking spaces are converted to non breaking spaces
    """
    space_rewrite = pynini.cdrewrite(pynini.cross(NEMO_SPACE, NEMO_NON_BREAKING_SPACE), "", "", NEMO_SIGMA)
    return fst @ space_rewrite
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/graph_utils.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/__init__.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# utils is kept here because importing from the en folders would cause import errors: the data file names have to match those in the en folder
import csv
import os
# Chinese numeric scale characters (powers of ten) used when spelling out numbers.
UNIT_1e01 = '十'  # 10
UNIT_1e02 = '百'  # 100
UNIT_1e03 = '千'  # 1,000
UNIT_1e04 = '万'  # 10,000
def get_abs_path(rel_path):
    """
    Resolve a path relative to this module's directory.

    Args:
        rel_path: relative path to this file

    Returns absolute path
    """
    # Keep plain '/'-concatenation (not os.path.join) so an absolute rel_path
    # is appended verbatim, exactly as the original behaved.
    base_dir = os.path.dirname(os.path.abspath(__file__))
    return f"{base_dir}/{rel_path}"
def load_labels(abs_path):
    """
    Load a tab-separated label file as a list of rows.

    Args:
        abs_path: absolute path of the TSV file

    Returns:
        list of rows, each row a list of column strings
    """
    # Use a context manager so the file handle is always closed; the original
    # opened the file and never closed it (resource leak).
    with open(abs_path, encoding="utf-8") as label_tsv:
        labels = list(csv.reader(label_tsv, delimiter="\t"))
    return labels
def augment_labels_with_punct_at_end(labels):
    """
    Augment labels: if a key ends with "." and its value does not, add a new
    label where the value keeps the trailing period.

    Args:
        labels: input labels, rows of [key, value, ...]

    Returns:
        list of additional labels only (input rows are not repeated)
    """
    res = []
    for label in labels:
        # Guard against rows with no value column and against empty key/value
        # strings, which previously raised IndexError on label[...][-1].
        if len(label) > 1 and label[0] and label[1]:
            if label[0][-1] == "." and label[1][-1] != ".":
                res.append([label[0], label[1] + "."] + label[2:])
    return res
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/utils.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for classifying time, e.g.
    01:02 -> tokens { time { hour: "一点" minute: "二分" } }
    1:02:03 -> tokens { time { hour: "一点" minute: "二分" second: "三秒" } }
    1点5分19秒 -> tokens { time { hour: "一点" minute: "五分" second: "十九秒" } }
    1点1刻 -> tokens { time { hour: "一点" minute: "一刻" } }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="time", kind="classify", deterministic=deterministic)
        # digit -> spoken-Mandarin mappings loaded from TSV data files
        hour = pynini.string_file(get_abs_path("data/time/hour.tsv"))
        minute = pynini.string_file(get_abs_path("data/time/minute.tsv"))
        second = pynini.string_file(get_abs_path("data/time/second.tsv"))
        alphabet_am = pynini.string_file(get_abs_path("data/time/AM.tsv"))
        alphabet_pm = pynini.string_file(get_abs_path("data/time/PM.tsv"))
        # grammar for time separated by colons, e.g. 05:03:13.
        # Bug fix: the second union arm duplicated the ASCII ":"; it is meant
        # to accept the full-width colon "：" used in Chinese-script text.
        symbol = pynutil.delete(":") | pynutil.delete("：")
        hour_component = pynutil.insert("hour: \"") + hour + pynutil.insert('点') + pynutil.insert("\"")
        minute_component = pynutil.insert("minute: \"") + minute + pynutil.insert('分') + pynutil.insert("\"")
        second_component = pynutil.insert("second: \"") + second + pynutil.insert('秒') + pynutil.insert("\"")
        # combining the three components
        hour_minute_second = (
            hour_component
            + symbol
            + pynutil.insert(' ')
            + minute_component
            + symbol
            + pynutil.insert(' ')
            + second_component
        )
        hour_minute = hour_component + symbol + pynutil.insert(' ') + minute_component
        graph_colon = hour_minute_second | hour_minute
        # grammar for time as clock, with morphemes 点, 分, 秒
        # (traditional-script variants are normalized to simplified)
        hour_clock = pynini.accep("点") | pynini.cross("點", "点")
        minute_clock = pynini.accep("分") | pynini.accep('刻')
        second_clock = pynini.accep('秒')
        # grammar for time as a duration: 小时, 分钟, 秒
        hour_duration = (
            pynini.accep('个点')
            # NOTE(review): this maps traditional 個點 to the mixed-script 個点;
            # the simplified form 个点 was probably intended — confirm upstream.
            | pynini.cross("個點", "個点")
            | pynini.accep('小时')
            | pynini.cross('小時', '小时')
            | pynini.accep('个小时')
            | pynini.cross('個小時', '个小时')
            | pynini.accep('个钟头')
            | pynini.cross('個鐘頭', '个钟头')
        )
        minute_duration = pynini.accep("分钟") | pynini.accep('刻') | pynini.accep('刻钟')
        second_duration = pynini.accep("秒钟") | pynini.cross('秒鐘', '秒钟') | pynini.accep('秒')
        # clock or duration reading for each field
        hour_component = pynutil.insert("hour: \"") + hour + (hour_clock | hour_duration) + pynutil.insert("\"")
        minute_component = (
            pynutil.insert("minute: \"") + minute + (minute_clock | minute_duration) + pynutil.insert("\"")
        )
        second_component = (
            pynutil.insert("second: \"") + second + (second_clock | second_duration) + pynutil.insert("\"")
        )
        hour_minute = hour_component + pynutil.insert(' ') + minute_component
        hour_second = hour_component + pynutil.insert(' ') + second_component
        minute_second = minute_component + pynutil.insert(' ') + second_component
        clock_all = hour_component + pynutil.insert(' ') + minute_component + pynutil.insert(' ') + second_component
        graph_clock_period = (
            hour_component
            | minute_component
            | second_component
            | hour_minute
            | hour_second
            | minute_second
            | clock_all
        )
        # grammar for back-counted time ("to the hour"): 五点差n分n秒
        backcount = pynutil.insert("verb: \"") + pynini.accep('差') + pynutil.insert("\"")
        graph_hour = (
            (
                pynini.closure(backcount)
                + pynutil.insert(' ')
                + hour_component
                + pynutil.insert(' ')
                + pynini.closure(backcount)
                + pynutil.insert(' ')
                + minute_component
            )
            | (
                pynini.closure(backcount)
                + pynutil.insert(' ')
                + hour_component
                + pynutil.insert(' ')
                + pynini.closure(backcount)
                + pynutil.insert(' ')
                + second_component
            )
            | (
                pynini.closure(backcount)
                + pynutil.insert(' ')
                + hour_component
                + pynutil.insert(' ')
                + pynini.closure(backcount)
                + pynutil.insert(' ')
                + minute_component
                + pynutil.insert(' ')
                + second_component
            )
        )
        graph_minute = minute_component + pynutil.insert(' ') + backcount + pynutil.insert(' ') + second_component
        graph_backcount = graph_hour | graph_minute
        # grammar for time with am/pm or Mandarin words as prefix/suffix: 早上5点, 05:04:04am
        suffix_am = pynini.closure(alphabet_am, 0, 1)
        suffix_pm = pynini.closure(alphabet_pm, 0, 1)
        am_component = pynutil.insert("suffix: \"") + suffix_am + pynutil.insert("\"")
        pm_component = pynutil.insert("suffix: \"") + suffix_pm + pynutil.insert("\"")
        graph_suffix = (
            (graph_clock_period | graph_colon | graph_backcount) + pynutil.insert(' ') + (am_component | pm_component)
        )
        graph = graph_colon | graph_clock_period | graph_backcount | graph_suffix
        final_graph = self.add_tokens(graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/time.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst, insert_space
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class Measure(GraphFst):
    '''
    Finite state transducer for classifying measures, e.g.
    1kg -> tokens { measure { cardinal { integer: "一" } units: "千克" } }
    '''

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True, lm: bool = False):
        super().__init__(name="measure", kind="classify", deterministic=deterministic)
        # unit symbols in Latin script (kg, km, ...) and in Chinese script (千克, ...)
        units_en = pynini.string_file(get_abs_path("data/measure/units_en.tsv"))
        units_zh = pynini.string_file(get_abs_path("data/measure/units_zh.tsv"))
        unit_field = pynutil.insert("units: \"") + (units_en | units_zh) + pynutil.insert("\"")

        # cardinal measure: <integer> <unit>
        graph_cardinal = cardinal.just_cardinals
        integer_field = pynutil.insert("integer: \"") + graph_cardinal + pynutil.insert("\"")
        graph_cardinal_measure = integer_field + insert_space + unit_field

        # decimal measure: <decimal> <unit>
        decimal = decimal.decimal
        graph_decimal = decimal + insert_space + unit_field

        # optional leading sign; traditional 負 and ASCII '-' normalize to 负
        open_negative = pynutil.insert("negative: \"")
        close_quote = pynutil.insert("\"")
        graph_sign = (
            (open_negative + pynini.accep("负") + close_quote)
            | (open_negative + pynini.cross("負", "负") + close_quote)
            | (open_negative + pynini.cross("-", "负") + close_quote)
        )
        graph = pynini.closure(graph_sign + insert_space) + (graph_cardinal_measure | graph_decimal)
        self.fst = self.add_tokens(graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/measure.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from pynini.lib import pynutil
class FractionFst(GraphFst):
    """
    Finite state transducer for classifying fraction, e.g.,
    1/2 -> tokens { fraction { denominator: "二" numerator: "一"} }
    5又1/2 -> tokens { fraction { integer_part: "五" denominator: "二" numerator: "一" } }
    5又2分之1 -> tokens { fraction { integer_part: "五" denominator: "二" numerator: "一" } }
    2分之1 -> tokens { fraction { denominator: "二" numerator: "一"} }
    100分之1 -> tokens { fraction { denominator: "一百" numerator: "一"} }
    百分之1 -> tokens { fraction { denominator: "百" numerator: "一"} }
    98% -> tokens { fraction { denominator: "百" numerator: "九十八"} }

    Args:
        cardinal: CardinalFst
        decimal: DecimalFst
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True, lm: bool = False):
        super().__init__(name="fraction", kind="classify", deterministic=deterministic)
        graph_cardinals = cardinal.just_cardinals
        graph_decimal = decimal.decimal
        # '/' as in 1/2; the morpheme 分之 as in 2分之1 (denominator read first)
        slash = pynutil.delete('/')
        morpheme = pynutil.delete('分之')
        # bare power-of-ten denominators, simplified and traditional/financial variants
        # NOTE(review): "仟亿" is listed twice below; pynini.union de-duplicates,
        # but one occurrence may have been meant as another variant — confirm.
        suffix = pynini.union(
            "百",
            "千",
            "万",
            "十万",
            "百万",
            "千万",
            "亿",
            "十亿",
            "百亿",
            "千亿",
            "萬",
            "十萬",
            "百萬",
            "千萬",
            "億",
            "十億",
            "百億",
            "千億",
            "拾萬",
            "佰萬",
            "仟萬",
            "拾億",
            "佰億",
            "仟億",
            "拾万",
            "佰万",
            "仟万",
            "仟亿",
            "佰亿",
            "仟亿",
        )
        integer_component = pynutil.insert("integer_part: \"") + graph_cardinals + pynutil.insert("\"")
        denominator_component = pynutil.insert("denominator: \"") + graph_cardinals + pynutil.insert("\"")
        numerator_component = pynutil.insert("numerator: \"") + graph_cardinals + pynutil.insert("\"")
        # slash form with an optional whole part joined by 又: 5又1/2
        graph_with_integer = (
            pynini.closure(integer_component + pynutil.delete('又'), 0, 1)
            + pynutil.insert(' ')
            + numerator_component
            + slash
            + pynutil.insert(' ')
            + denominator_component
        )
        # plain slash form: 1/2
        graph_only_slash = numerator_component + slash + pynutil.insert(' ') + denominator_component
        # 分之 form, denominator first: 2分之1 and 5又2分之1
        graph_morpheme = (denominator_component + morpheme + pynutil.insert(' ') + numerator_component) | (
            integer_component
            + pynutil.delete('又')
            + pynutil.insert(' ')
            + denominator_component
            + morpheme
            + pynutil.insert(' ')
            + numerator_component
        )
        # denominator given only as a bare scale word: 百分之1
        graph_with_suffix = (
            pynini.closure(pynutil.insert("denominator: \"") + suffix + pynutil.insert("\""), 0, 1)
            + morpheme
            + pynutil.insert(' ')
            + numerator_component
        )
        # percentage form: 98% -> numerator over 百
        percentage = pynutil.delete('%')
        graph_percentage = (
            numerator_component
            + percentage
            + pynutil.insert(' ')
            + pynutil.insert("denominator: \"百")
            + pynutil.insert("\"")
        )
        # 100% verbalized as 百分之百; negative weight makes it win over 98%-style parse
        graph_hundred = pynutil.delete('100%') + pynutil.insert('numerator: \"百\" denominator: \"百"')
        # optional leading sign; ASCII +/- and traditional 負 normalize to 正/负
        graph_optional_sign = (
            (pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", "\"负\"")))
            | (pynini.closure(pynutil.insert("positive: ") + pynini.cross("+", "\"正\"")))
            | (pynutil.insert("positive: ") + pynutil.insert("\"") + pynini.accep('正') + pynutil.insert("\""))
            | (
                pynutil.insert('negative: ')
                + pynutil.insert("\"")
                + (pynini.accep('负') | pynini.cross('負', '负'))
                + pynutil.insert("\"")
            )
        )
        # decimal percentages, e.g. 12.5%
        graph_decimals = (
            graph_decimal
            + pynutil.insert(" ")
            + percentage
            + pynutil.insert("denominator: \"百")
            + pynutil.insert("\"")
        )
        graph = (
            graph_with_integer
            | graph_only_slash
            | graph_morpheme
            | graph_with_suffix
            | graph_percentage
            | graph_decimals
            | pynutil.add_weight(graph_hundred, -3.0)
        )
        # signed variants of every unsigned alternative above
        graph_with_sign = (
            (graph_optional_sign + pynutil.insert(" ") + graph_with_integer)
            | (graph_optional_sign + pynutil.insert(" ") + graph_only_slash)
            | (graph_optional_sign + pynutil.insert(" ") + graph_morpheme)
            | (graph_optional_sign + pynutil.insert(" ") + graph_with_suffix)
            | (graph_optional_sign + pynutil.insert(" ") + graph_percentage)
            | pynutil.add_weight((graph_optional_sign + pynutil.insert(" ") + graph_hundred), -3.0)
        )
        final_graph = graph | graph_with_sign
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/fraction.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from pynini.lib import pynutil
class OrdinalFst(GraphFst):
    """
    Finite state transducer for classifying ordinal, e.g.
    第100 -> ordinal { integer: "第一百" }

    Args:
        cardinal: CardinalFst
    """

    def __init__(self, cardinal: GraphFst):
        # Bug fix: this grammar is a tagger (it inserts token markup via
        # add_tokens below), so its kind is "classify"; it was declared
        # "verbalize", which also mis-pointed the FAR cache path
        # grammars/<kind>/ordinal.far.
        super().__init__(name="ordinal", kind="classify")
        graph_cardinal = cardinal.just_cardinals
        # ordinal prefix 第 followed by a cardinal
        morpheme = pynini.accep('第')
        graph_ordinal = morpheme + graph_cardinal
        graph_ordinal_final = pynutil.insert("integer: \"") + graph_ordinal + pynutil.insert("\"")
        # ordinal ranges, e.g. 从第一到第十, 第一~第十
        range_source = pynini.accep("从")
        range_goal = (
            pynini.accep("-")
            | pynini.accep("~")
            | pynini.accep("——")
            | pynini.accep("—")
            | pynini.accep("到")
            | pynini.accep("至")
        )
        # optional 从 prefix, then <ordinal> <separator> <ordinal-or-cardinal>
        graph_range_source = (
            pynini.closure((pynutil.insert("range: \"") + range_source + pynutil.insert("\" ")), 0, 1)
            + pynutil.insert("integer: \"")
            + graph_ordinal
            + pynutil.insert("\"")
            + pynutil.insert(" range: \"")
            + range_goal
            + pynutil.insert("\" ")
            + pynutil.insert("integer: \"")
            + (graph_ordinal | graph_cardinal)
            + pynutil.insert("\"")
        )
        # same range shape without the 从 prefix
        graph_range_goal = (
            pynutil.insert("integer: \"")
            + graph_ordinal
            + pynutil.insert("\"")
            + pynutil.insert(" range: \"")
            + range_goal
            + pynutil.insert("\" ")
            + pynutil.insert("integer: \"")
            + (graph_ordinal | graph_cardinal)
            + pynutil.insert("\"")
        )
        graph_range_final = graph_range_source | graph_range_goal
        final_graph = graph_ordinal_final | graph_range_final
        graph_ordinal_final = self.add_tokens(final_graph)
        self.fst = graph_ordinal_final.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/ordinal.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst, convert_space
from nemo_text_processing.text_normalization.zh.utils import get_abs_path, load_labels
from pynini.lib import pynutil
class WhiteListFst(GraphFst):
    """
    Finite state transducer for classifying whitelist, e.g.
    "Mr." -> tokens { name: "mister" }
    This class has highest priority among all classifier grammars. Whitelisted tokens are defined and loaded from "data/whitelist.tsv".

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        input_file: path to a file with whitelist replacements
    """

    def __init__(self, input_case: str, deterministic: bool = True, input_file: str = None):
        super().__init__(name="whitelist", kind="classify", deterministic=deterministic)

        def _get_whitelist_graph(input_case, file):
            # Build a string_map from a TSV whitelist, lower-casing keys on request.
            rows = load_labels(file)
            if input_case == "lower_cased":
                rows = [[row[0].lower()] + row[1:] for row in rows]
            return pynini.string_map(rows)

        graph = _get_whitelist_graph(input_case, get_abs_path("data/whitelist.tsv"))
        if not deterministic and input_case != "lower_cased":
            # Also accept lower-cased keys, with a slight weight penalty.
            lowered = _get_whitelist_graph("lower_cased", get_abs_path("data/whitelist.tsv"))
            graph |= pynutil.add_weight(lowered, weight=0.0001)
        if input_file:
            # A user-provided file replaces the default list in deterministic
            # mode, and is unioned in otherwise.
            whitelist_provided = _get_whitelist_graph(input_case, input_file)
            graph = (graph | whitelist_provided) if not deterministic else whitelist_provided
        if not deterministic:
            graph |= _get_whitelist_graph(input_case, file=get_abs_path("data/measure/measurements.tsv"))

        self.graph = graph
        self.final_graph = convert_space(self.graph).optimize()
        self.fst = (pynutil.insert("name: \"") + self.final_graph + pynutil.insert("\"")).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/whitelist.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import (
NEMO_CHAR,
NEMO_DIGIT,
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.zh.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.zh.taggers.date import DateFst
from nemo_text_processing.text_normalization.zh.taggers.decimal import DecimalFst
from nemo_text_processing.text_normalization.zh.taggers.fraction import FractionFst
from nemo_text_processing.text_normalization.zh.taggers.math_symbol import MathSymbol
from nemo_text_processing.text_normalization.zh.taggers.measure import Measure
from nemo_text_processing.text_normalization.zh.taggers.money import MoneyFst
from nemo_text_processing.text_normalization.zh.taggers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.zh.taggers.preprocessor import PreProcessor
from nemo_text_processing.text_normalization.zh.taggers.punctuation import PunctuationFst
from nemo_text_processing.text_normalization.zh.taggers.time import TimeFst
from nemo_text_processing.text_normalization.zh.taggers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.zh.taggers.word import Char
from pynini.lib import pynutil
class ClassifyFst(GraphFst):
    """
    Final class that composes all other classification grammars. This class can process an entire sentence, that is lower cased.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        input_case: accepting either "lower_cased" or "cased" input.
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
        cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
        overwrite_cache: set to True to overwrite .far files
        whitelist: path to a file with whitelist replacements
    """

    def __init__(
        self,
        input_case: str,
        deterministic: bool = False,
        cache_dir: str = None,
        overwrite_cache: bool = False,
        whitelist: str = None,
    ):
        super().__init__(name="tokenize_and_classify", kind="classify", deterministic=deterministic)

        far_file = None
        if cache_dir is not None and cache_dir != "None":
            os.makedirs(cache_dir, exist_ok=True)
            # Cache file name encodes every option that changes the compiled grammar.
            whitelist_file = os.path.basename(whitelist) if whitelist else ""
            far_file = os.path.join(
                # cache_dir, f"_{input_case}_zh_tn_{deterministic}_deterministic{whitelist_file}.far"
                cache_dir,
                f"_{input_case}_zh_tn_{deterministic}_deterministic_{whitelist_file}.far",
            )
        if not overwrite_cache and far_file and os.path.exists(far_file):
            # Fast path: restore the precompiled grammar from the FAR cache instead of rebuilding.
            self.fst = pynini.Far(far_file, mode="r")["tokenize_and_classify"]
            # Companion grammar restricted to outputs that contain no digits.
            no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
            self.fst_no_digits = pynini.compose(self.fst, no_digits).optimize()
            logging.info(f"ClassifyFst.fst was restored from {far_file}.")
        else:
            logging.info(f"Creating ClassifyFst grammars. This might take some time...")
            # Build every semiotic-class tagger; several reuse the cardinal/decimal grammars.
            cardinal = CardinalFst()
            cardinal_graph = cardinal.fst
            ordinal = OrdinalFst(cardinal=cardinal)
            ordinal_graph = ordinal.fst
            decimal = DecimalFst(cardinal=cardinal, deterministic=deterministic)
            decimal_graph = decimal.fst
            fraction = FractionFst(cardinal=cardinal, decimal=decimal, deterministic=deterministic)
            fraction_graph = fraction.fst
            date = DateFst(deterministic=deterministic)
            date_graph = date.fst
            word_graph = Char(deterministic=deterministic).fst
            self.time = TimeFst(deterministic=deterministic)
            time_graph = self.time.fst
            money = MoneyFst(cardinal=cardinal, decimal=decimal, deterministic=deterministic)
            money_graph = money.fst
            self.math = MathSymbol(deterministic=deterministic)
            math_graph = self.math.fst
            self.measure = Measure(cardinal=cardinal, decimal=decimal, deterministic=deterministic)
            measure_graph = self.measure.fst
            self.whitelist = WhiteListFst(input_case=input_case, deterministic=deterministic, input_file=whitelist)
            whitelist_graph = self.whitelist.fst
            punct_graph = PunctuationFst(deterministic=deterministic).fst
            # Union of all taggers; lower (more negative) weight means a path is preferred
            # by the shortest-path decoder.
            classify = (
                pynutil.add_weight(whitelist_graph, 1.001)
                | pynutil.add_weight(cardinal_graph, -2.0)
                | pynutil.add_weight(time_graph, 1.1)
                | pynutil.add_weight(fraction_graph, -1.1)
                | pynutil.add_weight(date_graph, -1.0)
                | pynutil.add_weight(ordinal_graph, 1.1)
                | pynutil.add_weight(decimal_graph, -1.0)
                | pynutil.add_weight(money_graph, -1.1)
                | pynutil.add_weight(math_graph, 1.1)
                | pynutil.add_weight(measure_graph, -1.1)
                | pynutil.add_weight(word_graph, 1.1)
            )
            # High-weight fallback so any input can always be tokenized character-by-character.
            classify |= pynutil.add_weight(word_graph, 100)
            punct = pynutil.insert("tokens { ") + pynutil.add_weight(punct_graph, weight=1.1) + pynutil.insert(" }")
            token = pynutil.insert("tokens { ") + classify + pynutil.insert(" }")
            # A token may be surrounded by any number of punctuation tokens.
            token_plus_punct = (
                pynini.closure(punct + pynutil.insert(" ")) + token + pynini.closure(pynutil.insert(" ") + punct)
            )
            graph = token_plus_punct + pynini.closure(pynutil.add_weight(delete_extra_space, 1.1) + token_plus_punct)
            graph = delete_space + graph + delete_space
            # self.fst = graph.optimize()
            tagger = graph.optimize()

            # Preprocessing (interjection removal, fullwidth->halfwidth) is composed in
            # front of the tagger so it runs on the raw input first.
            preprocessor = PreProcessor(remove_interjections=True, fullwidth_to_halfwidth=True,)
            self.fst = preprocessor.fst @ tagger
            no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
            self.fst_no_digits = pynini.compose(self.fst, no_digits).optimize()

            if far_file:
                generator_main(far_file, {"tokenize_and_classify": self.fst})
                logging.info(f"ClassifyFst grammars are saved to {far_file}.")
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/tokenize_and_classify.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from unicodedata import category
import pynini
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_SPACE, NEMO_SIGMA, GraphFst
from nemo_text_processing.text_normalization.en.utils import get_abs_path, load_labels
from pynini.examples import plurals
from pynini.lib import pynutil
class PunctuationFst(GraphFst):
    """
    Finite state transducer for classifying punctuation
        e.g. a, -> tokens { name: "a" } tokens { name: "," }

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transduction are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="punctuation", kind="classify", deterministic=deterministic)

        # ASCII punctuation plus common fullwidth/Chinese punctuation marks.
        s = "!#%&\'()*+,-./:;<=>?@^_`{|}~\"。,;-《》“”"
        # Square brackets are excluded: pynini reserves them for bracketed symbols.
        punct_symbols_to_exclude = ["[", "]"]
        # Every Unicode codepoint whose category is punctuation ("P*").
        # range(sys.maxunicode + 1) so the last codepoint (U+10FFFF) is covered too;
        # the original `range(sys.maxunicode)` skipped it.
        punct_unicode = [
            chr(i)
            for i in range(sys.maxunicode + 1)
            if category(chr(i)).startswith("P") and chr(i) not in punct_symbols_to_exclude
        ]
        whitelist_symbols = load_labels(get_abs_path("data/whitelist/symbol.tsv"))
        # Use a set for O(1) membership tests: we filter on the order of 10^5 codepoints.
        whitelist_symbols = {x[0] for x in whitelist_symbols}
        # Symbols handled by the whitelist grammar must not be treated as punctuation.
        self.punct_marks = [p for p in punct_unicode + list(s) if p not in whitelist_symbols]

        punct = pynini.union(*self.punct_marks)
        punct = pynini.closure(punct, 1)

        # Keep emphasis tags like <word>, <word/> or </word> intact as one token.
        emphasis = (
            pynini.accep("<")
            + (
                (pynini.closure(NEMO_NOT_SPACE - pynini.union("<", ">"), 1) + pynini.closure(pynini.accep("/"), 0, 1))
                | (pynini.accep("/") + pynini.closure(NEMO_NOT_SPACE - pynini.union("<", ">"), 1))
            )
            + pynini.accep(">")
        )
        # Emphasis tags take priority over plain punctuation runs.
        punct = plurals._priority_union(emphasis, punct, NEMO_SIGMA)

        self.graph = punct
        self.fst = (pynutil.insert("name: \"") + self.graph + pynutil.insert("\"")).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/punctuation.py
|
# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/__init__.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
def get_quantity(decimal):
    """Extend a decimal grammar with a Mandarin magnitude suffix.

    Accepts a decimal immediately followed by a magnitude word (万/亿 and their
    traditional / financial variants) and emits it as a `quantity` field.
    """
    magnitude_words = (
        "万", "十万", "百万", "千万",
        "亿", "十亿", "百亿", "千亿",
        "萬", "十萬", "百萬", "千萬",
        "億", "十億", "百億", "千億",
        "拾萬", "佰萬", "仟萬",
        "拾億", "佰億", "仟億",
        "拾万", "佰万", "仟万",
        "仟亿", "佰亿", "仟亿",
        "万亿", "萬億",
    )
    suffix = pynini.union(*magnitude_words)
    return decimal + pynutil.insert(" quantity: \"") + suffix + pynutil.insert("\"")
class DecimalFst(GraphFst):
    """
    Finite state transducer for classifying decimal, e.g.
        0.5 -> decimal { integer_part: "零" fractional_part: "五" }
        0.5万 -> decimal { integer_part: "零" fractional_part: "五" quantity: "万" }
        -0.5万 -> decimal { negative: "负" integer_part: "零" fractional_part: "五" quantity: "万"}

    Args:
        cardinal: CardinalFst
    """

    def __init__(self, cardinal: GraphFst, deterministic: bool = True, lm: bool = False):
        super().__init__(name="decimal", kind="classify", deterministic=deterministic)

        # Digits before the point are read as one cardinal number; digits after
        # the point are read out one by one (with '0' spoken as 零).
        integer_digits = cardinal.just_cardinals
        fraction_digits = pynini.string_file(get_abs_path("data/number/digit.tsv")) | pynini.closure(
            pynini.cross('0', '零')
        )

        # The literal '.' between the two parts is optional and always deleted.
        drop_point = pynini.closure(pynutil.delete('.'), 0, 1)

        integer_field = pynutil.insert("integer_part: \"") + integer_digits + pynutil.insert("\"")
        fraction_field = (
            pynutil.insert("fractional_part: \"") + pynini.closure(fraction_digits, 1) + pynutil.insert("\"")
        )
        plain_decimal = integer_field + drop_point + pynutil.insert(" ") + fraction_field

        # Sign is either the '-' symbol or an explicit 负/負 character.
        symbol_sign = pynini.closure(pynutil.insert("negative: ") + pynini.cross("-", "\"负\"")) + pynutil.insert(" ")
        character_sign = (
            pynutil.insert('negative: ')
            + pynutil.insert("\"")
            + (pynini.accep('负') | pynini.cross('負', '负'))
            + pynutil.insert("\"")
            + pynutil.insert(' ')
        )
        sign = symbol_sign | character_sign

        # Plain and signed decimals, with or without a magnitude quantity (万/亿/...).
        regular = (sign + plain_decimal) | plain_decimal
        with_quantity = get_quantity(plain_decimal)
        quantity = with_quantity | (sign + with_quantity)

        final_graph = regular | quantity
        # Exposed for reuse by other grammars (e.g. money, measure).
        self.decimal = final_graph
        self.fst = self.add_tokens(final_graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/decimal.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_SIGMA, GraphFst
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class PreProcessor(GraphFst):
    '''
    Preprocessing of TN:
        1. interjections removal such as '啊, 呃'
        2. fullwidth -> halfwidth char conversion
    好啊 -> 好
    呃对 -> 对
    : -> :
    ; -> ;
    '''

    def __init__(
        self, remove_interjections: bool = True, fullwidth_to_halfwidth: bool = True,
    ):
        super().__init__(name="PreProcessor", kind="processor")

        # Start from the identity rewrite and compose each enabled pass onto it.
        pipeline = pynini.cdrewrite('', '', '', NEMO_SIGMA)

        if remove_interjections:
            # Delete every interjection listed in the denylist, anywhere in the string.
            drop_interjections = pynutil.delete(pynini.string_file(get_abs_path('data/denylist/denylist.tsv')))
            pipeline @= pynini.cdrewrite(drop_interjections, '', '', NEMO_SIGMA)

        if fullwidth_to_halfwidth:
            # Map fullwidth characters to their halfwidth equivalents.
            width_map = pynini.string_file(get_abs_path('data/char/fullwidth_to_halfwidth.tsv'))
            pipeline @= pynini.cdrewrite(width_map, '', '', NEMO_SIGMA)

        self.fst = pipeline.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/preprocessor.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class MoneyFst(GraphFst):
    """
    Finite state transducer for classifying moneys, e.g.
    '$23' -> money { integer: "二十三" currency: "美元" }
    '23美元' -> money { integer: "二十三" currency: "美元" }
    """

    def __init__(self, cardinal: GraphFst, decimal: GraphFst, deterministic: bool = True, lm: bool = False):
        super().__init__(name="money", kind="classify", deterministic=deterministic)
        # Reuse the already-built number grammars instead of recompiling them.
        cardinal = cardinal.just_cardinals
        decimal = decimal.decimal

        # currency_major.tsv: currency symbols (e.g. '$') mapped to Mandarin names;
        # currency_mandarin.tsv: currency names already written in Mandarin.
        currency = pynini.string_file(get_abs_path("data/money/currency_major.tsv"))
        currency_mandarin = pynini.string_file(get_abs_path("data/money/currency_mandarin.tsv"))

        # regular money gramamr with currency symbols $1000
        currency_component = pynutil.insert("currency: \"") + currency + pynutil.insert("\"")
        number_component = pynutil.insert("integer: \"") + cardinal + pynutil.insert("\"")
        graph_regular_money = currency_component + pynutil.insert(" ") + number_component

        # 块 元 毛 with optional symbols
        # Traditional forms (塊/塊錢) are rewritten to simplified (块/块钱).
        unit_major = (
            pynini.accep("块")
            | pynini.accep("元")
            | pynini.closure(pynini.cross("塊", "块"), 1)
            | pynini.closure(pynini.cross("塊錢", "块钱"), 1)
            | pynini.accep("块钱")
        )
        unit_minor = pynini.accep("角") | pynini.accep("毛")
        unit_minor_alt = pynini.accep("分")
        currency_mandarin_component = pynutil.insert("currency: \"") + currency_mandarin + pynutil.insert("\"")
        # NOTE(review): the field labels below look shifted relative to unit size —
        # the major unit (块/元) is tagged "currency", the first minor unit (角/毛)
        # "currency_major", and 分 "currency_minor". Verify against the money
        # verbalizer before changing; the verbalizer consumes these exact labels.
        unit_components = (
            (pynutil.insert("currency: \"") + unit_major + pynutil.insert("\""))
            | (pynutil.insert("currency_major: \"") + unit_minor + pynutil.insert("\""))
            | (pynutil.insert("currency_minor: \"") + unit_minor_alt + pynutil.insert("\""))
        )
        graph_unit_only = (
            number_component
            + pynutil.insert(" ")
            + unit_components
            + pynini.closure(pynutil.insert(" ") + currency_mandarin_component, 0, 1)
        )
        # Up to three chained unit groups, e.g. 5块3毛2分.
        graph_units = pynini.closure(graph_unit_only, 1, 3)

        # only currency part as mandarins
        graph_mandarin_money = number_component + pynutil.insert(" ") + currency_mandarin_component

        # larger money as decimals
        graph_decimal_money = (decimal + pynutil.insert(" ") + currency_mandarin_component) | (
            currency_component + pynutil.insert(" ") + decimal
        )

        # The negative weight makes the Mandarin-currency reading preferred when it applies.
        graph = (
            graph_regular_money | graph_units | pynutil.add_weight(graph_mandarin_money, -3.0) | graph_decimal_money
        )
        final_graph = graph
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/money.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from nemo_text_processing.text_normalization.zh.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class MathSymbol(GraphFst):
    '''
    Finite state transducer for classifying math expressions, e.g.
        + -> tokens { sign: "加" }
    '''

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="sign", kind="classify", deterministic=deterministic)
        '''
        add your sign in data/math/symbol.tsv,this graph just convert sigh to character,you can add more
        cases with detailed cases
        '''
        # Signs come from two data files: score separators (e.g. ':') and math symbols.
        score_sign = pynini.string_file(get_abs_path("data/math/score.tsv")) | pynini.string_file(
            get_abs_path("data/math/symbol.tsv")
        )
        # Building CardinalFst is expensive (it compiles the whole number grammar);
        # construct it once and reuse the FST for both operands instead of building
        # two identical instances as before.
        cardinal = CardinalFst().just_cardinals
        # Pattern: [sign] number sign number, e.g. '1+2' or '3:2'.
        score = (
            pynutil.insert("score: \"")
            + pynini.closure(score_sign, 0, 1)
            + cardinal
            + score_sign
            + cardinal
            + pynutil.insert("\"")
        )
        graph = score
        self.fst = graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/math_symbol.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class CardinalFst(GraphFst):
    """
    Finite state transducer for classifying cardinals, e.g.
    '23' -> cardinal { integer: "二十三" }
    -10000 -> cardinal { negative: "负" integer: "一万" }
    +10000 -> cardinal { positive: "正" integer: "一万" }

    The grammar is built bottom-up: teens -> tens -> hundreds -> ... -> hundred
    billions, each level reusing the previous ones. Throughout, paired
    alternatives handle both the ASCII comma ',' and the fullwidth comma ','
    as thousands separators. Negative weights on higher magnitudes make the
    longest (most complete) reading preferred.
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="cardinal", kind="classify", deterministic=deterministic)
        # imports: digit mappings, e.g. '1' -> '一'; digit_tens, e.g. '2' -> '二十'
        zero = pynini.string_file(get_abs_path("data/number/zero.tsv"))
        digit = pynini.string_file(get_abs_path("data/number/digit.tsv"))
        digit_tens = pynini.string_file(get_abs_path("data/number/digit_tens.tsv"))

        # morphemes inserted (magnitude characters) + punctuation deletion
        tens_digit = pynutil.insert('十')
        hundred_digit = pynutil.insert('百')
        thousand_digit = pynutil.insert('千')
        tenthousand_digit = pynutil.insert('万')
        hundredmillion_digit = pynutil.insert('亿')
        # deletes ASCII or fullwidth thousands-separator commas
        delete_punct = pynini.closure(pynutil.delete(',') | pynutil.delete(','))

        # 十几; 10-19
        graph_teen = (
            pynini.closure(delete_punct)
            + pynini.cross('1', '十')
            + (
                (pynini.closure(delete_punct) + (pynini.closure(delete_punct) + digit))
                | (pynini.closure(delete_punct) + pynini.cross('0', ''))
            )
        )
        # 十几; 10-19 but when not alone, but within a larger number, (e.g, 119)
        graph_teen_alt = (
            (pynini.closure(delete_punct) + (pynini.cross('1', '一十') + pynini.closure(delete_punct) + digit))
            | (pynini.closure(delete_punct) + pynini.cross('10', '一十'))
            | (pynini.closure(delete_punct) + (pynini.cross('1,0', '一十') | pynini.cross('1,0', '一十')))
        )  # when the teen is not by itself but with in a larger number

        # 几十; 20-99
        graph_tens = (
            pynini.closure(delete_punct)
            + (digit_tens + tens_digit + pynini.closure(delete_punct) + ((pynini.closure(delete_punct) + digit)))
        ) | (
            digit_tens + tens_digit + (pynini.closure(delete_punct) + (pynini.cross('0', '') | pynini.cross(',0', '')))
        )

        # 百; 100-999; hundreds
        graph_hundred = (
            (
                digit
                + (
                    pynutil.delete('00')
                    | (pynutil.delete(',00') | pynutil.delete(',00'))
                    | (pynutil.delete('0,0') | pynutil.delete('0,0'))
                )
                + hundred_digit
            )
            | (digit + hundred_digit + (graph_tens | graph_teen_alt))
            | (
                digit
                + hundred_digit
                + (
                    (pynini.cross(',0', '零') | pynini.cross(',0', '零'))
                    | pynini.cross('0', '零')
                    | (pynini.cross('0,', '零') | pynini.cross('0,', '零'))
                )
                + digit
            )
        )

        # 千; 1000-9999; thousands
        graph_thousand = (
            (
                digit
                + (
                    (pynutil.delete(',000') | pynutil.delete('000') | pynutil.delete('0,00') | pynutil.delete('00,0'))
                    | (
                        pynutil.delete(',000')
                        | pynutil.delete('000')
                        | pynutil.delete('0,00')
                        | pynutil.delete('00,0')
                    )
                )
                + thousand_digit
            )
            | (digit + pynini.closure(delete_punct) + thousand_digit + graph_hundred)
            | (
                digit
                + thousand_digit
                + (pynini.cross('0', '零') | ((pynini.cross(',0', '零') | pynini.cross(',0', '零'))))
                + (graph_tens | graph_teen_alt)
            )
            | (
                digit
                + pynini.closure(delete_punct)
                + thousand_digit
                + (
                    pynini.cross('00', '零')
                    | (pynini.cross(',00', '零') | pynini.cross(',00', '零'))
                    | (pynini.cross('0,0', '零') | pynini.cross('0,0', '零'))
                    | (pynini.cross('00,', '零') | pynini.cross('00,', '零'))
                )
                + digit
            )
        )

        # 万; 10000-99999; ten thousands
        graph_tenthousand = (
            (
                digit
                + (pynutil.delete('0000') | (pynutil.delete('0,000') | pynutil.delete('0,000')))
                + tenthousand_digit
            )
            | (digit + tenthousand_digit + graph_thousand)
            | (
                digit
                + tenthousand_digit
                + (pynini.cross('0', '零') | (pynini.cross('0,', '零') | pynini.cross('0,', '零')))
                + graph_hundred
            )
            | (
                digit
                + tenthousand_digit
                + (pynini.cross('00', '零') | (pynini.cross('0,0', '零') | pynini.cross('0,0', '零')))
                + (graph_tens | graph_teen_alt)
            )
            | (
                digit
                + tenthousand_digit
                + (pynini.cross('000', '零') | (pynini.cross('0,00', '零') | pynini.cross('0,00', '零')))
                + digit
            )
        )

        # 十万; 100000-999999; hundred thousands
        graph_hundredthousand = (
            pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + tenthousand_digit
                    + (pynutil.delete('0000') | (pynutil.delete('0,000') | pynutil.delete('0,000')))
                ),
                -0.1,
            )
            | ((graph_tens | graph_teen) + tenthousand_digit + graph_thousand)
            | (
                (graph_tens | graph_teen)
                + tenthousand_digit
                + (pynini.cross('0', '零') | (pynini.cross('0,', '零') | pynini.cross('0,', '零')))
                + graph_hundred
            )
            | (
                (graph_tens | graph_teen)
                + tenthousand_digit
                + (pynini.cross('00', '零') | (pynini.cross('0,0', '零') | pynini.cross('0,0', '零')))
                + (graph_tens | graph_teen_alt)
            )
            | (
                (graph_tens | graph_teen)
                + tenthousand_digit
                + (pynini.cross('000', '零') | (pynini.cross('0,00', '零') | pynini.cross('0,00', '零')))
                + digit
            )
        )

        # 百万; 1000000-9999999; millions
        graph_million = (
            pynutil.add_weight(
                (
                    graph_hundred
                    + tenthousand_digit
                    + (pynutil.delete('0000') | (pynutil.delete('0,000') | pynutil.delete('0,000')))
                ),
                -1.0,
            )
            | (graph_hundred + tenthousand_digit + graph_thousand)
            | (
                graph_hundred
                + tenthousand_digit
                + (pynini.cross('0', '零') | (pynini.cross('0,', '零') | pynini.cross('0,', '零')))
                + graph_hundred
            )
            | (
                graph_hundred
                + tenthousand_digit
                + (pynini.cross('00', '零') | (pynini.cross('0,0', '零') | pynini.cross('0,0', '零')))
                + (graph_tens | graph_teen_alt)
            )
            | (
                graph_hundred
                + tenthousand_digit
                + (pynini.cross('000', '零') | (pynini.cross('0,00', '零') | pynini.cross('0,00', '零')))
                + digit
            )
        )

        # 千万; 10000000-99999999; ten millions
        graph_tenmillion = (
            pynutil.add_weight(
                (
                    graph_thousand
                    + (pynutil.delete('0000') | (pynutil.delete('0,000') | pynutil.delete('0,000')))
                    + tenthousand_digit
                ),
                -1.0,
            )
            | (graph_thousand + tenthousand_digit + graph_thousand)
            | (
                graph_thousand
                + tenthousand_digit
                + (pynini.cross('0', '零') | (pynini.cross('0,', '零') | pynini.cross('0,', '零')))
                + graph_hundred
            )
            | (
                graph_thousand
                + tenthousand_digit
                + (pynini.cross('00', '零') | (pynini.cross('0,0', '零') | pynini.cross('0,0', '零')))
                + (graph_tens | graph_teen_alt)
            )
            | (
                graph_thousand
                + tenthousand_digit
                + (pynini.cross('000', '零') | (pynini.cross('0,00', '零') | pynini.cross('0,00', '零')))
                + digit
            )
        )

        # 亿; 100000000-999999999; hundred millions
        # Branch weights go from -2.0 (all zeros after the leading digit) up to
        # -1.2, preferring readings that consume more trailing zeros.
        graph_hundredmillion = (
            pynutil.add_weight(
                (
                    digit
                    + (pynutil.delete('00000000') | (pynutil.delete('00,000,000') | pynutil.delete('00,000,000')))
                    + hundredmillion_digit
                ),
                -2.0,
            )
            | pynutil.add_weight((digit + hundredmillion_digit + graph_tenmillion), -1.9)
            | pynutil.add_weight((digit + hundredmillion_digit + pynutil.delete('0') + graph_million), -1.8)
            | pynutil.add_weight(
                (digit + hundredmillion_digit + pynutil.delete('00') + pynutil.insert('零') + graph_hundredthousand),
                -1.7,
            )
            | pynutil.add_weight(
                (
                    digit
                    + hundredmillion_digit
                    + (pynutil.delete('000') | (pynutil.delete('00,0') | pynutil.delete('00,0')))
                    + pynutil.insert('零')
                    + graph_tenthousand
                ),
                -1.6,
            )
            | pynutil.add_weight(
                (
                    digit
                    + hundredmillion_digit
                    + (pynutil.delete('0000') | (pynutil.delete('00,00') | pynutil.delete('00,00')))
                    + pynutil.insert('零')
                    + graph_thousand
                ),
                -1.5,
            )
            | pynutil.add_weight(
                (
                    digit
                    + hundredmillion_digit
                    + (pynutil.delete('00000') | (pynutil.delete('00,000,') | pynutil.delete('00,000,')))
                    + pynutil.insert('零')
                    + graph_hundred
                ),
                -1.4,
            )
            | pynutil.add_weight(
                (
                    digit
                    + hundredmillion_digit
                    + (pynutil.delete('000000') | (pynutil.delete('00,000,0') | pynutil.delete('00,000,0')))
                    + pynutil.insert('零')
                    + (graph_tens | graph_teen_alt)
                ),
                -1.3,
            )
            | pynutil.add_weight(
                (
                    digit
                    + hundredmillion_digit
                    + (pynutil.delete('0000000') | (pynutil.delete('00,000,00') | pynutil.delete('00,000,00')))
                    + pynutil.insert('零')
                    + digit
                ),
                -1.2,
            )
        )

        # 十亿; 1000000000-9999999999; billions
        graph_billion = (
            pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + (pynutil.delete('00000000') | (pynutil.delete('00,000,000') | pynutil.delete('00,000,000')))
                    + hundredmillion_digit
                ),
                -2.0,
            )
            | pynutil.add_weight(((graph_tens | graph_teen) + hundredmillion_digit + graph_tenmillion), -1.9)
            | pynutil.add_weight(
                ((graph_tens | graph_teen) + hundredmillion_digit + pynutil.delete('0') + graph_million), -1.8
            )
            | pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + hundredmillion_digit
                    + pynutil.delete('00')
                    + pynutil.insert('零')
                    + graph_hundredthousand
                ),
                -1.7,
            )
            | pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + hundredmillion_digit
                    + (pynutil.delete('000') | (pynutil.delete('00,0') | pynutil.delete('00,0')))
                    + pynutil.insert('零')
                    + graph_tenthousand
                ),
                -1.6,
            )
            | pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + hundredmillion_digit
                    + (pynutil.delete('0000') | (pynutil.delete('00,00') | pynutil.delete('00,00')))
                    + pynutil.insert('零')
                    + graph_thousand
                ),
                -1.5,
            )
            | pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + hundredmillion_digit
                    + (pynutil.delete('00000') | (pynutil.delete('00,000,') | pynutil.delete('00,000,')))
                    + pynutil.insert('零')
                    + graph_hundred
                ),
                -1.4,
            )
            | pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + hundredmillion_digit
                    + (pynutil.delete('000000') | (pynutil.delete('00,000,0') | pynutil.delete('00,000,0')))
                    + pynutil.insert('零')
                    + (graph_tens | graph_teen_alt)
                ),
                -1.3,
            )
            | pynutil.add_weight(
                (
                    (graph_tens | graph_teen)
                    + hundredmillion_digit
                    + (pynutil.delete('0000000') | (pynutil.delete('00,000,00') | pynutil.delete('00,000,00')))
                    + pynutil.insert('零')
                    + digit
                ),
                -1.2,
            )
        )

        # 百亿; 10000000000-99999999999; ten billions
        graph_tenbillion = (
            pynutil.add_weight(
                (
                    graph_hundred
                    + (pynutil.delete('00000000') | (pynutil.delete('00,000,000') | pynutil.delete('00,000,000')))
                    + hundredmillion_digit
                ),
                -2.0,
            )
            | pynutil.add_weight((graph_hundred + hundredmillion_digit + graph_tenmillion), -1.9)
            | pynutil.add_weight((graph_hundred + hundredmillion_digit + pynutil.delete('0') + graph_million), -1.8)
            | pynutil.add_weight(
                (
                    graph_hundred
                    + hundredmillion_digit
                    + pynutil.delete('00')
                    + pynutil.insert('零')
                    + graph_hundredthousand
                ),
                -1.7,
            )
            | pynutil.add_weight(
                (
                    graph_hundred
                    + hundredmillion_digit
                    + (pynutil.delete('000') | (pynutil.delete('00,0') | pynutil.delete('00,0')))
                    + pynutil.insert('零')
                    + graph_tenthousand
                ),
                -1.6,
            )
            | pynutil.add_weight(
                (
                    graph_hundred
                    + hundredmillion_digit
                    + (pynutil.delete('0000') | (pynutil.delete('00,00') | pynutil.delete('00,00')))
                    + pynutil.insert('零')
                    + graph_thousand
                ),
                -1.5,
            )
            | pynutil.add_weight(
                (
                    graph_hundred
                    + hundredmillion_digit
                    + (pynutil.delete('00000') | (pynutil.delete('00,000,') | pynutil.delete('00,000,')))
                    + pynutil.insert('零')
                    + graph_hundred
                ),
                -1.4,
            )
            | pynutil.add_weight(
                (
                    graph_hundred
                    + hundredmillion_digit
                    + (pynutil.delete('000000') | (pynutil.delete('00,000,0') | pynutil.delete('00,000,0')))
                    + pynutil.insert('零')
                    + (graph_tens | graph_teen_alt)
                ),
                -1.3,
            )
            | pynutil.add_weight(
                (
                    graph_hundred
                    + hundredmillion_digit
                    + (pynutil.delete('0000000') | (pynutil.delete('00,000,00') | pynutil.delete('00,000,00')))
                    + pynutil.insert('零')
                    + digit
                ),
                -1.2,
            )
        )

        # 千亿; 100000000000-999999999999; hundred billions
        graph_hundredbillion = (
            pynutil.add_weight(
                (
                    graph_thousand
                    + hundredmillion_digit
                    + (pynutil.delete('00000000') | (pynutil.delete('00,000,000') | pynutil.delete('00,000,000')))
                ),
                -2.0,
            )
            | pynutil.add_weight((graph_thousand + hundredmillion_digit + graph_tenmillion), -1.9)
            | pynutil.add_weight((graph_thousand + hundredmillion_digit + pynutil.delete('0') + graph_million), -1.8)
            | pynutil.add_weight(
                (
                    graph_thousand
                    + hundredmillion_digit
                    + pynutil.delete('00')
                    + pynutil.insert('零')
                    + graph_hundredthousand
                ),
                -1.7,
            )
            | pynutil.add_weight(
                (
                    graph_thousand
                    + hundredmillion_digit
                    + (pynutil.delete('000') | (pynutil.delete('00,0') | pynutil.delete('00,0')))
                    + pynutil.insert('零')
                    + graph_tenthousand
                ),
                -1.6,
            )
            | pynutil.add_weight(
                (
                    graph_thousand
                    + hundredmillion_digit
                    + (pynutil.delete('0000') | (pynutil.delete('00,00') | pynutil.delete('00,00')))
                    + pynutil.insert('零')
                    + graph_thousand
                ),
                -1.5,
            )
            | pynutil.add_weight(
                (
                    graph_thousand
                    + hundredmillion_digit
                    + (pynutil.delete('00000') | (pynutil.delete('00,000,') | pynutil.delete('00,000,')))
                    + pynutil.insert('零')
                    + graph_hundred
                ),
                -1.4,
            )
            | pynutil.add_weight(
                (
                    graph_thousand
                    + hundredmillion_digit
                    + (pynutil.delete('000000') | (pynutil.delete('00,000,0') | pynutil.delete('00,000,0')))
                    + pynutil.insert('零')
                    + (graph_tens | graph_teen_alt)
                ),
                -1.3,
            )
            | pynutil.add_weight(
                (
                    graph_thousand
                    + hundredmillion_digit
                    + (pynutil.delete('0000000') | (pynutil.delete('00,000,00') | pynutil.delete('00,000,00')))
                    + pynutil.insert('零')
                    + digit
                ),
                -1.2,
            )
        )

        # Magnitude words accepted verbatim after a number, covering simplified,
        # traditional, and financial ("banker's") character variants.
        suffix = pynini.union(
            "万",
            "十万",
            "百万",
            "千万",
            "亿",
            "十亿",
            "百亿",
            "千亿",
            "萬",
            "十萬",
            "百萬",
            "千萬",
            "億",
            "十億",
            "百億",
            "千億",
            "拾萬",
            "佰萬",
            "仟萬",
            "拾億",
            "佰億",
            "仟億",
            "拾万",
            "佰万",
            "仟万",
            "仟亿",
            "佰亿",
            "仟亿",
            "万亿",
            "萬億",
        )
        # Numbers written with explicit magnitude words, e.g. '3万', '2千万'.
        graph_mandarin = pynini.closure(
            (
                (
                    digit
                    | graph_teen
                    | graph_tens
                    | graph_hundred
                    | graph_thousand
                    | graph_tenthousand
                    | graph_hundredthousand
                )
                + suffix
            )
        )

        # combining all the graph above; more negative weight = larger magnitude preferred
        graph = pynini.union(
            pynutil.add_weight(graph_hundredbillion, -2.0),
            pynutil.add_weight(graph_tenbillion, -1.9),
            pynutil.add_weight(graph_billion, -1.8),
            pynutil.add_weight(graph_hundredmillion, -1.7),
            pynutil.add_weight(graph_tenmillion, -1.6),
            pynutil.add_weight(graph_million, -1.5),
            pynutil.add_weight(graph_hundredthousand, -1.4),
            pynutil.add_weight(graph_tenthousand, -1.3),
            pynutil.add_weight(graph_thousand, -1.2),
            pynutil.add_weight(graph_hundred, -1.1),
            pynutil.add_weight(graph_tens, -1.0),
            graph_teen,
            digit,
            zero,
        )

        # adding optional +(正)/-(负) signs; traditional 負 is normalized to 负
        graph_sign = (
            (pynutil.insert("positive: \"") + pynini.accep("正") + pynutil.insert("\""))
            | (pynutil.insert("negative: \"") + pynini.accep("负") + pynutil.insert("\""))
            | (pynutil.insert("negative: \"") + pynini.cross("負", "负") + pynutil.insert("\""))
            | (pynutil.insert("negative: \"") + pynini.cross("-", "负") + pynutil.insert("\""))
            | (pynutil.insert("positive: \"") + pynini.cross("+", "正") + pynutil.insert("\""))
        )
        graph_mandarin_sign = graph_sign + pynutil.insert(" ") + graph_mandarin

        # final graph
        final_graph_sign = (
            graph_sign + pynutil.insert(" ") + pynutil.insert("integer: \"") + graph + pynutil.insert("\"")
        )
        final_graph_numbers_only = pynutil.insert("integer: \"") + graph + pynutil.insert("\"")

        # imported/reused when building other grammars (decimal, money, measure, ...)
        self.just_cardinals = graph | graph_mandarin | final_graph_sign | graph_mandarin_sign

        graph_mandarins = pynutil.insert("integer: \"") + graph_mandarin + pynutil.insert("\"")
        final_graph = final_graph_numbers_only | final_graph_sign | graph_mandarins | graph_mandarin_sign
        final_graph = self.add_tokens(final_graph)
        self.fst = final_graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/cardinal.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class DateFst(GraphFst):
    """
    Finite state transducer for classifying dates, e.g.
    2002年 -> tokens { date { year: "二零零二" } }
    2002-01-28 -> tokens { date { year: "二零零二" month: "一" day: "二十八"} }
    2002/01/28 -> tokens { date { year: "二零零二" month: "一" day: "二十八"} }
    2002.01.28 -> tokens { date { year: "二零零二" month: "一" day: "二十八"} }
    2002年2月 -> tokens { date { year: "二零零二" month: "二" } }
    2月11日 -> tokens { date { month: "二" day: "十一" } }
    2002/02 -> is an error format according to the national standard
    02/11 -> is an error format according to the national standard
    According to the national standard, only when the year, month, and day all exist is it allowed to use symbols to separate them.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transductions are generated (used for audio-based normalization)
        lm: unused here; kept for signature consistency with the other taggers
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="date", kind="classify", deterministic=deterministic)
        # digit-by-digit readings (used for years, e.g. 2002 -> 二零零二)
        graph_digit = pynini.string_file(get_abs_path("data/number/digit.tsv"))
        graph_zero = pynini.string_file(get_abs_path("data/number/zero.tsv"))
        # month/day number readings (e.g. 01 -> 一, 28 -> 二十八)
        month = pynini.string_file(get_abs_path("data/date/months.tsv"))
        day = pynini.string_file(get_abs_path("data/date/day.tsv"))
        # era suffixes appended after a year (e.g. 年)
        suffix = pynini.string_file(get_abs_path("data/date/suffixes.tsv"))
        # separators allowed between year/month/day in symbol-separated dates
        delete_sign = pynutil.delete('/') | pynutil.delete('-') | pynutil.delete('.') | pynutil.delete('·')
        # day markers: 号/號 (colloquial) and 日 (formal)
        delete_day = pynutil.delete('号') | pynutil.delete('號') | pynutil.delete('日')
        # grammar for only year, month, or day
        # accept at least two digits to distinguish a year from an hour used for time
        # don't accept 日 here to distinguish from the day marker used for time
        only_year = (
            pynutil.insert("year: \"")
            + pynini.closure(graph_digit | graph_zero, 2)
            + pynutil.delete('年')
            + pynutil.insert("\"")
        )
        only_month = pynutil.insert("month: \"") + month + pynutil.delete('月') + pynutil.insert("\"")
        only_day = pynutil.insert("day: \"") + day + delete_day + pynutil.insert("\"")
        graph_only_date = only_year | only_month | only_day
        # combinations of character-marked fields: 年+月, 月+日, 年+月+日
        year_month = only_year + pynutil.insert(' ') + only_month
        month_day = only_month + pynutil.insert(' ') + only_day
        graph_all = only_year + pynutil.insert(' ') + only_month + pynutil.insert(' ') + only_day
        graph_combination = year_month | month_day | graph_all
        # symbol-separated dates (e.g. 2002/01/28): per the national standard,
        # all three fields must be present
        year_component = (
            pynutil.insert("year: \"")
            + pynini.closure(graph_digit | graph_zero, 2, 4)
            + delete_sign
            + pynutil.insert("\"")
        )
        month_component = pynutil.insert("month: \"") + month + delete_sign + pynutil.insert("\"")
        day_component = pynutil.insert("day: \"") + day + pynutil.insert("\"")
        graph_sign = year_component + pynutil.insert(' ') + month_component + pynutil.insert(' ') + day_component
        # NOTE: graph_all is deliberately rebound here — from now on it covers
        # every date form (single field, symbol-separated, or combination)
        graph_all = graph_only_date | graph_sign | graph_combination
        # era prefixes, e.g. 公元 (CE) / 公元前 (BCE) and variants
        prefix = (
            pynini.accep('公元')
            | pynini.accep('西元')
            | pynini.accep('公元前')
            | pynini.accep('西元前')
            | pynini.accep('纪元')
            | pynini.accep('纪元前')
        )
        prefix_component = pynutil.insert("era: \"") + prefix + pynutil.insert("\"")
        # negative weights favor the era-marked readings over the bare date
        graph_prefix = prefix_component + pynutil.insert(' ') + (pynutil.add_weight(graph_all, -2.0))
        suffix_component = pynutil.insert("era: \"") + suffix + pynutil.insert("\"")
        graph_suffix = (pynutil.add_weight(graph_all, -2.0)) + pynutil.insert(' ') + suffix_component
        graph_affix = graph_prefix | graph_suffix
        # a bare digit-string year followed by an era suffix (no 年 marker)
        graph_suffix_year = (
            pynutil.insert("year: \"") + pynini.closure((graph_digit | graph_zero), 1) + pynutil.insert("\"")
        )
        graph_suffix_year = graph_suffix_year + pynutil.insert(' ') + suffix_component
        graph_with_era = graph_suffix_year | graph_affix
        graph = graph_only_date | graph_combination | graph_sign | graph_with_era
        # range, e.g. 从2002年到2003年 or 2002年-2003年
        symbol = pynini.accep("-") | pynini.accep("~") | pynini.accep("——") | pynini.accep("—")
        range_source = pynutil.insert("range: \"") + pynini.closure("从", 0, 1) + pynutil.insert("\"")
        range_goal = (
            pynutil.insert("range: \"")
            + (pynini.closure("到", 0, 1) | pynini.closure("至", 0, 1) | symbol)
            + pynutil.insert("\"")
        )
        graph_source = (
            range_source + pynutil.insert(' ') + graph + pynutil.insert(' ') + range_goal + pynutil.insert(' ') + graph
        )
        graph_goal = graph + pynutil.insert(' ') + range_goal + pynutil.insert(' ') + graph
        graph_range_final = graph_source | graph_goal
        # prefer single dates over ranges when both parses exist
        final_graph = pynutil.add_weight(graph, -2.0) | graph_range_final
        # exposed for reuse when other grammars need date parsing
        self.final_graph = final_graph.optimize()
        self.fst = self.add_tokens(self.final_graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/date.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_SPACE, GraphFst
from pynini.lib import pynutil
class Char(GraphFst):
    """
    Fallback tagger for any whitespace-free token no other grammar claims, e.g.
    你 -> char { name: "你" }
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="char", kind="classify", deterministic=deterministic)
        # one or more non-space characters, wrapped verbatim in a name field
        token = pynini.closure(NEMO_NOT_SPACE, 1)
        tagged = pynutil.insert("name: \"") + token + pynutil.insert("\"")
        self.fst = tagged.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/taggers/word.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil
class TimeFst(GraphFst):
    """
    Finite state transducer for verbalizing time e.g.
    tokens { time { hour: "五点" } } -> 五点
    tokens { time { minute: "三分" } } -> 三分
    tokens { time { hour: "五点" minute: "三分" } } -> 五点三分
    tokens { time { suffix: "am" hour: "五点" verb: "差" minute: "三分" } } -> 早上五点差三分
    tokens { time { suffix: "am" hour: "一点" minute: "三分" } } -> 深夜一点三分

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple transductions are generated (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="time", kind="verbalize", deterministic=deterministic)
        # data imported to map am/pm markers into Mandarin period words
        alphabet_am = pynini.string_file(get_abs_path("data/time/AM.tsv"))
        alphabet_pm = pynini.string_file(get_abs_path("data/time/PM.tsv"))
        # fundamental components: strip each `field: "..."` wrapper, keep the payload
        hour_component = pynutil.delete("hour: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        minute_component = pynutil.delete("minute: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        second_component = pynutil.delete("second: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        # any single field or in-order combination of hour/minute/second
        graph_regular = (
            hour_component
            | minute_component
            | second_component
            | (hour_component + delete_space + minute_component + delete_space + second_component)
            | (hour_component + delete_space + minute_component)
            | (hour_component + delete_space + second_component)
            | (minute_component + delete_space + second_component)
        )
        # back count, e.g. 三点差五分 ("five minutes to three"):
        # the 差 verb may be tagged before the hour or between hour and minute/second
        delete_verb = pynutil.delete("verb: \"") + pynini.accep("差") + pynutil.delete("\"")
        graph_back_count = (
            (
                pynini.closure(delete_verb + pynutil.insert(' '))
                + hour_component
                + delete_space
                + pynini.closure(delete_verb)
                + delete_space
                + minute_component
            )
            | (
                pynini.closure(delete_verb + pynutil.insert(' '))
                + hour_component
                + delete_space
                + pynini.closure(delete_verb)
                + delete_space
                + second_component
            )
            | (
                pynini.closure(delete_verb + pynutil.insert(' '))
                + hour_component
                + delete_space
                + pynini.closure(delete_verb)
                + delete_space
                + minute_component
                + delete_space
                + second_component
            )
        )
        graph = graph_regular | graph_back_count
        # optional am/pm suffix verbalized via the TSV mappings
        delete_suffix = pynutil.delete("suffix: \"") + pynini.closure(alphabet_am | alphabet_pm) + pynutil.delete("\"")
        graph = graph | (graph + delete_space + delete_suffix)
        final_graph = graph
        delete_tokens = self.delete_tokens(final_graph)
        self.fst = delete_tokens.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/verbalizers/time.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class Measure(GraphFst):
    """
    Finite state transducer for verbalizing measures, e.g.
    tokens { measure { cardinal: "一" units: "千克" } } -> 一千克
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="measure", kind="verbalize", deterministic=deterministic)
        # strip each serialized field down to its quoted payload
        sign = pynutil.delete("negative: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        whole = pynutil.delete("integer: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        unit = pynutil.delete("units: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        int_part = pynutil.delete("integer_part: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        frac_part = pynutil.delete("fractional_part: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        # cardinal measure: number immediately followed by its unit
        cardinal_measure = whole + delete_space + unit
        # decimal measure: 点 is inserted between the integer and fractional parts
        decimal_measure = int_part + pynutil.insert("点") + delete_space + frac_part + delete_space + unit
        # an optional negative sign may precede either form
        graph = pynini.closure(sign + delete_space) + (cardinal_measure | decimal_measure)
        self.fst = self.delete_tokens(graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/verbalizers/measure.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class FractionFst(GraphFst):
    """
    Finite state transducer for verbalizing fractions, e.g.
    tokens { fraction { denominator: "二" numerator: "一"} } -> 二分之一
    tokens { fraction { integer_part: "一" denominator: "二" numerator: "一" } } -> 一又二分之一
    """

    def __init__(self, decimal: GraphFst, deterministic: bool = True, lm: bool = False):
        super().__init__(name="fraction", kind="verbalize", deterministic=deterministic)
        decimal_component = decimal.decimal_component

        def _unwrap(field_name):
            # consume `field_name: "..."`, emitting only the quoted payload
            return (
                pynutil.delete(field_name + ":")
                + delete_space
                + pynutil.delete("\"")
                + pynini.closure(NEMO_NOT_QUOTE)
                + pynutil.delete("\"")
            )

        # the integer part additionally emits the joining morpheme 又 ("and")
        whole_part = (
            pynutil.delete("integer_part:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.insert("又")
            + pynutil.delete("\"")
        )
        denominator = _unwrap("denominator")
        numerator = _unwrap("numerator")
        sign = _unwrap("positive") | _unwrap("negative")

        # Mandarin reads fractions denominator-first: <denominator>分之<numerator>
        with_integer = (
            whole_part + delete_space + denominator + delete_space + pynutil.insert('分之') + numerator
        )
        without_integer = denominator + delete_space + pynutil.insert('分之') + numerator
        core = with_integer | without_integer
        signed = sign + delete_space + core
        # decimal numerator, e.g. 三分之一点五
        with_decimal = denominator + delete_space + pynutil.insert('分之') + decimal_component

        final_graph = signed | core | with_decimal
        self.fst = self.delete_tokens(final_graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/verbalizers/fraction.py
|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
from pynini.lib import pynutil
class OrdinalFst(GraphFst):
    """
    Finite state transducer for verbalizing ordinals, e.g.
    tokens { ordinal { integer: "第一千万" } } -> 第一千万
    """

    def __init__(self):
        super().__init__(name="ordinal", kind="verbalize")
        # range separator symbols that are read aloud as 到 ("to")
        separator_as_dao = pynini.cross(pynini.union("-", "~", "——", "—"), "到")
        # unwrap `range: "..."`; the payload is 从/到 (possibly absent) or a separator symbol
        range_morpheme = (
            pynutil.delete("range: \"")
            + (pynini.closure('从') | (pynini.closure('到') | separator_as_dao))
            + pynutil.delete("\"")
        )
        # unwrap `integer: "..."`, preserving an optional leading 第 marker
        ordinal_number = (
            pynutil.delete("integer:")
            + delete_space
            + pynutil.delete("\"")
            + pynini.closure("第", 0, 1)
            + pynini.closure(NEMO_NOT_QUOTE)
            + pynutil.delete("\"")
        )
        # range form, e.g. 从第一到第十 or 第一到第十
        ordinal_range = (
            pynini.closure(range_morpheme, 0, 1)
            + pynini.closure(delete_space, 0, 1)
            + ordinal_number
            + delete_space
            + range_morpheme
            + delete_space
            + ordinal_number
        )
        # prefer the single-ordinal reading when both parses exist
        graph = pynutil.add_weight(ordinal_number, -2.0) | ordinal_range
        self.fst = self.delete_tokens(graph).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/verbalizers/ordinal.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import (
NEMO_ALPHA,
NEMO_DIGIT,
NEMO_PUNCT,
NEMO_SIGMA,
NEMO_WHITE_SPACE,
GraphFst,
)
from nemo_text_processing.text_normalization.zh.utils import get_abs_path
from pynini.lib import pynutil, utf8
class PostProcessor(GraphFst):
    '''
    Postprocessing of TN, now contains:
    1. punctuation removal
    2. letter case conversion
    3. oov tagger
    '''

    def __init__(
        self, remove_puncts: bool = False, to_upper: bool = False, to_lower: bool = False, tag_oov: bool = False,
    ):
        super().__init__(name="PostProcessor", kind="processor")
        # start from the identity rewrite; each enabled stage is composed onto it
        graph = pynini.cdrewrite('', '', '', NEMO_SIGMA)
        if remove_puncts:
            # delete both ASCII and Chinese punctuation everywhere in the string
            remove_puncts_graph = pynutil.delete(
                pynini.union(NEMO_PUNCT, pynini.string_file(get_abs_path('data/char/punctuations_zh.tsv')))
            )
            graph @= pynini.cdrewrite(remove_puncts_graph, "", "", NEMO_SIGMA).optimize()
        if to_upper or to_lower:
            # the TSV maps upper->lower; inverting it yields the lower->upper mapping
            # NOTE: if both flags are set, to_upper takes precedence
            if to_upper:
                conv_cases_graph = pynini.inverse(pynini.string_file(get_abs_path('data/char/upper_to_lower.tsv')))
            else:
                conv_cases_graph = pynini.string_file(get_abs_path('data/char/upper_to_lower.tsv'))
            graph @= pynini.cdrewrite(conv_cases_graph, "", "", NEMO_SIGMA).optimize()
        if tag_oov:
            # the known charset: standard + extended Chinese characters, Chinese
            # punctuation, plus ASCII digits/letters/punctuation/whitespace
            zh_charset_std = pynini.string_file(get_abs_path("data/char/charset_national_standard_2013_8105.tsv"))
            zh_charset_ext = pynini.string_file(get_abs_path("data/char/charset_extension.tsv"))
            zh_charset = (
                zh_charset_std | zh_charset_ext | pynini.string_file(get_abs_path("data/char/punctuations_zh.tsv"))
            )
            en_charset = NEMO_DIGIT | NEMO_ALPHA | NEMO_PUNCT | NEMO_WHITE_SPACE
            charset = zh_charset | en_charset
            # oov_tags.tsv holds one line: "<left-tag>\t<right-tag>"
            with open(get_abs_path("data/char/oov_tags.tsv"), "r") as f:
                tags = f.readline().strip().split('\t')
                assert len(tags) == 2
                ltag, rtag = tags
            # any valid UTF-8 char outside the known charset gets wrapped in tags
            oov_charset = pynini.difference(utf8.VALID_UTF8_CHAR, charset)
            tag_oov_graph = pynutil.insert(ltag) + oov_charset + pynutil.insert(rtag)
            graph @= pynini.cdrewrite(tag_oov_graph, "", "", NEMO_SIGMA).optimize()
        self.fst = graph.optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/verbalizers/postprocessor.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import GraphFst
from nemo_text_processing.text_normalization.zh.verbalizers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.zh.verbalizers.date import DateFst
from nemo_text_processing.text_normalization.zh.verbalizers.decimal import DecimalFst
from nemo_text_processing.text_normalization.zh.verbalizers.fraction import FractionFst
from nemo_text_processing.text_normalization.zh.verbalizers.math_symbol import MathSymbol
from nemo_text_processing.text_normalization.zh.verbalizers.measure import Measure
from nemo_text_processing.text_normalization.zh.verbalizers.money import MoneyFst
from nemo_text_processing.text_normalization.zh.verbalizers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.zh.verbalizers.time import TimeFst
from nemo_text_processing.text_normalization.zh.verbalizers.whitelist import Whitelist
from nemo_text_processing.text_normalization.zh.verbalizers.word import Char
class VerbalizeFst(GraphFst):
    """
    Composes other verbalizer grammars.
    For deployment, this grammar will be compiled and exported to OpenFst Finite State Archive (FAR) File.
    More details to deployment at NeMo/tools/text_processing_deployment.

    Args:
        deterministic: if True will provide a single transduction option,
            for False multiple options (used for audio-based normalization)
    """

    def __init__(self, deterministic: bool = True):
        super().__init__(name="verbalize", kind="verbalize", deterministic=deterministic)
        # decimal is built first because fraction and money reuse it
        decimal = DecimalFst(deterministic=deterministic)
        verbalizers = [
            DateFst(deterministic=deterministic),
            CardinalFst(deterministic=deterministic),
            decimal,
            FractionFst(decimal=decimal, deterministic=deterministic),
            Char(deterministic=deterministic),
            MathSymbol(deterministic=deterministic),
            MoneyFst(decimal=decimal, deterministic=deterministic),
            Measure(deterministic=deterministic),
            OrdinalFst(),
            TimeFst(deterministic=deterministic),
            Whitelist(deterministic=deterministic),
        ]
        # the combined grammar accepts any of the individual verbalizers
        self.fst = pynini.union(*(v.fst for v in verbalizers)).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/verbalizers/verbalize.py
|
# Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pynini
from nemo_text_processing.text_normalization.zh.graph_utils import NEMO_NOT_QUOTE, GraphFst
from pynini.lib import pynutil
class Whitelist(GraphFst):
    """
    Verbalizer for whitelisted tokens, e.g.
    tokens { whitelist: "ATM" } -> A T M
    """

    def __init__(self, deterministic: bool = True, lm: bool = False):
        super().__init__(name="whitelist", kind="verbalize", deterministic=deterministic)
        # the erhua (儿化) marker is dropped entirely from the output
        erhua_removal = pynutil.delete("erhua: \"") + pynutil.delete("儿") + pynutil.delete("\"")
        # unwrap `name: "..."`, keeping the quoted content verbatim
        name_field = pynutil.delete("name: \"") + pynini.closure(NEMO_NOT_QUOTE) + pynutil.delete("\"")
        self.fst = (erhua_removal | name_field).optimize()
|
NeMo-text-processing-main
|
nemo_text_processing/text_normalization/zh/verbalizers/whitelist.py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.