# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2024, WENET COMMUNITY. Xingchen Song (sxc19@tsinghua.org.cn).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

| 16 | +import pynini |
| 17 | +from pynini.lib import pynutil |
| 18 | + |
| 19 | +from tn.processor import Processor |
| 20 | +from tn.utils import get_abs_path |
| 21 | +from tn.english.rules.cardinal import Cardinal |
| 22 | + |
| 23 | + |
class Ordinal(Processor):
    """English ordinal processor.

    Tags written ordinals ("13th") as ordinal { integer: "thirteen" } and
    verbalizes that token back into the spoken form ("thirteenth").
    """

    def __init__(self, deterministic: bool = False):
        """
        Args:
            deterministic: if True will provide a single transduction option,
                for False multiple transduction are generated (used for audio-based normalization)
        """
        super().__init__("ordinal", ordertype="tn")
        self.deterministic = deterministic
        self.build_tagger()
        self.build_verbalizer()

    def build_tagger(self):
        """
        Finite state transducer for classifying ordinal, e.g.
        13th -> ordinal { integer: "thirteen" }
        """
        cardinal_graph = Cardinal(self.deterministic).graph
        # Any run of digits and thousands separators preceding the last digits.
        digit_seq = pynini.closure(self.DIGIT | pynini.accep(","))

        def ending(last_digit, *spellings):
            # Accept numbers whose final digit is `last_digit`, provided the
            # preceding digit (if present) is not "1" — 11/12/13 take "th" —
            # and strip one of the given suffix spellings.
            return (pynini.closure(digit_seq + (self.DIGIT - "1"), 0, 1) +
                    pynini.accep(last_digit) +
                    pynutil.delete(pynini.union(*spellings)))

        st = ending("1", "st", "ST", "ˢᵗ")
        nd = ending("2", "nd", "ND", "ⁿᵈ")
        rd = ending("3", "rd", "RD", "ʳᵈ")
        # All remaining numbers — ending in 0 or 4-9, or in 11/12/13 —
        # take a "th" suffix.
        th = pynini.closure(
            (self.DIGIT - "1" - "2" - "3")
            | (digit_seq + "1" + self.DIGIT)
            | (digit_seq + (self.DIGIT - "1") +
               (self.DIGIT - "1" - "2" - "3")),
            1,
        ) + pynutil.delete(pynini.union("th", "TH", "ᵗʰ"))

        # Strip the suffix, then read the remaining digits as a cardinal.
        self.graph = (st | nd | rd | th) @ cardinal_graph
        tagged = (pynutil.insert("integer: \"") + self.graph +
                  pynutil.insert("\""))
        self.tagger = self.add_tokens(tagged).optimize()

    def build_verbalizer(self):
        """
        Finite state transducer for verbalizing ordinal, e.g.
        ordinal { integer: "thirteen" } -> thirteenth
        """
        ordinal_digit = pynini.string_file(
            get_abs_path("english/data/ordinal/digit.tsv")).invert()
        ordinal_teen = pynini.string_file(
            get_abs_path("english/data/ordinal/teen.tsv")).invert()

        # Unwrap the tagged token: drop `integer:` and the quotes, keep text.
        unquote = (pynutil.delete("integer:") + self.DELETE_SPACE +
                   pynutil.delete("\"") + pynini.closure(self.NOT_QUOTE, 1) +
                   pynutil.delete("\""))

        # Rewrite only at end of string: one->first, teens->teenth,
        # "...ty" -> "...tieth", otherwise just append "th".
        suffix = pynini.cdrewrite(
            ordinal_digit | ordinal_teen | pynini.cross("ty", "tieth")
            | pynutil.insert("th"),
            "",
            "[EOS]",
            pynini.closure(self.VCHAR),
        ).optimize()
        self.graph = pynini.compose(unquote, suffix)
        self.suffix = suffix
        self.verbalizer = self.delete_tokens(self.graph).optimize()