diff --git a/genshi/filters/i18n.py b/genshi/filters/i18n.py
--- a/genshi/filters/i18n.py
+++ b/genshi/filters/i18n.py
@@ -22,7 +22,7 @@
 
 from genshi.core import Attrs, Namespace, QName, START, END, TEXT, START_NS, \
                         END_NS, XML_NAMESPACE, _ensure
-from genshi.template.base import Template, EXPR, SUB
+from genshi.template.base import Template, Context, EXPR, SUB, INCLUDE
 from genshi.template.markup import MarkupTemplate, EXEC
 
 __all__ = ['Translator', 'extract']
@@ -107,6 +107,7 @@
         self.ignore_tags = ignore_tags
         self.include_attrs = include_attrs
         self.extract_text = extract_text
+        self.i18n_domains = []
 
     def __call__(self, stream, ctxt=None, search_text=True, msgbuf=None):
         """Translate any localizable strings in the given stream.
@@ -128,14 +129,27 @@
         include_attrs = self.include_attrs
         ugettext = self.translator.ugettext
         ungettext = self.translator.ungettext
+        try:
+            dugettext = self.translator.dugettext
+            dungettext = self.translator.dungettext
+        except AttributeError:
+            # No domain support, show a warning???
+            dugettext = lambda d, s: ugettext(s)
+            dungettext = lambda d, s, p, n: ungettext(s, p, n)
 
         if not self.extract_text:
             search_text = False
         skip = 0
         i18n_msg = I18N_NAMESPACE['msg']
         i18n_choose = I18N_NAMESPACE['choose']
+        i18n_domain = I18N_NAMESPACE['domain']
         ns_prefixes = []
         xml_lang = XML_NAMESPACE['lang']
+
+        if not ctxt:
+            ctxt = Context()
+
+        i18n_domains = ctxt.get('i18n_domains', [])
 
         for kind, data, pos in stream:
 
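An aside on the try/except AttributeError probe above (this note and the sketch are not part of the patch): the filter duck-types its translations object. Plain gettext catalogs only provide ugettext/ungettext, so domain lookups silently fall back to the default catalog, while an object that also implements dugettext(domain, message) and dungettext(domain, singular, plural, num), such as babel.support.Translations with per-domain catalogs added, gets real domain-aware lookups. A minimal, hypothetical stand-in that satisfies that contract:

class DomainCatalogs(object):
    """Illustrative translations object backed by in-memory per-domain dicts."""

    def __init__(self, catalogs, default='messages'):
        self.catalogs = catalogs          # {domain: {message: translation}}
        self.default = default

    def ugettext(self, message):
        return self.dugettext(self.default, message)

    def ungettext(self, singular, plural, num):
        return self.dungettext(self.default, singular, plural, num)

    def dugettext(self, domain, message):
        return self.catalogs.get(domain, {}).get(message, message)

    def dungettext(self, domain, singular, plural, num):
        if num == 1:
            message = singular
        else:
            message = plural
        return self.catalogs.get(domain, {}).get(message, message)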
@@ -151,23 +165,32 @@
             # handle different events that can be localized
             if kind is START:
                 tag, attrs = data
+
+                if i18n_domain in attrs:
+                    i18n_domains.append((tag, attrs.get(i18n_domain).strip()))
+                    attrs -= i18n_domain
+
                 if tag in self.ignore_tags or \
                         isinstance(attrs.get(xml_lang), basestring):
                     skip += 1
                     yield kind, data, pos
                     continue
-
+
                 new_attrs = []
                 changed = False
                 for name, value in attrs:
                     newval = value
                     if search_text and isinstance(value, basestring):
                         if name in include_attrs:
-                            newval = ugettext(value)
+                            if i18n_domains:
+                                newval = dugettext(i18n_domains[-1][1], value)
+                            else:
+                                newval = ugettext(value)
                     else:
+                        # Update context with current domains
+                        ctxt['i18n_domains'] = i18n_domains
                         newval = list(self(_ensure(value), ctxt,
-                            search_text=False)
-                        )
+                            search_text=False))
                     if newval != value:
                         value = newval
                         changed = True
@@ -185,7 +208,7 @@
                 elif i18n_choose in attrs:
                     params = attrs.get(i18n_choose)
                     msgbuf = MessageBuffer(params)
-                    attrs -= i18n_choose
+                    attrs -= i18n_choose
 
                 yield kind, (tag, attrs), pos
 
@@ -193,7 +216,11 @@
                 if not msgbuf:
                     text = data.strip()
                     if text:
-                        data = data.replace(text, unicode(ugettext(text)))
+                        if i18n_domains:
+                            data = data.replace(text,
+                                unicode(dugettext(i18n_domains[-1][1], text)))
+                        else:
+                            data = data.replace(text, unicode(ugettext(text)))
                     yield kind, data, pos
                 else:
                     msgbuf.append(kind, data, pos)
@@ -206,17 +233,28 @@
                     if not msgbuf.depth:
                         if msgbuf.singular or msgbuf.plural:
                             singular, plural, expr = msgbuf.format()
-                            events = ungettext(singular, plural,
-                                               expr.evaluate(ctxt))
+                            if i18n_domains:
+                                events = dungettext(i18n_domains[-1][1],
+                                                    singular, plural,
+                                                    expr.evaluate(ctxt))
+                            else:
+                                events = ungettext(singular, plural,
+                                                   expr.evaluate(ctxt))
                         else:
-                            events = ugettext(msgbuf.format())
+                            if i18n_domains:
+                                events = dugettext(i18n_domains[-1][1],
+                                                   msgbuf.format())
+                            else:
+                                events = ugettext(msgbuf.format())
                         for event in msgbuf.translate(events):
                             yield event
                         msgbuf = None
                 yield kind, data, pos
-
+
             elif kind is SUB:
                 subkind, substream = data
+                # Update context with current domains
+                ctxt['i18n_domains'] = i18n_domains
                 new_substream = list(self(substream, ctxt, msgbuf=msgbuf))
                 yield kind, (subkind, new_substream), pos
 
@@ -225,9 +263,13 @@
 
             elif kind is END_NS and data in ns_prefixes:
                 ns_prefixes.remove(data)
-
             else:
                 yield kind, data, pos
+
+            if kind is END and i18n_domains and i18n_domains[-1][0] == data:
+                i18n_domains.pop()
+                # Update context with current domains
+                ctxt['i18n_domains'] = i18n_domains
 
 GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext', 'dgettext', 'dngettext',
                      'ugettext', 'ungettext')
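To make the new behaviour concrete, here is an end-to-end sketch (again not part of the patch). It assumes the patched Translator can be constructed directly with a translations object such as the hypothetical DomainCatalogs stand-in above; text under an element carrying i18n:domain should then be looked up in that domain, and everything else in the default catalog:

from genshi.filters.i18n import Translator
from genshi.template import MarkupTemplate

tmpl = MarkupTemplate("""\
<html xmlns:i18n="http://genshi.edgewall.org/i18n">
  <p i18n:domain="plugin">Hello</p>
  <p>Hello</p>
</html>""")

translations = DomainCatalogs({
    'messages': {'Hello': u'Hallo'},
    'plugin':   {'Hello': u'Hallo (plugin)'},
})
tmpl.filters.insert(0, Translator(translations))
print tmpl.generate().render()
# The first <p> should come from the "plugin" catalog, the second from the
# default "messages" catalog.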
diff --git a/genshi/template/base.py b/genshi/template/base.py
--- a/genshi/template/base.py
+++ b/genshi/template/base.py
@@ -560,6 +560,7 @@
         template files.
         """
         from genshi.template.loader import TemplateNotFound
+        from genshi.filters.i18n import Translator
 
         for event in stream:
             if event[0] is INCLUDE:
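The genshi/template/base.py hunk is cut off here, but the new Translator import, together with Context and INCLUDE now being imported by the filter, points at the include machinery: the intent appears to be that the ctxt['i18n_domains'] stack maintained above carries over into included templates. A hedged sketch of how calling code would typically wire this up through a loader callback; the directory, file name and callback name are illustrative only, and translations is assumed to be a domain-aware object like the DomainCatalogs stand-in above:

from genshi.template import TemplateLoader
from genshi.filters.i18n import Translator

def install_translator(template):
    # Attach the same domain-aware translations object to every template the
    # loader parses, so xi:include'd templates share one translation filter.
    template.filters.insert(0, Translator(translations))

loader = TemplateLoader(['templates'], callback=install_translator)
tmpl = loader.load('page.html')      # may xi:include other templates
print tmpl.generate().render('html')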