Work on loading UI translations (#2969)
* Load translations for Javascript in page template
* Normalise language codes to gettext format with underscores
* .mo files need to be under LC_MESSAGES as well
* remove unused JS code
* Normalise result in test
* Fix for opening files on Py 2
* Fix location of I18N directory
* Add translation files to package_data
Showing 12 changed files with 123 additions and 71 deletions.
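The last two bullets are packaging concerns: the compiled .mo catalogues and the nbjs.json files only work if they ship inside the installed package, nested under an LC_MESSAGES directory where gettext expects them. A minimal sketch of what such a package_data entry could look like in setup.py (the package name and glob patterns here are illustrative assumptions, not the exact lines from this commit):

    # setup.py -- illustrative sketch, not the exact diff from this commit
    from setuptools import setup, find_packages

    setup(
        name='notebook',
        packages=find_packages(),
        package_data={
            # Ship translation catalogues with the package: gettext expects
            # <lang>/LC_MESSAGES/<domain>.mo, and the JS side reads nbjs.json.
            'notebook.i18n': ['*/LC_MESSAGES/*.mo',
                              '*/LC_MESSAGES/nbjs.json'],
        },
    )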
@@ -0,0 +1,99 @@
"""Server functions for loading translations
"""
from collections import defaultdict
import errno
import io
import json
from os.path import dirname, join as pjoin
import re

I18N_DIR = dirname(__file__)
# Cache structure:
# {'nbjs': {          # Domain
#   'zh-CN': {        # Language code
#     <english string>: <translated string>
#     ...
#   }
# }}
TRANSLATIONS_CACHE = {'nbjs': {}}


_accept_lang_re = re.compile(r'''
(?P<lang>[a-zA-Z]{1,8}(-[a-zA-Z]{1,8})?)
(\s*;\s*q\s*=\s*
(?P<qvalue>[01](.\d+)?)
)?''', re.VERBOSE)

def parse_accept_lang_header(accept_lang):
    """Parses the 'Accept-Language' HTTP header.
    Returns a list of language codes in *ascending* order of preference
    (with the most preferred language last).
    """
    by_q = defaultdict(list)
    for part in accept_lang.split(','):
        m = _accept_lang_re.match(part.strip())
        if not m:
            continue
        lang, qvalue = m.group('lang', 'qvalue')
        # Browser header format is zh-CN, gettext uses zh_CN
        lang = lang.replace('-', '_')
        if qvalue is None:
            qvalue = 1.
        else:
            qvalue = float(qvalue)
        if qvalue == 0:
            continue  # 0 means not accepted
        by_q[qvalue].append(lang)

    res = []
    for qvalue, langs in sorted(by_q.items()):
        res.extend(sorted(langs))
    return res

def load(language, domain='nbjs'):
    """Load translations from an nbjs.json file"""
    try:
        f = io.open(pjoin(I18N_DIR, language, 'LC_MESSAGES', 'nbjs.json'),
                    encoding='utf-8')
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return {}

    with f:
        data = json.load(f)
    return data["locale_data"][domain]

def cached_load(language, domain='nbjs'):
    """Load translations for one language, using in-memory cache if available"""
    domain_cache = TRANSLATIONS_CACHE[domain]
    try:
        return domain_cache[language]
    except KeyError:
        data = load(language, domain)
        domain_cache[language] = data
        return data

def combine_translations(accept_language, domain='nbjs'):
    """Combine translations for multiple accepted languages.
    Returns data re-packaged in jed1.x format.
    """
    lang_codes = parse_accept_lang_header(accept_language)
    combined = {}
    for language in lang_codes:
        if language == 'en':
            # en is default, all translations are in frontend.
            combined.clear()
        else:
            combined.update(cached_load(language, domain))

    combined[''] = {"domain":"nbjs"}

    return {
        "domain": domain,
        "locale_data": {
            domain: combined
        }
    }
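Taken together, the module turns a raw Accept-Language header into a single Jed-1.x-style payload for the frontend. A quick sketch of how the functions compose, grounded in the test expectations below (the combine_translations call assumes a <lang>/LC_MESSAGES/nbjs.json catalogue is actually installed; without one it simply returns the empty defaults):

    from notebook import i18n

    # Header parsing: ascending preference, most preferred language last,
    # with browser-style 'zh-CN' normalised to gettext-style 'zh_CN'.
    i18n.parse_accept_lang_header('zh-CN,en-GB;q=0.7,en;q=0.3')
    # -> ['en', 'en_GB', 'zh_CN']

    # Merging: every accepted language is folded into one Jed 1.x payload.
    payload = i18n.combine_translations('zh-CN,en;q=0.5')
    # payload has the shape
    # {'domain': 'nbjs',
    #  'locale_data': {'nbjs': {'': {'domain': 'nbjs'},
    #                           <english string>: <translation>, ...}}}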
File renamed without changes.
File renamed without changes.
File renamed without changes.
This file was deleted.
@@ -0,0 +1,10 @@
import nose.tools as nt

from notebook import i18n

def test_parse_accept_lang_header():
    palh = i18n.parse_accept_lang_header
    nt.assert_equal(palh(''), [])
    nt.assert_equal(palh('zh-CN,en-GB;q=0.7,en;q=0.3'),
                    ['en', 'en_GB', 'zh_CN'])
    nt.assert_equal(palh('nl,fr;q=0'), ['nl'])
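The first bullet in the commit message covers the other half of the work: handing this payload to the browser. One plausible wiring (a sketch only; the handler and template variable names here are hypothetical and not taken from the diff) is for the request handler to build the payload from the incoming header and let the page template inline it as JSON for the JS i18n layer:

    import json
    from notebook.i18n import combine_translations

    # Hypothetical glue code: the real handler and template names may differ.
    def nbjs_translations_json(request):
        accept_lang = request.headers.get('Accept-Language', '')
        return json.dumps(combine_translations(accept_lang))

    # A page template could then embed the result for the frontend,
    # e.g. (Jinja2-style, illustrative only):
    #   <script>var nbjs_translations = {{ nbjs_translations }};</script>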