Initial commit
This commit is contained in:
234
node_modules/node-blockly/blockly/i18n/common.py
generated
vendored
Normal file
234
node_modules/node-blockly/blockly/i18n/common.py
generated
vendored
Normal file
@@ -0,0 +1,234 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Code shared by translation conversion scripts.
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
# https://developers.google.com/blockly/
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import codecs
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
class InputError(Exception):
    """Error raised when an input file or value is malformed.

    Attributes:
        location -- where the error occurred (e.g. a filename)
        msg -- explanation of the error
    """

    def __init__(self, location, msg):
        # Record the details first, then build the human-readable message.
        self.location = location
        self.msg = msg
        Exception.__init__(self, '%s: %s' % (location, msg))
||||
def read_json_file(filename):
    """Read a JSON file as UTF-8 into a dictionary, discarding @metadata.

    Args:
        filename: The filename, which must end ".json".

    Returns:
        The dictionary.

    Raises:
        InputError: The filename did not end with ".json" or an error occurred
            while opening or reading the file.
    """
    if not filename.endswith('.json'):
        raise InputError(filename, 'filenames must end with ".json"')
    try:
        # Read in file.
        with codecs.open(filename, 'r', 'utf-8') as infile:
            defs = json.load(infile)
        # @metadata is translatewiki.net bookkeeping, not a message.
        if '@metadata' in defs:
            del defs['@metadata']
        return defs
    except ValueError as e:  # "as" syntax works on Python 2.6+ and 3.
        print('Error reading ' + filename)
        raise InputError(filename, str(e))
||||
def _create_qqq_file(output_dir):
|
||||
"""Creates a qqq.json file with message documentation for translatewiki.net.
|
||||
|
||||
The file consists of key-value pairs, where the keys are message ids and
|
||||
the values are descriptions for the translators of the messages.
|
||||
What documentation exists for the format can be found at:
|
||||
http://translatewiki.net/wiki/Translating:Localisation_for_developers#Message_documentation
|
||||
|
||||
The file should be closed by _close_qqq_file().
|
||||
|
||||
Parameters:
|
||||
output_dir: The output directory.
|
||||
|
||||
Returns:
|
||||
A pointer to a file to which a left brace and newline have been written.
|
||||
|
||||
Raises:
|
||||
IOError: An error occurred while opening or writing the file.
|
||||
"""
|
||||
qqq_file_name = os.path.join(os.curdir, output_dir, 'qqq.json')
|
||||
qqq_file = codecs.open(qqq_file_name, 'w', 'utf-8')
|
||||
print 'Created file: ' + qqq_file_name
|
||||
qqq_file.write('{\n')
|
||||
return qqq_file
|
||||
|
||||
|
||||
def _close_qqq_file(qqq_file):
|
||||
"""Closes a qqq.json file created and opened by _create_qqq_file().
|
||||
|
||||
This writes the final newlines and right brace.
|
||||
|
||||
Args:
|
||||
qqq_file: A file created by _create_qqq_file().
|
||||
|
||||
Raises:
|
||||
IOError: An error occurred while writing to or closing the file.
|
||||
"""
|
||||
qqq_file.write('\n}\n')
|
||||
qqq_file.close()
|
||||
|
||||
|
||||
def _create_lang_file(author, lang, output_dir):
|
||||
"""Creates a <lang>.json file for translatewiki.net.
|
||||
|
||||
The file consists of metadata, followed by key-value pairs, where the keys
|
||||
are message ids and the values are the messages in the language specified
|
||||
by the corresponding command-line argument. The file should be closed by
|
||||
_close_lang_file().
|
||||
|
||||
Args:
|
||||
author: Name and email address of contact for translators.
|
||||
lang: ISO 639-1 source language code.
|
||||
output_dir: Relative directory for output files.
|
||||
|
||||
Returns:
|
||||
A pointer to a file to which the metadata has been written.
|
||||
|
||||
Raises:
|
||||
IOError: An error occurred while opening or writing the file.
|
||||
"""
|
||||
lang_file_name = os.path.join(os.curdir, output_dir, lang + '.json')
|
||||
lang_file = codecs.open(lang_file_name, 'w', 'utf-8')
|
||||
print 'Created file: ' + lang_file_name
|
||||
# string.format doesn't like printing braces, so break up our writes.
|
||||
lang_file.write('{\n\t"@metadata": {')
|
||||
lang_file.write("""
|
||||
\t\t"author": "{0}",
|
||||
\t\t"lastupdated": "{1}",
|
||||
\t\t"locale": "{2}",
|
||||
\t\t"messagedocumentation" : "qqq"
|
||||
""".format(author, str(datetime.now()), lang))
|
||||
lang_file.write('\t},\n')
|
||||
return lang_file
|
||||
|
||||
|
||||
def _close_lang_file(lang_file):
|
||||
"""Closes a <lang>.json file created with _create_lang_file().
|
||||
|
||||
This also writes the terminating left brace and newline.
|
||||
|
||||
Args:
|
||||
lang_file: A file opened with _create_lang_file().
|
||||
|
||||
Raises:
|
||||
IOError: An error occurred while writing to or closing the file.
|
||||
"""
|
||||
lang_file.write('\n}\n')
|
||||
lang_file.close()
|
||||
|
||||
|
||||
def _create_key_file(output_dir):
|
||||
"""Creates a keys.json file mapping Closure keys to Blockly keys.
|
||||
|
||||
Args:
|
||||
output_dir: Relative directory for output files.
|
||||
|
||||
Raises:
|
||||
IOError: An error occurred while creating the file.
|
||||
"""
|
||||
key_file_name = os.path.join(os.curdir, output_dir, 'keys.json')
|
||||
key_file = open(key_file_name, 'w')
|
||||
key_file.write('{\n')
|
||||
print 'Created file: ' + key_file_name
|
||||
return key_file
|
||||
|
||||
|
||||
def _close_key_file(key_file):
|
||||
"""Closes a key file created and opened with _create_key_file().
|
||||
|
||||
Args:
|
||||
key_file: A file created by _create_key_file().
|
||||
|
||||
Raises:
|
||||
IOError: An error occurred while writing to or closing the file.
|
||||
"""
|
||||
key_file.write('\n}\n')
|
||||
key_file.close()
|
||||
|
||||
|
||||
def write_files(author, lang, output_dir, units, write_key_file):
    """Writes the output files for the given units.

    There are three possible output files:
    * lang_file: JSON file mapping meanings (e.g., Maze.turnLeft) to the
      English text. The base name of the language file is specified by the
      "lang" command-line argument.
    * key_file: JSON file mapping meanings to Soy-generated keys (long hash
      codes). This is only output if the parameter write_key_file is True.
    * qqq_file: JSON file mapping meanings to descriptions.

    Args:
        author: Name and email address of contact for translators.
        lang: ISO 639-1 source language code.
        output_dir: Relative directory for output files.
        units: A list of dictionaries with entries for 'meaning', 'source',
            'description', and 'key' (the last only if write_key_file is
            true), in the order desired in the output files.
            (Doc fix: the code reads unit['key'], not unit['keys'].)
        write_key_file: Whether to output a keys.json file.

    Raises:
        IOError: An error occurs opening, writing to, or closing a file.
        KeyError: An expected key is missing from units.
    """
    lang_file = _create_lang_file(author, lang, output_dir)
    qqq_file = _create_qqq_file(output_dir)
    if write_key_file:
        key_file = _create_key_file(output_dir)
    first_entry = True
    for unit in units:
        # Write the comma and newline that terminate the previous entry;
        # the final entry must not be followed by a comma.
        if not first_entry:
            lang_file.write(',\n')
            if write_key_file:
                key_file.write(',\n')
            qqq_file.write(',\n')
        # Double quotes are replaced with single quotes because values are
        # emitted inside double-quoted JSON strings without escaping.
        lang_file.write(u'\t"{0}": "{1}"'.format(
            unit['meaning'],
            unit['source'].replace('"', "'")))
        if write_key_file:
            key_file.write('"{0}": "{1}"'.format(unit['meaning'], unit['key']))
        # {lb}/{rb} placeholders become literal braces in descriptions.
        qqq_file.write(u'\t"{0}": "{1}"'.format(
            unit['meaning'],
            unit['description'].replace('"', "'").replace(
                '{lb}', '{').replace('{rb}', '}')))
        first_entry = False
    _close_lang_file(lang_file)
    if write_key_file:
        _close_key_file(key_file)
    _close_qqq_file(qqq_file)
||||
166
node_modules/node-blockly/blockly/i18n/create_messages.py
generated
vendored
Executable file
166
node_modules/node-blockly/blockly/i18n/create_messages.py
generated
vendored
Executable file
@@ -0,0 +1,166 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Generate .js files defining Blockly core and language messages.
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
# https://developers.google.com/blockly/
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from common import read_json_file
|
||||
|
||||
|
||||
_NEWLINE_PATTERN = re.compile('[\n\r]')
|
||||
|
||||
|
||||
def string_is_ascii(s):
    """Returns True if the string contains only ASCII characters.

    The original called s.decode('ascii'), which only exists on Python 2
    strings; encoding to ASCII performs the same check on Python 2 unicode
    strings (the type json.load produces) and on Python 3 str.
    """
    try:
        s.encode('ascii')
        return True
    except UnicodeEncodeError:
        return False
||||
def load_constants(filename):
    """Read in constants file, which must be output in every language."""
    constant_defs = read_json_file(filename)
    pieces = ['\n']
    for key, value in constant_defs.items():
        # Escape double quotes so each value is a valid JS string literal.
        escaped = value.replace('"', '\\"')
        pieces.append(u'\n/** @export */ Blockly.Msg.{0} = "{1}";'.format(
            key, escaped))
    return ''.join(pieces)
||||
def main():
    """Generate .js files defining Blockly core and language messages."""

    # Process command-line arguments.
    parser = argparse.ArgumentParser(description='Convert JSON files to JS.')
    parser.add_argument('--source_lang', default='en',
                        help='ISO 639-1 source language code')
    parser.add_argument('--source_lang_file',
                        default=os.path.join('json', 'en.json'),
                        help='Path to .json file for source language')
    parser.add_argument('--source_synonym_file',
                        default=os.path.join('json', 'synonyms.json'),
                        help='Path to .json file with synonym definitions')
    parser.add_argument('--source_constants_file',
                        default=os.path.join('json', 'constants.json'),
                        help='Path to .json file with constant definitions')
    parser.add_argument('--output_dir', default='js/',
                        help='relative directory for output files')
    parser.add_argument('--key_file', default='keys.json',
                        help='relative path to input keys file')
    parser.add_argument('--quiet', action='store_true', default=False,
                        help='do not write anything to standard output')
    parser.add_argument('files', nargs='+', help='input files')
    args = parser.parse_args()
    if not args.output_dir.endswith(os.path.sep):
        args.output_dir += os.path.sep

    # Read in source language .json file, which provides any values missing
    # in target languages' .json files.
    source_defs = read_json_file(os.path.join(os.curdir, args.source_lang_file))
    # Make sure the source file doesn't contain a newline or carriage return.
    for key, value in source_defs.items():
        if _NEWLINE_PATTERN.search(value):
            print('ERROR: definition of {0} in {1} contained a newline '
                  'character.'.format(key, args.source_lang_file))
            sys.exit(1)
    # sorted() works on Python 2 and 3; the original .keys() then .sort()
    # fails on Python 3, where dict views have no sort() method.
    sorted_keys = sorted(source_defs)

    # Read in synonyms file, which must be output in every language.
    synonym_defs = read_json_file(os.path.join(
        os.curdir, args.source_synonym_file))
    synonym_text = '\n'.join(
        [u'/** @export */ Blockly.Msg.{0} = Blockly.Msg.{1};'
         .format(key, synonym_defs[key]) for key in synonym_defs])

    # Read in constants file, which must be output in every language.
    constants_text = load_constants(
        os.path.join(os.curdir, args.source_constants_file))

    # Create each output file.
    for arg_file in args.files:
        (_, filename) = os.path.split(arg_file)
        target_lang = filename[:filename.index('.')]
        if target_lang not in ('qqq', 'keys', 'synonyms', 'constants'):
            target_defs = read_json_file(os.path.join(os.curdir, arg_file))

            # Verify that keys are 'ascii'
            bad_keys = [key for key in target_defs if not string_is_ascii(key)]
            if bad_keys:
                print(u'These keys in {0} contain non ascii characters: {1}'
                      .format(filename, ', '.join(bad_keys)))

            # If there's a '\n' or '\r', remove it and print a warning.
            for key, value in target_defs.items():
                if _NEWLINE_PATTERN.search(value):
                    print(u'WARNING: definition of {0} in {1} contained '
                          'a newline character.'.format(key, arg_file))
                    target_defs[key] = _NEWLINE_PATTERN.sub(' ', value)

            # Output file.
            outname = os.path.join(
                os.curdir, args.output_dir, target_lang + '.js')
            with codecs.open(outname, 'w', 'utf-8') as outfile:
                outfile.write(
                    """// This file was automatically generated. Do not modify.

'use strict';

goog.provide('Blockly.Msg.{0}');

goog.require('Blockly.Msg');

""".format(target_lang.replace('-', '.')))
                # For each key in the source language file, output the target
                # value if present; otherwise, output the source language
                # value with a warning comment.
                for key in sorted_keys:
                    if key in target_defs:
                        value = target_defs[key]
                        comment = ''
                        # Consume the key so only target-only keys remain.
                        del target_defs[key]
                    else:
                        value = source_defs[key]
                        comment = ' // untranslated'
                    value = value.replace('"', '\\"')
                    outfile.write(u'/** @export */ Blockly.Msg.{0} = "{1}";{2}\n'
                                  .format(key, value, comment))

                # Announce any keys defined only for target language.
                if target_defs:
                    extra_keys = [key for key in target_defs
                                  if key not in synonym_defs]
                    synonym_keys = [key for key in target_defs
                                    if key in synonym_defs]
                    if not args.quiet:
                        if extra_keys:
                            print(u'These extra keys appeared in {0}: {1}'
                                  .format(filename, ', '.join(extra_keys)))
                        if synonym_keys:
                            print(u'These synonym keys appeared in {0}: {1}'
                                  .format(filename, ', '.join(synonym_keys)))

                outfile.write(synonym_text)
                outfile.write(constants_text)

            if not args.quiet:
                print('Created {0}.'.format(outname))


if __name__ == '__main__':
    main()
|
||||
73
node_modules/node-blockly/blockly/i18n/dedup_json.py
generated
vendored
Executable file
73
node_modules/node-blockly/blockly/i18n/dedup_json.py
generated
vendored
Executable file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Consolidates duplicate key-value pairs in a JSON file.
|
||||
# If the same key is used with different values, no warning is given,
|
||||
# and there is no guarantee about which key-value pair will be output.
|
||||
# There is also no guarantee as to the order of the key-value pairs
|
||||
# output.
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
# https://developers.google.com/blockly/
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import json
|
||||
from common import InputError
|
||||
|
||||
|
||||
def main():
    """Parses arguments and iterates over files.

    Raises:
        IOError: An I/O error occurred with an input or output file.
        InputError: Input JSON could not be parsed.
    """

    # Set up argument parser.
    parser = argparse.ArgumentParser(
        description='Removes duplicate key-value pairs from JSON files.')
    parser.add_argument('--suffix', default='',
                        help='optional suffix for output files; '
                        'if empty, files will be changed in place')
    parser.add_argument('files', nargs='+', help='input files')
    args = parser.parse_args()

    # Iterate over files.
    for filename in args.files:
        # Read in json using Python libraries. This eliminates duplicates.
        print('Processing ' + filename + '...')
        try:
            with codecs.open(filename, 'r', 'utf-8') as infile:
                j = json.load(infile)
        except ValueError as e:  # "as" syntax works on Python 2.6+ and 3.
            print('Error reading ' + filename)
            # Bug fix: the original passed the builtin "file" instead of the
            # loop variable "filename" to InputError.
            raise InputError(filename, str(e))

        # Build up output strings as an array to make output of delimiters
        # easier.
        output = []
        for key in j:
            if key != '@metadata':
                output.append('\t"' + key + '": "' +
                              j[key].replace('\n', '\\n') + '"')

        # Output results.
        with codecs.open(filename + args.suffix, 'w', 'utf-8') as outfile:
            outfile.write('{\n')
            outfile.write(',\n'.join(output))
            outfile.write('\n}\n')


if __name__ == '__main__':
    main()
|
||||
136
node_modules/node-blockly/blockly/i18n/js_to_json.py
generated
vendored
Executable file
136
node_modules/node-blockly/blockly/i18n/js_to_json.py
generated
vendored
Executable file
@@ -0,0 +1,136 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Gives the translation status of the specified apps and languages.
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
# https://developers.google.com/blockly/
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Extracts messages from .js files into .json files for translation.
|
||||
|
||||
Specifically, lines with the following formats are extracted:
|
||||
|
||||
/// Here is a description of the following message.
|
||||
Blockly.SOME_KEY = 'Some value';
|
||||
|
||||
Adjacent "///" lines are concatenated.
|
||||
|
||||
There are two output files, each of which is proper JSON. For each key, the
|
||||
file en.json would get an entry of the form:
|
||||
|
||||
"Blockly.SOME_KEY", "Some value",
|
||||
|
||||
The file qqq.json would get:
|
||||
|
||||
"Blockly.SOME_KEY", "Here is a description of the following message.",
|
||||
|
||||
Commas would of course be omitted for the final entry of each value.
|
||||
|
||||
@author Ellen Spertus (ellen.spertus@gmail.com)
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from common import write_files
|
||||
|
||||
|
||||
# Matches a message definition line, capturing the key and the
# single-quoted value, e.g.:  Blockly.Msg.SOME_KEY = 'Some value';
# NOTE(review): the dots in "Blockly.Msg." are unescaped, so they match any
# character — harmless for well-formed input, but worth confirming.
_INPUT_DEF_PATTERN = re.compile("""Blockly.Msg.(\w*)\s*=\s*'(.*)';?\r?$""")

# Matches a synonym definition, capturing both keys, e.g.:
#   Blockly.Msg.A = Blockly.Msg.B;
_INPUT_SYN_PATTERN = re.compile(
    """Blockly.Msg.(\w*)\s*=\s*Blockly.Msg.(\w*);""")

# Marks a description as a language-independent constant ({{Notranslate}}),
# matched case-insensitively.
_CONSTANT_DESCRIPTION_PATTERN = re.compile(
    """{{Notranslate}}""", re.IGNORECASE)
||||
|
||||
def main():
    """Extracts messages from the input .js file into translation .json files."""
    # Set up argument parser.
    parser = argparse.ArgumentParser(description='Create translation files.')
    parser.add_argument(
        '--author',
        default='Ellen Spertus <ellen.spertus@gmail.com>',
        help='name and email address of contact for translators')
    parser.add_argument('--lang', default='en',
                        help='ISO 639-1 source language code')
    parser.add_argument('--output_dir', default='json',
                        help='relative directory for output files')
    parser.add_argument('--input_file', default='messages.js',
                        help='input file')
    parser.add_argument('--quiet', action='store_true', default=False,
                        help='only display warnings, not routine info')
    args = parser.parse_args()
    if not args.output_dir.endswith(os.path.sep):
        args.output_dir += os.path.sep

    # Read and parse input file.
    results = []
    synonyms = {}
    constants = {}  # Values that are constant across all languages.
    description = ''
    infile = codecs.open(args.input_file, 'r', 'utf-8')
    for line in infile:
        if line.startswith('///'):
            # Adjacent "///" description lines are concatenated.
            if description:
                description = description + ' ' + line[3:].strip()
            else:
                description = line[3:].strip()
        else:
            match = _INPUT_DEF_PATTERN.match(line)
            if match:
                key = match.group(1)
                value = match.group(2).replace("\\'", "'")
                if not description:
                    # Bug fix: the original printed result['meaning'], but
                    # "result" is undefined on the first iteration and stale
                    # afterwards; the key just parsed is what we mean.
                    print('Warning: No description for ' + key)
                if (description and
                        _CONSTANT_DESCRIPTION_PATTERN.search(description)):
                    constants[key] = value
                else:
                    result = {}
                    result['meaning'] = key
                    result['source'] = value
                    result['description'] = description
                    results.append(result)
                description = ''
            else:
                match = _INPUT_SYN_PATTERN.match(line)
                if match:
                    if description:
                        print('Warning: Description preceding definition of '
                              'synonym {0}.'.format(match.group(1)))
                        description = ''
                    synonyms[match.group(1)] = match.group(2)
    infile.close()

    # Create <lang_file>.json, keys.json, and qqq.json.
    write_files(args.author, args.lang, args.output_dir, results, False)

    # Create synonyms.json.
    synonym_file_name = os.path.join(
        os.curdir, args.output_dir, 'synonyms.json')
    with open(synonym_file_name, 'w') as outfile:
        json.dump(synonyms, outfile)
    if not args.quiet:
        print("Wrote {0} synonym pairs to {1}.".format(
            len(synonyms), synonym_file_name))

    # Create constants.json
    constants_file_name = os.path.join(
        os.curdir, args.output_dir, 'constants.json')
    with open(constants_file_name, 'w') as outfile:
        json.dump(constants, outfile)
    if not args.quiet:
        # Bug fix: the original reported synonym_file_name here.
        print("Wrote {0} constant pairs to {1}.".format(
            len(constants), constants_file_name))


if __name__ == '__main__':
    main()
|
||||
185
node_modules/node-blockly/blockly/i18n/json_to_js.py
generated
vendored
Executable file
185
node_modules/node-blockly/blockly/i18n/json_to_js.py
generated
vendored
Executable file
@@ -0,0 +1,185 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Converts .json files into .js files for use within Blockly apps.
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
# https://developers.google.com/blockly/
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import codecs # for codecs.open(..., 'utf-8')
|
||||
import glob
|
||||
import json # for json.load()
|
||||
import os # for os.path()
|
||||
import subprocess # for subprocess.check_call()
|
||||
from common import InputError
|
||||
from common import read_json_file
|
||||
|
||||
|
||||
# Store parsed command-line arguments in global variable.
# Populated by main() via "global args"; read by _create_xlf()
# (output_dir, source_lang) and _process_file() (key_file).
args = None
||||
|
||||
|
||||
def _create_xlf(target_lang):
    """Creates a <target_lang>.xlf file for Soy.

    Args:
        target_lang: The ISO 639 language code for the target language.
            This is used in the name of the file and in the metadata.

    Returns:
        A pointer to a file to which the metadata has been written.

    Raises:
        IOError: An error occurred while opening or writing the file.
    """
    # Reads output_dir and source_lang from the module-global "args"
    # (set in main()).
    filename = os.path.join(os.curdir, args.output_dir, target_lang + '.xlf')
    out_file = codecs.open(filename, 'w', 'utf-8')
    # Write the XLIFF 1.2 preamble; the matching close tags are written by
    # _close_xlf().
    out_file.write("""<?xml version="1.0" encoding="UTF-8"?>
<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">
<file original="SoyMsgBundle"
datatype="x-soy-msg-bundle"
xml:space="preserve"
source-language="{0}"
target-language="{1}">
<body>""".format(args.source_lang, target_lang))
    return out_file
|
||||
|
||||
def _close_xlf(xlf_file):
    """Closes a <target_lang>.xlf file created with create_xlf().

    This includes writing the terminating XML.

    Args:
        xlf_file: A pointer to a file created by _create_xlf().

    Raises:
        IOError: An error occurred while writing to or closing the file.
    """
    # Close the <body>, <file>, and <xliff> elements opened by _create_xlf().
    xlf_file.write("""
</body>
</file>
</xliff>
""")
    xlf_file.close()
||||
|
||||
|
||||
def _process_file(path_to_json, target_lang, key_dict):
    """Creates an .xlf file corresponding to the specified .json input file.

    The name of the input file must be target_lang followed by '.json'.
    The name of the output file will be target_lang followed by '.js'.

    Args:
        path_to_json: Path to the directory of xx.json files.
        target_lang: A IETF language code (RFC 4646), such as 'es' or 'pt-br'.
        key_dict: Dictionary mapping Blockly keys (e.g., Maze.turnLeft) to
            Closure keys (hash numbers).

    Raises:
        IOError: An I/O error occurred with an input or output file.
        InputError: Input JSON could not be parsed.
        KeyError: Key found in input file but not in key file.
    """
    keyfile = os.path.join(path_to_json, target_lang + '.json')
    j = read_json_file(keyfile)
    out_file = _create_xlf(target_lang)
    for key in j:
        if key != '@metadata':
            try:
                identifier = key_dict[key]
            except KeyError:  # "as"-less form works on Python 2 and 3.
                print('Key "%s" is in %s but not in %s' %
                      (key, keyfile, args.key_file))
                # Bare raise preserves the original traceback
                # (the original "raise e" re-raised from this frame).
                raise
            target = j.get(key)
            out_file.write(u"""
<trans-unit id="{0}" datatype="html">
<target>{1}</target>
</trans-unit>""".format(identifier, target))
    _close_xlf(out_file)
||||
|
||||
|
||||
def main():
    """Parses arguments and iterates over files."""

    # Set up argument parser.
    parser = argparse.ArgumentParser(description='Convert JSON files to JS.')
    parser.add_argument('--source_lang', default='en',
                        help='ISO 639-1 source language code')
    parser.add_argument('--output_dir', default='generated',
                        help='relative directory for output files')
    parser.add_argument('--key_file',
                        default='json' + os.path.sep + 'keys.json',
                        help='relative path to input keys file')
    parser.add_argument('--template', default='template.soy')
    parser.add_argument('--path_to_jar',
                        default='..' + os.path.sep + 'apps' + os.path.sep
                        + '_soy',
                        help='relative path from working directory to '
                        'SoyToJsSrcCompiler.jar')
    parser.add_argument('files', nargs='+', help='input files')

    # Initialize global variables.
    global args
    args = parser.parse_args()

    # Make sure output_dir ends with slash.
    if not args.output_dir.endswith(os.path.sep):
        args.output_dir += os.path.sep

    # Read in keys.json, mapping descriptions (e.g., Maze.turnLeft) to
    # Closure keys (long hash numbers).  A with-statement guarantees the
    # file is closed even if json.load raises (the original leaked it).
    with open(args.key_file) as key_file:
        key_dict = json.load(key_file)

    # Process each input file.
    print('Creating .xlf files...')
    processed_langs = []
    if len(args.files) == 1:
        # Windows does not expand globs automatically.
        args.files = glob.glob(args.files[0])
    for arg_file in args.files:
        (path_to_json, filename) = os.path.split(arg_file)
        if not filename.endswith('.json'):
            raise InputError(filename, 'filenames must end with ".json"')
        target_lang = filename[:filename.index('.')]
        if target_lang not in ('qqq', 'keys'):
            processed_langs.append(target_lang)
            _process_file(path_to_json, target_lang, key_dict)

    # Output command line for Closure compiler.
    if processed_langs:
        print('Creating .js files...')
        processed_lang_list = ','.join(processed_langs)
        subprocess.check_call([
            'java',
            '-jar', os.path.join(args.path_to_jar, 'SoyToJsSrcCompiler.jar'),
            '--locales', processed_lang_list,
            '--messageFilePathFormat', args.output_dir + '{LOCALE}.xlf',
            '--outputPathFormat', args.output_dir + '{LOCALE}.js',
            '--srcs', args.template])
        if len(processed_langs) == 1:
            print('Created ' + processed_lang_list + '.js in '
                  + args.output_dir)
        else:
            print('Created {' + processed_lang_list + '}.js in '
                  + args.output_dir)

        # The .xlf files were only needed as compiler input; clean them up.
        for lang in processed_langs:
            os.remove(args.output_dir + lang + '.xlf')
        print('Removed .xlf files.')


if __name__ == '__main__':
    main()
|
||||
47
node_modules/node-blockly/blockly/i18n/tests.py
generated
vendored
Normal file
47
node_modules/node-blockly/blockly/i18n/tests.py
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Tests of i18n scripts.
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
# https://developers.google.com/blockly/
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import common
|
||||
import re
|
||||
import unittest
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
    """Tests for the i18n helper functions in common.py."""

    def test_insert_breaks(self):
        """common.insert_breaks() must not add or drop any visible character.

        Line-breaking may move whitespace around, so the check compares the
        input and output with all whitespace (and literal '\\n' sequences)
        stripped out.
        """
        spaces = re.compile(r'\s+|\\n')

        def contains_all_chars(orig, result):
            # Equal once whitespace and literal '\n' markers are removed.
            return re.sub(spaces, '', orig) == re.sub(spaces, '', result)

        sentences = [u'Quay Pegman qua bên trái hoặc bên phải 90 độ.',
                     u'Foo bar baz this is english that is okay bye.',
                     u'If there is a path in the specified direction, \nthen ' +
                     u'do some actions.',
                     u'If there is a path in the specified direction, then do ' +
                     u'the first block of actions. Otherwise, do the second ' +
                     u'block of actions.']
        for sentence in sentences:
            output = common.insert_breaks(sentence, 30, 50)
            # assertTrue replaces the deprecated assert_ alias, which was
            # removed in Python 3.12.
            self.assertTrue(contains_all_chars(sentence, output),
                            u'Mismatch between:\n{0}\n{1}'.format(
                                re.sub(spaces, '', sentence),
                                re.sub(spaces, '', output)))
|
||||
|
||||
|
||||
# Run the test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
|
||||
232
node_modules/node-blockly/blockly/i18n/xliff_to_json.py
generated
vendored
Executable file
232
node_modules/node-blockly/blockly/i18n/xliff_to_json.py
generated
vendored
Executable file
@@ -0,0 +1,232 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Converts .xlf files into .json files for use at http://translatewiki.net.
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
# https://developers.google.com/blockly/
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from xml.dom import minidom
|
||||
from common import InputError
|
||||
from common import write_files
|
||||
|
||||
# Global variables shared by the functions in this module.
args = None  # Parsed command-line arguments; populated by main().
|
||||
|
||||
|
||||
def _parse_trans_unit(trans_unit):
    """Converts a trans-unit XML node into a more convenient dictionary format.

    Args:
        trans_unit: An XML representation of a .xlf translation unit.

    Returns:
        A dictionary with useful information about the translation unit.
        The returned dictionary is guaranteed to have an entry for 'key' and
        may have entries for 'source', 'target', 'description', and 'meaning'
        if present in the argument.

    Raises:
        InputError: A required field was not present.
    """

    def get_value(tag_name):
        # Return the serialized contents of the unique child element with
        # the given tag name, None if absent, or raise if ambiguous.
        elts = trans_unit.getElementsByTagName(tag_name)
        if not elts:
            return None
        elif len(elts) == 1:
            return ''.join([child.toxml() for child in elts[0].childNodes])
        else:
            raise InputError('', 'Unable to extract ' + tag_name)

    result = {}
    key = trans_unit.getAttribute('id')
    if not key:
        raise InputError('', 'id attribute not found')
    result['key'] = key

    # Get source and target, if present.
    try:
        result['source'] = get_value('source')
        result['target'] = get_value('target')
    except InputError as e:  # 'except InputError, e' is invalid in Python 3.
        raise InputError(key, e.msg)

    # Get notes, using the from value as key and the data as value.
    notes = trans_unit.getElementsByTagName('note')
    for note in notes:
        from_value = note.getAttribute('from')
        if from_value and len(note.childNodes) == 1:
            result[from_value] = note.childNodes[0].data
        else:
            raise InputError(key, 'Unable to extract ' + from_value)

    return result
|
||||
|
||||
|
||||
def _process_file(filename):
    """Builds list of translation units from input file.

    Each translation unit in the input file includes:
    - an id (opaquely generated by Soy)
    - the Blockly name for the message
    - the text in the source language (generally English)
    - a description for the translator

    The Soy and Blockly ids are joined with a hyphen and serve as the
    keys in both output files.  The value is the corresponding text (in the
    <lang>.json file) or the description (in the qqq.json file).

    Args:
        filename: The name of an .xlf file produced by Closure.

    Raises:
        IOError: An I/O error occurred with an input or output file.
        InputError: The input file could not be parsed or lacked required
            fields.

    Returns:
        A list of dictionaries produced by parse_trans_unit().
    """
    try:
        results = []  # List of dictionaries (return value).
        names = []    # Names (meanings) of encountered keys.
        try:
            parsed_xml = minidom.parse(filename)
        except IOError:
            # Don't get caught by the handler below.
            raise
        except Exception as e:  # 'except Exception, e' is invalid in Python 3.
            print('')
            raise InputError(filename, str(e))

        # Make sure needed fields are present and non-empty.
        for trans_unit in parsed_xml.getElementsByTagName('trans-unit'):
            unit = _parse_trans_unit(trans_unit)
            for key in ['description', 'meaning', 'source']:
                if key not in unit or not unit[key]:
                    raise InputError(filename + ':' + unit['key'],
                                     key + ' not found')
            if unit['description'].lower() == 'ibid':
                if unit['meaning'] not in names:
                    # If the term has not already been described, the use of
                    # 'ibid' is an error.
                    raise InputError(
                        filename,
                        'First encountered definition of: ' + unit['meaning']
                        + ' has definition: ' + unit['description']
                        + '. This error can occur if the definition was not'
                        + ' provided on the first appearance of the message'
                        + ' or if the source (English-language) messages differ.')
                else:
                    # If term has already been described, 'ibid' was used
                    # correctly, and we output nothing.
                    pass
            else:
                if unit['meaning'] in names:
                    raise InputError(filename,
                                     'Second definition of: ' + unit['meaning'])
                names.append(unit['meaning'])
                results.append(unit)

        return results
    except IOError as e:
        print('Error with file {0}: {1}'.format(filename, e.strerror))
        sys.exit(1)
|
||||
|
||||
|
||||
def sort_units(units, templates):
    """Sorts the translation units by their definition order in the template.

    Args:
        units: A list of dictionaries produced by parse_trans_unit()
            that have a non-empty value for the key 'meaning'.
        templates: A string containing the Soy templates in which each of
            the units' meanings is defined.

    Returns:
        A new list of translation units, sorted by the order in which
        their meaning is defined in the templates.

    Raises:
        InputError: If a meaning definition cannot be found in the
            templates.
    """
    def _definition_offset(unit):
        # Offset of this unit's meaning attribute within the templates;
        # used as the sort key.
        pattern = '\\smeaning\\s*=\\s*"{0}"\\s'.format(unit['meaning'])
        found = re.search(pattern, templates)
        if not found:
            raise InputError(args.templates,
                             'msg definition for meaning not found: ' +
                             unit['meaning'])
        return found.start()

    return sorted(units, key=_definition_offset)
|
||||
|
||||
|
||||
def main():
    """Parses arguments and processes the specified file.

    Raises:
        IOError: An I/O error occurred with an input or output file.
        InputError: Input files lacked required fields.
    """
    # Set up argument parser.
    parser = argparse.ArgumentParser(description='Create translation files.')
    parser.add_argument(
        '--author',
        default='Ellen Spertus <ellen.spertus@gmail.com>',
        help='name and email address of contact for translators')
    parser.add_argument('--lang', default='en',
                        help='ISO 639-1 source language code')
    parser.add_argument('--output_dir', default='json',
                        help='relative directory for output files')
    parser.add_argument('--xlf', help='file containing xlf definitions')
    parser.add_argument('--templates', default=['template.soy'], nargs='+',
                        help='relative path to Soy templates, comma or space '
                        'separated (used for ordering messages)')
    global args
    args = parser.parse_args()

    # Make sure output_dir ends with slash.
    if not args.output_dir.endswith(os.path.sep):
        args.output_dir += os.path.sep

    # Process the input file, and sort the entries.
    units = _process_file(args.xlf)
    files = []
    for arg in args.templates:
        for filename in arg.split(','):
            filename = filename.strip()
            if filename:
                with open(filename) as myfile:
                    # Collapse each template to one line so sort_units() can
                    # locate the meaning attributes with a flat regex search.
                    files.append(' '.join(line.strip() for line in myfile))
    sorted_units = sort_units(units, ' '.join(files))

    # Write the output files.
    write_files(args.author, args.lang, args.output_dir, sorted_units, True)

    # Delete the input .xlf file.
    os.remove(args.xlf)
    print('Removed ' + args.xlf)
|
||||
|
||||
|
||||
# Script entry point: run the conversion when executed directly.
if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user